/[escript]/trunk/SConstruct

Diff of /trunk/SConstruct


revision 1324 by ksteube, Sun Sep 30 23:35:17 2007 UTC  vs.  revision 6035 by caltinay, Wed Mar 9 00:39:20 2016 UTC
# Line 1 (revision 1324) | Line 1 (revision 6035)
1  #         Copyright 2006 by ACcESS MNRF  ##############################################################################
2  #  #
3  #              http://www.access.edu.au  # Copyright (c) 2003-2016 by The University of Queensland
4  #       Primary Business: Queensland, Australia  # http://www.uq.edu.au
 #  Licensed under the Open Software License version 3.0  
 #     http://www.opensource.org/licenses/osl-3.0.php  
   
 # top-level Scons configuration file for all esys13 modules  
 # Begin initialisation Section  
 # all of this section just intialises default environments and helper  
 # scripts. You shouldn't need to modify this section.  
 EnsureSConsVersion(0,96,91)  
 EnsurePythonVersion(2,3)  
   
 #===============================================================  
 #   import tools:  
 import glob  
 import sys, os, re  
 # Add our extensions  
 if sys.path.count('scons')==0: sys.path.append('scons')  
 import scons_extensions  
   
 #===============================================================  
   
 tools_prefix="/usr"  
   
 #==============================================================================================      
 #      
 #    get the installation prefix  
5  #  #
6  prefix = ARGUMENTS.get('prefix', '/usr')  # Primary Business: Queensland, Australia
7    # Licensed under the Open Software License version 3.0
8  # We may also need to know where python's site-packages subdirectory lives  # http://www.opensource.org/licenses/osl-3.0.php
9  python_version = 'python%s.%s'%(sys.version_info[0],sys.version_info[1])  #
10    # Development until 2012 by Earth Systems Science Computational Center (ESSCC)
11  # Install as a standard python package in /usr/lib64 if available, else in /usr/lib  # Development 2012-2013 by School of Earth Sciences
12  if os.path.isdir(  prefix+"/lib64/"+python_version+"/site-packages"):  # Development from 2014 by Centre for Geoscience Computing (GeoComp)
13     sys_dir_packages =  prefix+"/lib64/"+python_version+"/site-packages/esys"  #
14     sys_dir_libraries = prefix+"/lib64"  ##############################################################################
15  else:  
16     sys_dir_packages =  prefix+"/lib/"+python_version+"/site-packages/esys"  EnsureSConsVersion(0,98,1)
17     sys_dir_libraries = prefix+"/lib"  EnsurePythonVersion(2,5)
18    
19  sys_dir_examples = prefix+"/share/doc/esys"  import atexit, sys, os, platform, re
20    from distutils import sysconfig
21  source_root = Dir('#.').abspath  from dependencies import *
22    from site_init import *
23  dir_packages = os.path.join(source_root,"esys")  
24  dir_examples = os.path.join(source_root,"examples")  # Version number to check for in options file. Increment when new features are
25  dir_libraries = os.path.join(source_root,"lib")  # added or existing options changed.
26    REQUIRED_OPTS_VERSION=202
27  print "Source root is : ",source_root  
28  print " Default packages local installation:    ", dir_packages  # MS Windows support, many thanks to PH
29  print " Default library local installation  ", dir_libraries  IS_WINDOWS = (os.name == 'nt')
30  print " Default example local  installation:    ", dir_examples  
31  print "Install prefix is: ", prefix  IS_OSX = (os.uname()[0] == 'Darwin')
32  print " Default packages system installation:   ", sys_dir_packages  
33  print " Default library system installation     ", sys_dir_libraries  ########################## Determine options file ############################
34  print " Default example system installation:    ", sys_dir_examples  # 1. command line
35    # 2. scons/<hostname>_options.py
36  #==============================================================================================      # 3. name as part of a cluster
37    options_file=ARGUMENTS.get('options_file', None)
38  # Default options and options help text  if not options_file:
39  # These are defaults and can be overridden using command line arguments or an options file.      ext_dir = os.path.join(os.getcwd(), 'scons')
40  # if the options_file or ARGUMENTS do not exist then the ones listed as default here are used      hostname = platform.node().split('.')[0]
41  # DO NOT CHANGE THEM HERE      for name in hostname, effectiveName(hostname):
42  # Where to install?          mangledhostname = re.sub('[^0-9a-zA-Z]', '_', hostname)
43  #==============================================================================================              options_file = os.path.join(ext_dir, mangledhostname+'_options.py')
44  #              if os.path.isfile(options_file): break
45  #    get the options file if present:  
46  #  if not os.path.isfile(options_file):
47  options_file = ARGUMENTS.get('options_file','')      print("\nWARNING:\nOptions file %s" % options_file)
48        print("not found! Default options will be used which is most likely suboptimal.")
49  if not os.path.isfile(options_file) :      print("We recommend that you copy the most relevant options file in the scons/template/")
50      options_file = False      print("subdirectory and customize it to your needs.\n")
51        options_file = None
52  if not options_file :  
53     import socket  ############################### Build options ################################
54     hostname = re.sub("[^0-9a-zA-Z]", "_", socket.gethostname().split('.')[0])  
55     tmp = os.path.join("scons",hostname+"_options.py")  default_prefix='/usr'
56    mpi_flavours=('no', 'none', 'MPT', 'MPICH', 'MPICH2', 'OPENMPI', 'INTELMPI')
57     if os.path.isfile(tmp) :  lapack_flavours=('none', 'clapack', 'mkl')
58        options_file = tmp  
59    #Note that scons construction vars serve the following purposes:
60  IS_WINDOWS_PLATFORM = (os.name== "nt")  #  CPPFLAGS -> to the preprocessor
61    #  CCFLAGS  -> flags for _both_ C and C++
62  # If you're not going to tell me then......  #  CXXFLAGS -> flags for c++ _only_
63  # FIXME: add one for the altix too.  #  CFLAGS   -> flags for c only
64  if not options_file :  
65     if IS_WINDOWS_PLATFORM :  vars = Variables(options_file, ARGUMENTS)
66        options_file = "scons/windows_mscv71_options.py"  vars.AddVariables(
67     else:    PathVariable('options_file', 'Path to options file', options_file, PathVariable.PathIsFile),
68        options_file = "scons/linux_gcc_eg_options.py"    PathVariable('prefix', 'Installation prefix', Dir('#.').abspath, PathVariable.PathIsDirCreate),
69      PathVariable('build_dir', 'Top-level build directory', Dir('#/build').abspath, PathVariable.PathIsDirCreate),
70  # and load it    BoolVariable('verbose', 'Output full compile/link lines', False),
71  opts = Options(options_file, ARGUMENTS)  # Compiler/Linker options
72  #================================================================    ('cxx', 'Path to C++ compiler', 'default'),
73  #    ('cc_flags', 'Base (C and C++) compiler flags', 'default'),
74  #   check if UMFPACK is installed on the system:    ('cc_optim', 'Additional (C and C++) flags for a non-debug build', 'default'),
75  #    ('cc_debug', 'Additional (C and C++) flags for a debug build', 'default'),
76  uf_root=None    ('cxx_extra', 'Extra C++ compiler flags', ''),
77  for i in [ 'UMFPACK', 'umfpack', 'ufsparse', 'UFSPARSE']:    ('cpp_flags', 'C Pre-processor flags', ''),
78     if os.path.isdir(os.path.join(tools_prefix,'include',i)):    ('cpp_extra', 'Extra C Pre-processor flags', ''),
79         uf_root=i    ('ld_extra', 'Extra linker flags', ''),
80         print i," is used from ",tools_prefix    ('nvcc', 'Path to CUDA compiler', 'default'),
81         break    ('nvccflags', 'Base CUDA compiler flags', 'default'),
82  if not uf_root==None:    BoolVariable('werror','Treat compiler warnings as errors', True),
83     umf_path_default=os.path.join(tools_prefix,'include',uf_root)    BoolVariable('debug', 'Compile with debug flags', False),
84     umf_lib_path_default=os.path.join(tools_prefix,'lib')    BoolVariable('openmp', 'Compile parallel version using OpenMP', False),
85     umf_libs_default=['umfpack']    ('omp_flags', 'OpenMP compiler flags', 'default'),
86     amd_path_default=os.path.join(tools_prefix,'include',uf_root)    ('omp_ldflags', 'OpenMP linker flags', 'default'),
87     amd_lib_path_default=os.path.join(tools_prefix,'lib')  # Mandatory libraries
88     amd_libs_default=['amd']    ('boost_prefix', 'Prefix/Paths of boost installation', default_prefix),
89     ufc_path_default=os.path.join(tools_prefix,'include',uf_root)    ('boost_libs', 'Boost libraries to link with', ['boost_python-mt']),
90  else:  # Mandatory for tests
91     umf_path_default=None    ('cppunit_prefix', 'Prefix/Paths of CppUnit installation', default_prefix),
92     umf_lib_path_default=None    ('cppunit_libs', 'CppUnit libraries to link with', ['cppunit']),
93     umf_libs_default=None  # Optional libraries and options
94     amd_path_default=None    EnumVariable('mpi', 'Compile parallel version using MPI flavour', 'none', allowed_values=mpi_flavours),
95     amd_lib_path_default=None    ('mpi_prefix', 'Prefix/Paths of MPI installation', default_prefix),
96     amd_libs_default=None    ('mpi_libs', 'MPI shared libraries to link with', ['mpi']),
97     ufc_path_default=None    BoolVariable('cuda', 'Enable GPU code with CUDA (requires thrust)', False),
98  #    ('cuda_prefix', 'Prefix/Paths to NVidia CUDA installation', default_prefix),
99  #==========================================================================    BoolVariable('netcdf', 'Enable netCDF file support', False),
100  #    ('netcdf_prefix', 'Prefix/Paths of netCDF installation', default_prefix),
101  #    python installation:    ('netcdf_libs', 'netCDF libraries to link with', ['netcdf_c++', 'netcdf']),
102  #    BoolVariable('parmetis', 'Enable ParMETIS (requires MPI)', False),
103  python_path_default=os.path.join(tools_prefix,'include','python%s.%s'%(sys.version_info[0],sys.version_info[1]))    ('parmetis_prefix', 'Prefix/Paths of ParMETIS installation', default_prefix),
104  python_lib_path_default=os.path.join(tools_prefix,'lib')    ('parmetis_libs', 'ParMETIS libraries to link with', ['parmetis', 'metis']),
105  python_lib_default="python%s.%s"%(sys.version_info[0],sys.version_info[1])    BoolVariable('mkl', 'Enable the Math Kernel Library', False),
106      ('mkl_prefix', 'Prefix/Paths to MKL installation', default_prefix),
107  #==========================================================================    ('mkl_libs', 'MKL libraries to link with', ['mkl_solver','mkl_em64t','guide','pthread']),
108  #    BoolVariable('umfpack', 'Enable UMFPACK', False),
109  #    boost installation:    ('umfpack_prefix', 'Prefix/Paths to UMFPACK installation', default_prefix),
110  #    ('umfpack_libs', 'UMFPACK libraries to link with', ['umfpack']),
111  boost_path_default=os.path.join(tools_prefix,'include')    BoolVariable('boomeramg', 'Enable BoomerAMG', False),
112  boost_lib_path_default=os.path.join(tools_prefix,'lib')    ('boomeramg_prefix', 'Prefix/Paths to BoomerAMG installation', default_prefix),
113  boost_lib_default=['boost_python']    ('boomeramg_libs', 'BoomerAMG libraries to link with', ['boomeramg']),
114      EnumVariable('lapack', 'Set LAPACK flavour', 'none', allowed_values=lapack_flavours),
115  #==========================================================================    ('lapack_prefix', 'Prefix/Paths to LAPACK installation', default_prefix),
116  #    ('lapack_libs', 'LAPACK libraries to link with', []),
117  #    check if netCDF is installed on the system:    BoolVariable('silo', 'Enable the Silo file format in weipa', False),
118  #    ('silo_prefix', 'Prefix/Paths to Silo installation', default_prefix),
119  netCDF_path_default=os.path.join(tools_prefix,'include','netcdf-3')    ('silo_libs', 'Silo libraries to link with', ['siloh5', 'hdf5']),
120  netCDF_lib_path_default=os.path.join(tools_prefix,'lib')    BoolVariable('visit', 'Enable the VisIt simulation interface', False),
121      ('visit_prefix', 'Prefix/Paths to VisIt installation', default_prefix),
122  if os.path.isdir(netCDF_path_default) and os.path.isdir(netCDF_lib_path_default):    ('visit_libs', 'VisIt libraries to link with', ['simV2']),
123       useNetCDF_default='yes'    ListVariable('domains', 'Which domains to build', 'all',\
124       netCDF_libs_default=[ 'netcdf_c++', 'netcdf' ]                 ['dudley','finley','ripley','speckley']),
125  else:  # Advanced settings
126       useNetCDF_default='no'    ('launcher', 'Launcher command (e.g. mpirun)', 'default'),
127       netCDF_path_default=None    ('prelaunch', 'Command to execute before launcher (e.g. mpdboot)', 'default'),
128       netCDF_lib_path_default=None    ('postlaunch', 'Command to execute after launcher (e.g. mpdexit)', 'default'),
129       netCDF_libs_default=None    #dudley_assemble_flags = -funroll-loops      to actually do something
130      ('dudley_assemble_flags', 'compiler flags for some dudley optimisations', ''),
131  #==========================================================================    # To enable passing function pointers through python
132  #    BoolVariable('iknowwhatimdoing', 'Allow non-standard C', False),
133  #  MPI:    # An option for specifying the compiler tools
134  #    ('tools_names', 'Compiler tools to use', ['default']),
135  if IS_WINDOWS_PLATFORM:    ('env_export', 'Environment variables to be passed to tools',[]),
136     useMPI_default='no'    EnumVariable('forcelazy', 'For testing use only - set the default value for autolazy', 'leave_alone', allowed_values=('leave_alone', 'on', 'off')),
137     mpi_path_default=None    EnumVariable('forcecollres', 'For testing use only - set the default value for force resolving collective ops', 'leave_alone', allowed_values=('leave_alone', 'on', 'off')),
138     mpi_lib_path_default=None    ('build_shared', 'Build dynamic libraries only (ignored)', True),
139     mpi_libs_default=[]    ('sys_libs', 'Extra libraries to link with', []),
140     mpi_run_default=None    ('escript_opts_version', 'Version of options file (do not specify on command line)'),
141  else:    ('SVN_VERSION', 'Do not use from options file', -2),
142     useMPI_default='no'    ('pythoncmd', 'which python to compile with','python'),
143     mpi_root='/usr/local'    ('usepython3', 'Is this a python3 build?', False),
144     mpi_path_default=os.path.join(mpi_root,'include')    ('pythonlibname', 'Name of the python library to link. (This is found automatically for python2.X.)', ''),
145     mpi_lib_path_default=os.path.join(mpi_root,'lib')    ('pythonlibpath', 'Path to the python library. (You should not need to set this unless your python has moved)',''),
146     mpi_libs_default=[ 'mpich' , 'pthread', 'rt' ]    ('pythonincpath','Path to python include files. (You should not need to set this unless your python has moved)',''),
147     mpi_run_default='mpiexec -np 1'    BoolVariable('longindices', 'use long indices (for very large matrices)', False),
148  #    BoolVariable('compressed_files','Enables reading from compressed binary files', True),
149  #==========================================================================    ('compression_libs', 'Compression libraries to link with', ['boost_iostreams']),
150  #    BoolVariable('papi', 'Enable PAPI', False),
151  #    compile:    ('papi_prefix', 'Prefix/Paths to PAPI installation', default_prefix),
152  #    ('papi_libs', 'PAPI libraries to link with', ['papi']),
153  cc_flags_default='-O3 -std=c99 -ffast-math -fpic -Wno-unknown-pragmas -ansi'    BoolVariable('papi_instrument_solver', 'Use PAPI to instrument each iteration of the solver', False),
154  cc_flags_debug_default='-g -O0 -ffast-math -std=c99 -fpic -Wno-unknown-pragmas -ansi'    BoolVariable('osx_dependency_fix', 'Fix dependencies for libraries to have absolute paths (OSX)',
155  cxx_flags_default='--no-warn -ansi'  False)
 cxx_flags_debug_default='--no-warn -ansi -DDOASSERT'  
   
 #==============================================================================================      
 # Default options and options help text  
 # These are defaults and can be overridden using command line arguments or an options file.  
 # if the options_file or ARGUMENTS do not exist then the ones listed as default here are used  
 # DO NOT CHANGE THEM HERE  
 opts.AddOptions(  
 # Where to install esys stuff  
   ('incinstall', 'where the esys headers will be installed',             Dir('#.').abspath+'/include'),  
   ('libinstall', 'where the esys libraries will be installed',           dir_libraries),  
   ('pyinstall', 'where the esys python modules will be installed',       dir_packages),  
   ('exinstall', 'where the esys examples will be installed',             dir_examples),  
   ('sys_libinstall', 'where the system esys libraries will be installed',       sys_dir_libraries),  
   ('sys_pyinstall', 'where the system esys python modules will be installed',   sys_dir_packages),  
   ('sys_exinstall', 'where the system esys examples will be installed',         sys_dir_examples),  
   ('src_zipfile', 'the source zip file will be installed.',              Dir('#.').abspath+"/release/escript_src.zip"),  
   ('test_zipfile', 'the test zip file will be installed.',               Dir('#.').abspath+"/release/escript_tests.zip"),  
   ('src_tarfile', 'the source tar file will be installed.',              Dir('#.').abspath+"/release/escript_src.tar.gz"),  
   ('test_tarfile', 'the test tar file will be installed.',               Dir('#.').abspath+"/release/escript_tests.tar.gz"),  
   ('examples_tarfile', 'the examples tar file will be installed.',       Dir('#.').abspath+"/release/doc/escript_examples.tar.gz"),  
   ('examples_zipfile', 'the examples zip file will be installed.',       Dir('#.').abspath+"/release/doc/escript_examples.zip"),  
   ('guide_pdf', 'name of the user guide in pdf format',                  Dir('#.').abspath+"/release/doc/user/guide.pdf"),  
   ('api_epydoc', 'name of the epydoc api docs directory',                Dir('#.').abspath+"/release/doc/epydoc"),  
   ('guide_html', 'name of the directory for user guide in html format',  Dir('#.').abspath+"/release/doc/user/html"),  
   ('api_doxygen', 'name of the doxygen api docs directory',prefix+"/release/doc/doxygen"),  
 # Compilation options  
   BoolOption('dodebug', 'Do you want a debug build?', 'no'),  
   BoolOption('bounds_check', 'Do you want extra array bounds checking?', 'no'),  
   ('options_file', "Optional file containing preferred options. Ignored if it doesn't exist (default: scons/<hostname>_options.py)", options_file),  
   ('cc_defines','C/C++ defines to use', None),  
   ('cc_flags','C compiler flags to use (Release build)', cc_flags_default),  
   ('cc_flags_debug', 'C compiler flags to use (Debug build)', cc_flags_debug_default),  
   ('cxx_flags', 'C++ compiler flags to use (Release build)', cxx_flags_default),  
   ('cxx_flags_debug', 'C++ compiler flags to use (Debug build)', cxx_flags_debug_default),  
   ('omp_flags', 'OpenMP compiler flags to use (Release build)', ''),  
   ('omp_flags_debug', 'OpenMP compiler flags to use (Debug build)', ''),  
   ('ar_flags', 'Static library archiver flags to use', None),  
   ('sys_libs', 'System libraries to link with', None),  
   ('tar_flags','flags for zip files','-c -z'),  
 # MKL  
   PathOption('mkl_path', 'Path to MKL includes', None),  
   PathOption('mkl_lib_path', 'Path to MKL libs', None),  
   ('mkl_libs', 'MKL libraries to link with', None),  
 # SCSL  
   PathOption('scsl_path', 'Path to SCSL includes', None),  
   PathOption('scsl_lib_path', 'Path to SCSL libs', None),  
   ('scsl_libs', 'SCSL libraries to link with', None),  
   ('scsl_libs_MPI', 'SCSL libraries to link with for MPI build', None),  
 # UMFPACK  
   PathOption('ufc_path', 'Path to UFconfig includes', ufc_path_default),  
   PathOption('umf_path', 'Path to UMFPACK includes', umf_path_default),  
   PathOption('umf_lib_path', 'Path to UMFPACK libs', umf_lib_path_default),  
   ('umf_libs', 'UMFPACK libraries to link with', umf_libs_default),  
 # AMD (used by UMFPACK)  
   PathOption('amd_path', 'Path to AMD includes', amd_path_default),  
   PathOption('amd_lib_path', 'Path to AMD libs', amd_lib_path_default),  
   ('amd_libs', 'AMD libraries to link with', amd_libs_default),  
 # TRILINOS  
   PathOption('trilinos_path', 'Path to TRILINOS includes', None),  
   PathOption('trilinos_lib_path', 'Path to TRILINOS libs', None),  
   ('trilinos_libs', 'TRILINOS libraries to link with', None),  
 # BLAS  
   PathOption('blas_path', 'Path to BLAS includes', None),  
   PathOption('blas_lib_path', 'Path to BLAS libs', None),  
   ('blas_libs', 'BLAS libraries to link with', None),  
 # netCDF  
   ('useNetCDF', 'switch on/off the usage of netCDF', useNetCDF_default),  
   PathOption('netCDF_path', 'Path to netCDF includes', netCDF_path_default),  
   PathOption('netCDF_lib_path', 'Path to netCDF libs', netCDF_lib_path_default),  
   ('netCDF_libs', 'netCDF C++ libraries to link with', netCDF_libs_default),  
 # Python  
 # locations of include files for python  
 # FIXME: python_path should be python_inc_path and the same for boost etc.  
   PathOption('python_path', 'Path to Python includes', python_path_default),  
   PathOption('python_lib_path', 'Path to Python libs', python_lib_path_default),  
   ('python_lib', 'Python libraries to link with', python_lib_default),  
   ('python_cmd', 'Python command', 'python'),  
 # Boost  
   PathOption('boost_path', 'Path to Boost includes', boost_path_default),  
   PathOption('boost_lib_path', 'Path to Boost libs', boost_lib_path_default),  
   ('boost_lib', 'Boost libraries to link with', boost_lib_default),  
 # Doc building  
 #  PathOption('doxygen_path', 'Path to Doxygen executable', None),  
 #  PathOption('epydoc_path', 'Path to Epydoc executable', None),  
 # PAPI  
   PathOption('papi_path', 'Path to PAPI includes', None),  
   PathOption('papi_lib_path', 'Path to PAPI libs', None),  
   ('papi_libs', 'PAPI libraries to link with', None),  
   ('papi_instrument_solver', 'use PAPI in Solver.c to instrument each iteration of the solver', None),  
 # MPI  
   BoolOption('useMPI', 'Compile parallel version using MPI', useMPI_default),  
   ('MPICH_IGNORE_CXX_SEEK', 'name of macro to ignore MPI settings of C++ SEEK macro (for MPICH)' , 'MPICH_IGNORE_CXX_SEEK'),  
   PathOption('mpi_path', 'Path to MPI includes', mpi_path_default),  
   ('mpi_run', 'mpirun name' , mpi_run_default),  
   PathOption('mpi_lib_path', 'Path to MPI libs (needs to be added to the LD_LIBRARY_PATH)',mpi_lib_path_default),  
   ('mpi_libs', 'MPI libraries to link with (needs to be shared!)', mpi_libs_default)  
156  )  )
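
A host-specific options file only needs to set the variables declared above that differ from the defaults. The following is a hypothetical sketch: the option names (escript_opts_version, openmp, boost_prefix, boost_libs, mpi, mpi_prefix, cxx_extra) are taken from the declarations above, but the file name, paths and values are placeholders, not recommendations.

    # scons/myhost_options.py -- hypothetical example; values are placeholders
    escript_opts_version = 202            # must match REQUIRED_OPTS_VERSION above
    openmp = True
    boost_prefix = '/usr/local'
    boost_libs = ['boost_python-mt']
    mpi = 'OPENMPI'
    mpi_prefix = '/usr/lib/openmpi'
    cxx_extra = '-Wno-deprecated-declarations'

Such a file is picked up automatically when it is named scons/<mangled hostname>_options.py (per the lookup code earlier), or it can be passed explicitly with options_file=... on the scons command line.
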
 #=================================================================================================  
 #  
 #   Note: On the Altix the intel compilers are not automatically  
 #   detected by scons intelc.py script. The Altix has a different directory  
 #   path and in some locations the "modules" facility is used to support  
 #   multiple compiler versions. This forces the need to import the users PATH  
 #   environment which isn't the "scons way"  
 #   This doesn't impact linux and windows which will use the default compiler (g++ or msvc, or the intel compiler if it is installed on both platforms)  
 #   FIXME: Perhaps a modification to intelc.py will allow better support for ia64 on altix  
 #  
   
 if IS_WINDOWS_PLATFORM:  
       env = Environment(tools = ['default', 'msvc'], options = opts)  
 else:  
    if os.uname()[4]=='ia64':  
       env = Environment(tools = ['default', 'intelc'], options = opts)  
       if env['CXX'] == 'icpc':  
          env['LINK'] = env['CXX'] # version >=9 of intel c++ compiler requires use of icpc to link in C++ runtimes (icc does not). FIXME: this behaviour could be directly incorporated into scons intelc.py  
    else:  
       env = Environment(tools = ['default'], options = opts)  
 Help(opts.GenerateHelpText(env))  
   
 if env['bounds_check']:  
    env.Append(CPPDEFINES = [ 'BOUNDS_CHECK' ])  
    env.Append(CXXDEFINES = [ 'BOUNDS_CHECK' ])  
   
 #=================================================================================================  
 #  
 #     Initialise Scons Build Environment  
 #     check for user environment variables we are interested in  
 try:  
    tmp = os.environ['PYTHONPATH']  
    env['ENV']['PYTHONPATH'] = tmp  
 except KeyError:  
    pass  
   
 env.PrependENVPath('PYTHONPATH', source_root)  
   
 try:  
    omp_num_threads = os.environ['OMP_NUM_THREADS']  
 except KeyError:  
    omp_num_threads = 1  
 env['ENV']['OMP_NUM_THREADS'] = omp_num_threads  
   
 try:  
    path = os.environ['PATH']  
    env['ENV']['PATH'] = path  
 except KeyError:  
    omp_num_threads = 1  
   
 env['ENV']['OMP_NUM_THREADS'] = omp_num_threads  
   
   
 # Copy some variables from the system environment to the build environment  
 try:  
    env['ENV']['DISPLAY'] = os.environ['DISPLAY']  
    env['ENV']['XAUTHORITY'] = os.environ['XAUTHORITY']  
    home_temp = os.environ['HOME']   # MPICH2's mpd needs $HOME to find $HOME/.mpd.conf  
    env['ENV']['HOME'] = home_temp  
 except KeyError:  
    pass  
   
 try:  
    tmp = os.environ['PATH']  
    env['ENV']['PATH'] = tmp  
 except KeyError:  
    pass  
   
 try:  
    tmp = os.environ['LD_LIBRARY_PATH']  
    env['ENV']['LD_LIBRARY_PATH'] = tmp  
 except KeyError:  
    pass  
 #==========================================================================  
 #  
 #    Add some customer builders  
 #  
 py_builder = Builder(action = scons_extensions.build_py, suffix = '.pyc', src_suffix = '.py', single_source=True)  
 env.Append(BUILDERS = {'PyCompile' : py_builder});  
   
 runUnitTest_builder = Builder(action = scons_extensions.runUnitTest, suffix = '.passed',  
                               src_suffix=env['PROGSUFFIX'], single_source=True)  
   
 env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder});  
   
runPyUnitTest_builder = Builder(action = scons_extensions.runPyUnitTest, suffix = '.passed', src_suffix='.py', single_source=True)
 env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder});  
   
 # Convert the options which are held in environment variable into python variables for ease of handling and configure compilation options  
 try:  
    incinstall = env['incinstall']  
    env.Append(CPPPATH = [incinstall,])  
 except KeyError:  
    incinstall = None  
 try:  
    libinstall = env['libinstall']  
    env.Append(LIBPATH = [libinstall,]) # Adds -L for building of libescript.so libfinley.so escriptcpp.so finleycpp.so  
    env.PrependENVPath('LD_LIBRARY_PATH', libinstall)  
    if IS_WINDOWS_PLATFORM :  
       env.PrependENVPath('PATH', libinstall)  
       env.PrependENVPath('PATH', env['boost_lib_path'])  
 except KeyError:  
    libinstall = None  
 try:  
    pyinstall = env['pyinstall'] # all targets will install into pyinstall/esys but PYTHONPATH points at straight pyinstall so you go import esys.escript etc  
 except KeyError:  
    pyinstall = None  
157    
158  try:  ##################### Create environment and help text #######################
    cc_defines = env['cc_defines']  
    env.Append(CPPDEFINES = cc_defines)  
 except KeyError:  
    pass  
 try:  
    flags = env['ar_flags']  
    env.Append(ARFLAGS = flags)  
 except KeyError:  
    ar_flags = None  
 try:  
    sys_libs = env['sys_libs']  
 except KeyError:  
    sys_libs = []  
159    
160  try:  # Intel's compiler uses regular expressions improperly and emits a warning
161     tar_flags = env['tar_flags']  # about failing to find the compilers. This warning can be safely ignored.
    env.Replace(TARFLAGS = tar_flags)  
 except KeyError:  
    pass  
162    
163  try:  # PATH is needed so the compiler, linker and tools are found if they are not
164     exinstall = env['exinstall']  # in default locations.
165  except KeyError:  env = Environment(tools = ['default'], options = vars,
166     exinstall = None                    ENV = {'PATH': os.environ['PATH']})
167  try:  
168     sys_libinstall = env['sys_libinstall']  # set the vars for clang
169  except KeyError:  def mkclang(env):
170     sys_libinstall = None      env['CXX']='clang++'
171  try:  
172     sys_pyinstall = env['sys_pyinstall']  if env['tools_names'] != ['default']:
173  except KeyError:      zz=env['tools_names']
174     sys_pyinstall = None      if 'clang' in zz:
175  try:          zz.remove('clang')
176     sys_exinstall = env['sys_exinstall']          zz.insert(0, mkclang)
177  except KeyError:      env = Environment(tools = ['default'] + env['tools_names'], options = vars,
178     sys_exinstall = None                        ENV = {'PATH' : os.environ['PATH']})
179    
180    if options_file:
181        opts_valid=False
182        if 'escript_opts_version' in env.Dictionary() and \
183            int(env['escript_opts_version']) >= REQUIRED_OPTS_VERSION:
184                opts_valid=True
185        if opts_valid:
186            print("Using options in %s." % options_file)
187        else:
188            print("\nOptions file %s" % options_file)
189            print("is outdated! Please update the file by examining one of the TEMPLATE")
190            print("files in the scons/ subdirectory and setting escript_opts_version to %d.\n"%REQUIRED_OPTS_VERSION)
191            Exit(1)
192    
193    # Generate help text (scons -h)
194    Help(vars.GenerateHelpText(env))
195    
196    # Check for superfluous options
197    if len(vars.UnknownVariables())>0:
198        for k in vars.UnknownVariables():
199            print("Unknown option '%s'" % k)
200        Exit(1)
201    
202    if env['cuda']:
203        if env['nvcc'] != 'default':
204            env['NVCC'] = env['nvcc']
205        env.Tool('nvcc')
206    
207    if 'dudley' in env['domains']:
208        env['domains'].append('finley')
209    
210    # create dictionary which will be populated with info for buildvars file
211    env['buildvars']={}
212    # create list which will be populated with warnings if there are any
213    env['warnings']=[]
214    
215    #################### Make sure install directories exist #####################
216    
217    env['BUILD_DIR']=Dir(env['build_dir']).abspath
218    prefix=Dir(env['prefix']).abspath
219    env['buildvars']['prefix']=prefix
220    env['incinstall'] = os.path.join(prefix, 'include')
221    env['bininstall'] = os.path.join(prefix, 'bin')
222    env['libinstall'] = os.path.join(prefix, 'lib')
223    env['pyinstall']  = os.path.join(prefix, 'esys')
224    if not os.path.isdir(env['bininstall']):
225        os.makedirs(env['bininstall'])
226    if not os.path.isdir(env['libinstall']):
227        os.makedirs(env['libinstall'])
228    if not os.path.isdir(env['pyinstall']):
229        os.makedirs(env['pyinstall'])
230    
231    env.Append(CPPPATH = [env['incinstall']])
232    env.Append(LIBPATH = [env['libinstall']])
233    
234    ################# Fill in compiler options if not set above ##################
235    
236    if env['cxx'] != 'default': env['CXX']=env['cxx']
237    
238    # version >=9 of intel C++ compiler requires use of icpc to link in C++
239    # runtimes (icc does not)
240    if not IS_WINDOWS and os.uname()[4]=='ia64' and env['CXX']=='icpc':
241        env['LINK'] = env['CXX']
242    
243    # default compiler/linker options
244    cc_flags = ''
245    cc_optim = ''
246    cc_debug = ''
247    omp_flags = ''
248    omp_ldflags = ''
249    fatalwarning = '' # switch to turn warnings into errors
250    sysheaderopt = '' # how to indicate that a header is a system header
251    
252    # env['CC'] might be a full path
253    cc_name=os.path.basename(env['CXX'])
254    
255    if cc_name == 'icpc':
256        # Intel compiler
257        # #1478: class "std::auto_ptr<...>" was declared deprecated
258        # #1875: offsetof applied to non-POD types is nonstandard (in boost)
259        # removed -std=c99 because icpc doesn't like it and we aren't using c anymore
260        cc_flags    = "-std=c++11 -fPIC -w2 -wd1875 -wd1478 -Wno-unknown-pragmas"
261        cc_optim    = "-O3 -ftz -fno-alias -inline-level=2 -ipo -xHost"
262        cc_debug    = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
263        omp_flags   = "-openmp"
264        omp_ldflags = "-openmp -openmp_report=1"
265        fatalwarning = "-Werror"
266    elif cc_name[:3] == 'g++':
267        # GNU C++ on any system
268        # note that -ffast-math is not used because it breaks isnan(),
269        # see mantis #691
270        cc_flags     = "-std=c++11 -pedantic -Wall -fPIC -Wno-unknown-pragmas -Wno-sign-compare -Wno-system-headers -Wno-long-long -Wno-strict-aliasing -finline-functions"
271        cc_optim     = "-O3"
272        #max-vartrack-size: avoid vartrack limit being exceeded with escriptcpp.cpp
273        cc_debug     = "-g3 -O0 -D_GLIBCXX_DEBUG -DDOASSERT -DDOPROF -DBOUNDS_CHECK --param=max-vartrack-size=100000000"
274        omp_flags    = "-fopenmp"
275        omp_ldflags  = "-fopenmp"
276        fatalwarning = "-Werror"
277        sysheaderopt = "-isystem"
278    elif cc_name == 'cl':
279        # Microsoft Visual C on Windows
280        cc_flags     = "/EHsc /MD /GR /wd4068 /D_USE_MATH_DEFINES /DDLL_NETCDF"
281        cc_optim     = "/O2 /Op /W3"
282        cc_debug     = "/Od /RTCcsu /ZI /DBOUNDS_CHECK"
283        fatalwarning = "/WX"
284    elif cc_name == 'icl':
285        # Intel C on Windows
286        cc_flags     = '/EHsc /GR /MD'
287        cc_optim     = '/fast /Oi /W3 /Qssp /Qinline-factor- /Qinline-min-size=0 /Qunroll'
288        cc_debug     = '/Od /RTCcsu /Zi /Y- /debug:all /Qtrapuv'
289        omp_flags    = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel'
290        omp_ldflags  = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel'
291    
292    env['sysheaderopt']=sysheaderopt
293    
294    # set defaults if not otherwise specified
295    if env['cc_flags']    == 'default': env['cc_flags'] = cc_flags
296    if env['cc_optim']    == 'default': env['cc_optim'] = cc_optim
297    if env['cc_debug']    == 'default': env['cc_debug'] = cc_debug
298    if env['omp_flags']   == 'default': env['omp_flags'] = omp_flags
299    if env['omp_ldflags'] == 'default': env['omp_ldflags'] = omp_ldflags
300    if env['cxx_extra'] != '': env.Append(CXXFLAGS = env['cxx_extra'])
301    if env['ld_extra']  != '': env.Append(LINKFLAGS = env['ld_extra'])
302    if env['cpp_flags'] != '': env.Append(CPPFLAGS = env['cpp_flags'])
303    if env['cpp_extra'] != '': env.Append(CPPFLAGS = " "+env['cpp_extra'])
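
The comment block near the top distinguishes CPPFLAGS (preprocessor), CCFLAGS (C and C++), CXXFLAGS (C++ only) and CFLAGS (C only), and the lines above feed the user-supplied *_extra/*_flags options into those variables. A generic standalone SCons sketch (not part of this file) of where each variable ends up:

    # Generic SCons sketch, for illustration only
    env = Environment()
    env.Append(CPPFLAGS=['-DMY_DEFINE'])     # preprocessor; both C and C++ compiles
    env.Append(CCFLAGS=['-O2'])              # both C and C++ compiles
    env.Append(CXXFLAGS=['-std=c++11'])      # C++ compiles only
    env.Append(CFLAGS=['-std=c99'])          # C compiles only
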
304    
305    if env['nvccflags'] != 'default':
306        env['NVCCFLAGS'] = env['nvccflags']
307        env['SHNVCCFLAGS'] = env['nvccflags'] + ' -shared'
308    
309    if env['longindices']:
310        env.Append(CPPDEFINES = ['ESYS_INDEXTYPE_LONG'])
311    
312    if env['usepython3']:
313        env.Append(CPPDEFINES=['ESPYTHON3'])
314    
315    # set up the autolazy values
316    if env['forcelazy'] == 'on':
317        env.Append(CPPDEFINES=['FAUTOLAZYON'])
318    elif env['forcelazy'] == 'off':
319        env.Append(CPPDEFINES=['FAUTOLAZYOFF'])
320    
321    # set up the collective resolve values
322    if env['forcecollres'] == 'on':
323        env.Append(CPPDEFINES=['FRESCOLLECTON'])
324    elif env['forcecollres'] == 'off':
325        env.Append(CPPDEFINES=['FRESCOLLECTOFF'])
326    
327    # allow non-standard C if requested
328    if env['iknowwhatimdoing']:
329        env.Append(CPPDEFINES=['IKNOWWHATIMDOING'])
330    
331    # Disable OpenMP if no flags provided
332    if env['openmp'] and env['omp_flags'] == '':
333       env['warnings'].append("OpenMP requested but no flags provided - disabling OpenMP!")
334       env['openmp'] = False
335    
336    if env['openmp']:
337        env.Append(CCFLAGS = env['omp_flags'])
338        if env['omp_ldflags'] != '': env.Append(LINKFLAGS = env['omp_ldflags'])
339    else:
340        env['omp_flags']=''
341        env['omp_ldflags']=''
342    
343    env['buildvars']['openmp']=int(env['openmp'])
344    
345    # add debug/non-debug compiler flags
346    env['buildvars']['debug']=int(env['debug'])
347    if env['debug']:
348        env.Append(CCFLAGS = env['cc_debug'])
349    else:
350        env.Append(CCFLAGS = env['cc_optim'])
351    
352    # always add cc_flags
353    env.Append(CCFLAGS = env['cc_flags'])
354    
355    # add system libraries
356    env.AppendUnique(LIBS = env['sys_libs'])
357    
358    # set defaults for launchers if not otherwise specified
359    if env['prelaunch'] == 'default':
360        if env['mpi'] == 'INTELMPI' and env['openmp']:
361            env['prelaunch'] = "export I_MPI_PIN_DOMAIN=omp"
362        elif env['mpi'] == 'OPENMPI':
363            # transform comma-separated list to '-x a -x b -x c ...'
364            env['prelaunch'] = "EE=$(echo -x %e|sed -e 's/,/ -x /g')"
365        elif env['mpi'] == 'MPT':
366            env['prelaunch'] = "export MPI_NUM_MEMORY_REGIONS=0"
367        elif env['mpi'] == 'MPICH2':
368            env['prelaunch'] = "mpdboot -n %n -r ssh -f %f"
369        else:
370            env['prelaunch'] = ""
371    
372    if env['launcher'] == 'default':
373        if env['mpi'] == 'INTELMPI':
374            env['launcher'] = "mpirun -hostfile %f -n %N -ppn %p %b"
375        elif env['mpi'] == 'OPENMPI':
376            env['launcher'] = "mpirun ${AGENTOVERRIDE} --gmca mpi_warn_on_fork 0 ${EE} --host %h -bynode -bind-to-core --cpus-per-rank %t -np %N %b"
377        elif env['mpi'] == 'MPT':
378            env['launcher'] = "mpirun %h -np %p %b"
379        elif env['mpi'] == 'MPICH':
380            env['launcher'] = "mpirun -machinefile %f -np %N %b"
381        elif env['mpi'] == 'MPICH2':
382            env['launcher'] = "mpiexec -genvlist %e -np %N %b"
383        else:
384            env['launcher'] = "%b"
385    
386    if env['postlaunch'] == 'default':
387        if env['mpi'] == 'MPICH2':
388            env['postlaunch'] = "mpdallexit"
389        else:
390            env['postlaunch'] = ""
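
The %-placeholders in these templates (%b, %N, %p, %f, %e, %h, %n, %t) are substituted later by escript's run-time launcher script, which is not part of this file, so their exact meanings are not defined here. Purely as an illustration (assuming %N = total MPI ranks, %p = ranks per node, %b = the command to run), a site could override the defaults in its options file, for example for a SLURM system:

    # Hypothetical options-file override for a SLURM site; the srun line and the
    # placeholder meanings are assumptions, only the option names are real.
    prelaunch  = ""
    launcher   = "srun --ntasks=%N --ntasks-per-node=%p %b"
    postlaunch = ""
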
391    
392    # determine svn revision
393    global_revision=ARGUMENTS.get('SVN_VERSION', None)
394    if global_revision:
395        global_revision = re.sub(':.*', '', global_revision)
396        global_revision = re.sub('[^0-9]', '', global_revision)
397        if global_revision == '': global_revision='-2'
398    else:
399      # Get the global Subversion revision number for the getVersion() method
400      try:
401        global_revision = os.popen('svnversion -n .').read()
402        global_revision = re.sub(':.*', '', global_revision)
403        global_revision = re.sub('[^0-9]', '', global_revision)
404        if global_revision == '': global_revision='-2'
405      except:
406        global_revision = '-1'
407    env['svn_revision']=global_revision
408    env['buildvars']['svn_revision']=global_revision
409    env.Append(CPPDEFINES=['SVN_VERSION='+global_revision])
410    
411    env['IS_WINDOWS']=IS_WINDOWS
412    env['IS_OSX']=IS_OSX
413    
414    ###################### Copy required environment vars ########################
415    
416    # Windows doesn't use LD_LIBRARY_PATH but PATH instead
417    if IS_WINDOWS:
418        LD_LIBRARY_PATH_KEY='PATH'
419        env['ENV']['LD_LIBRARY_PATH']=''
420    else:
421        LD_LIBRARY_PATH_KEY='LD_LIBRARY_PATH'
422    
423  # ====================== debugging ===================================  env['LD_LIBRARY_PATH_KEY']=LD_LIBRARY_PATH_KEY
 try:  
    dodebug = env['dodebug']  
 except KeyError:  
    dodebug = None  
424    
425  # === switch on omp ===================================================  # the following env variables are exported for the unit tests
 try:  
   omp_flags = env['omp_flags']  
 except KeyError:  
   omp_flags = ''  
426    
427  try:  for key in 'OMP_NUM_THREADS', 'ESCRIPT_NUM_PROCS', 'ESCRIPT_NUM_NODES':
428    omp_flags_debug = env['omp_flags_debug']      try:
429  except KeyError:          env['ENV'][key] = os.environ[key]
430    omp_flags_debug = ''      except KeyError:
431            env['ENV'][key] = '1'
432    
433  # ========= use mpi? =====================================================  env_export=env['env_export']
434  try:  env_export.extend(['ESCRIPT_NUM_THREADS','ESCRIPT_HOSTFILE','DISPLAY','XAUTHORITY','PATH','HOME','KMP_MONITOR_STACKSIZE','TMPDIR','TEMP','TMP','LD_PRELOAD'])
    useMPI = env['useMPI']  
 except KeyError:  
    useMPI = None  
 # ========= set compiler flags ===========================================  
435    
436  if dodebug:  for key in set(env_export):
437      try:      try:
438        flags = env['cc_flags_debug'] + ' ' + omp_flags_debug          env['ENV'][key] = os.environ[key]
       env.Append(CCFLAGS = flags)  
439      except KeyError:      except KeyError:
       pass  
 else:  
    try:  
       flags = env['cc_flags'] + ' ' + omp_flags  
       env.Append(CCFLAGS = flags)  
    except KeyError:  
       pass  
 if dodebug:  
      try:  
         flags = env['cxx_flags_debug']  
         env.Append(CXXFLAGS = flags)  
      except KeyError:  
440          pass          pass
 else:  
      try:  
         flags = env['cxx_flags']  
         env.Append(CXXFLAGS = flags)  
      except KeyError:  
         pass  
 try:  
      if env['CC'] == 'gcc': env.Append(CCFLAGS = "-pedantic-errors -Wno-long-long")  
 except:  
      pass  
   
 # ============= set mkl (but only of no MPI) =====================================  
 if not useMPI:  
    try:  
       includes = env['mkl_path']  
       env.Append(CPPPATH = [includes,])  
    except KeyError:  
       pass  
   
    try:  
       lib_path = env['mkl_lib_path']  
       env.Append(LIBPATH = [lib_path,])  
    except KeyError:  
       pass  
   
    try:  
       mkl_libs = env['mkl_libs']  
    except KeyError:  
       mkl_libs = []  
 else:  
      mkl_libs = []  
   
 # ============= set scsl (but only of no MPI) =====================================  
 if not useMPI:  
    try:  
       includes = env['scsl_path']  
       env.Append(CPPPATH = [includes,])  
    except KeyError:  
       pass  
   
    try:  
       lib_path = env['scsl_lib_path']  
       env.Append(LIBPATH = [lib_path,])  
    except KeyError:  
       pass  
     
    try:  
       scsl_libs = env['scsl_libs']  
    except KeyError:  
       scsl_libs = [ ]  
441    
442  else:  for key in os.environ.keys():
443      scsl_libs =  []      if key.startswith("SLURM_"):
444            env['ENV'][key] = os.environ[key]
 # ============= set TRILINOS (but only with MPI) =====================================  
 if useMPI:  
    try:  
       includes = env['trilinos_path']  
       env.Append(CPPPATH = [includes,])  
    except KeyError:  
       pass  
   
    try:  
       lib_path = env['trilinos_lib_path']  
       env.Append(LIBPATH = [lib_path,])  
    except KeyError:  
       pass  
   
    try:  
       trilinos_libs = env['trilinos_libs']  
    except KeyError:  
       trilinos_libs = []  
 else:  
      trilinos_libs = []  
   
   
 # ============= set umfpack (but only without MPI) =====================================  
 umf_libs=[ ]  
 if not useMPI:  
    try:  
       includes = env['umf_path']  
       env.Append(CPPPATH = [includes,])  
    except KeyError:  
       pass  
   
    try:  
       lib_path = env['umf_lib_path']  
       env.Append(LIBPATH = [lib_path,])  
    except KeyError:  
       pass  
   
    try:  
       umf_libs = env['umf_libs']  
       umf_libs+=umf_libs  
    except KeyError:  
       pass  
   
    try:  
       includes = env['ufc_path']  
       env.Append(CPPPATH = [includes,])  
    except KeyError:  
       pass  
   
    try:  
       includes = env['amd_path']  
       env.Append(CPPPATH = [includes,])  
    except KeyError:  
       pass  
   
    try:  
       lib_path = env['amd_lib_path']  
       env.Append(LIBPATH = [lib_path,])  
    except KeyError:  
       pass  
   
    try:  
       amd_libs = env['amd_libs']  
       umf_libs+=amd_libs  
    except KeyError:  
       pass  
   
 # ============= set TRILINOS (but only with MPI) =====================================  
 if useMPI:  
    try:  
       includes = env['trilinos_path']  
       env.Append(CPPPATH = [includes,])  
    except KeyError:  
       pass  
   
    try:  
       lib_path = env['trilinos_lib_path']  
       env.Append(LIBPATH = [lib_path,])  
    except KeyError:  
       pass  
   
    try:  
       trilinos_libs = env['trilinos_libs']  
    except KeyError:  
       trilinos_libs = []  
 else:  
      trilinos_libs = []  
445    
 # ============= set blas =====================================  
446  try:  try:
447     includes = env['blas_path']      env.PrependENVPath(LD_LIBRARY_PATH_KEY, os.environ[LD_LIBRARY_PATH_KEY])
    env.Append(CPPPATH = [includes,])  
448  except KeyError:  except KeyError:
449     pass      pass
450    
451  try:  if IS_OSX:
452     lib_path = env['blas_lib_path']    try:
453     env.Append(LIBPATH = [lib_path,])      env.PrependENVPath('DYLD_LIBRARY_PATH', os.environ['DYLD_LIBRARY_PATH'])
454  except KeyError:    except KeyError:
455     pass      pass
456    
 try:  
    blas_libs = env['blas_libs']  
 except KeyError:  
    blas_libs = [ ]  
457    
458  # ========== netcdf (currently not supported with mpi) ====================================  # these shouldn't be needed
459  if useMPI:  #for key in 'C_INCLUDE_PATH','CPLUS_INCLUDE_PATH','LIBRARY_PATH':
460     useNetCDF = 'no'  #    try:
461  else:  #        env['ENV'][key] = os.environ[key]
462     try:  #    except KeyError:
463        useNetCDF = env['useNetCDF']  #        pass
    except KeyError:  
       useNetCDF = 'yes'  
       pass  
       
 if useNetCDF == 'yes':  
    try:  
       netCDF_libs = env['netCDF_libs']  
    except KeyError:  
       pass  
   
    env.Append(LIBS = netCDF_libs)  
    env.Append(CPPDEFINES = [ 'USE_NETCDF' ])  
    try:  
       includes = env['netCDF_path']  
       env.Append(CPPPATH = [includes,])  
    except KeyError:  
       pass  
   
    try:  
       lib_path = env['netCDF_lib_path']  
       env.Append(LIBPATH = [ lib_path, ])  
       if IS_WINDOWS_PLATFORM :  
          env.PrependENVPath('PATH', lib_path)  
    except KeyError:  
       pass  
 else:  
    print "Warning: Installation is not configured with netCDF. Some I/O function may not be available."  
    netCDF_libs=[ ]  
464    
 # ====================== boost ======================================  
 try:  
    includes = env['boost_path']  
    env.Append(CPPPATH = [includes,])  
 except KeyError:  
    pass  
 try:  
    lib_path = env['boost_lib_path']  
    env.Append(LIBPATH = [lib_path,])  
    if IS_WINDOWS_PLATFORM :  
       env.PrependENVPath('PATH', lib_path)  
 except KeyError:  
    pass  
 try:  
    boost_lib = env['boost_lib']  
 except KeyError:  
    boost_lib = None  
 # ====================== python ======================================  
 try:  
    includes = env['python_path']  
    env.Append(CPPPATH = [includes,])  
 except KeyError:  
    pass  
 try:  
    lib_path = env['python_lib_path']  
    env.Append(LIBPATH = [lib_path,])  
 except KeyError:  
    pass  
 try:  
    python_lib = env['python_lib']  
 except KeyError:  
    python_lib = None  
 # =============== documentation =======================================  
 try:  
    doxygen_path = env['doxygen_path']  
 except KeyError:  
    doxygen_path = None  
 try:  
    epydoc_path = env['epydoc_path']  
 except KeyError:  
    epydoc_path = None  
 # =============== PAPI =======================================  
465  try:  try:
466     includes = env['papi_path']      env['ENV']['PYTHONPATH'] = os.environ['PYTHONPATH']
    env.Append(CPPPATH = [includes,])  
467  except KeyError:  except KeyError:
468     pass      pass
 try:  
    lib_path = env['papi_lib_path']  
    env.Append(LIBPATH = [lib_path,])  
 except KeyError:  
    pass  
 try:  
    papi_libs = env['papi_libs']  
 except KeyError:  
    papi_libs = None  
 # ============= set mpi =====================================  
 if useMPI:  
    env.Append(CPPDEFINES=['PASO_MPI',])  
    try:  
       includes = env['mpi_path']  
       env.Append(CPPPATH = [includes,])  
    except KeyError:  
       pass  
    try:  
       lib_path = env['mpi_lib_path']  
       env.Append(LIBPATH = [lib_path,])  
       env['ENV']['LD_LIBRARY_PATH']+=":"+lib_path  
    except KeyError:  
       pass  
    try:  
       mpi_libs = env['mpi_libs']  
    except KeyError:  
       mpi_libs = []  
   
    try:  
       mpi_run = env['mpi_run']  
    except KeyError:  
       mpi_run = ''  
   
    try:  
        mpich_ignore_cxx_seek=env['MPICH_IGNORE_CXX_SEEK']  
        env.Append(CPPDEFINES = [ mpich_ignore_cxx_seek ] )  
    except KeyError:  
       pass  
 else:  
   mpi_libs=[]  
   mpi_run = mpi_run_default  
 # =========== zip files ===========================================  
 try:  
    includes = env['papi_path']  
    env.Append(CPPPATH = [includes,])  
 except KeyError:  
    pass  
 try:  
    lib_path = env['papi_lib_path']  
    env.Append(LIBPATH = [lib_path,])  
 except KeyError:  
    pass  
 try:  
    papi_libs = env['papi_libs']  
 except KeyError:  
    papi_libs = None  
 try:  
    papi_instrument_solver = env['papi_instrument_solver']  
 except KeyError:  
    papi_instrument_solver = None  
469    
470    ######################## Add some custom builders ############################
471    
472  # ============= and some helpers =====================================  if env['pythoncmd']=='python':
473  try:      py_builder = Builder(action = build_py, suffix = '.pyc', src_suffix = '.py', single_source=True)
474     doxygen_path = env['doxygen_path']  else:
475  except KeyError:      py_builder = Builder(action = env['pythoncmd']+" scripts/py_comp.py $SOURCE $TARGET", suffix = '.pyc', src_suffix = '.py', single_source=True)
476     doxygen_path = None  env.Append(BUILDERS = {'PyCompile' : py_builder});
 try:  
    epydoc_path = env['epydoc_path']  
 except KeyError:  
    epydoc_path = None  
 try:  
    src_zipfile = env.File(env['src_zipfile'])  
 except KeyError:  
    src_zipfile = None  
 try:  
    test_zipfile = env.File(env['test_zipfile'])  
 except KeyError:  
    test_zipfile = None  
 try:  
    examples_zipfile = env.File(env['examples_zipfile'])  
 except KeyError:  
    examples_zipfile = None  
477    
478  try:  runUnitTest_builder = Builder(action = runUnitTest, suffix = '.passed', src_suffix=env['PROGSUFFIX'], single_source=True)
479     src_tarfile = env.File(env['src_tarfile'])  env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder});
 except KeyError:  
    src_tarfile = None  
 try:  
    test_tarfile = env.File(env['test_tarfile'])  
 except KeyError:  
    test_tarfile = None  
 try:  
    examples_tarfile = env.File(env['examples_tarfile'])  
 except KeyError:  
    examples_tarfile = None  
480    
481  try:  runPyUnitTest_builder = Builder(action = runPyUnitTest, suffix = '.passed', src_suffix='.py', single_source=True)
482     guide_pdf = env.File(env['guide_pdf'])  env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder});
 except KeyError:  
    guide_pdf = None  
483    
484  try:  runPyExample_builder = Builder(action = runPyExample, suffix = '.passed', src_suffix='.py', single_source=True)
485     guide_html_index = env.File('index.htm',env['guide_html'])  env.Append(BUILDERS = {'RunPyExample' : runPyExample_builder});
 except KeyError:  
    guide_html_index = None  
486    
487  try:  epstopdfbuilder = Builder(action = eps2pdf, suffix='.pdf', src_suffix='.eps', single_source=True)
488     api_epydoc = env.Dir(env['api_epydoc'])  env.Append(BUILDERS = {'EpsToPDF' : epstopdfbuilder});
 except KeyError:  
    api_epydoc = None  
489    
490  try:  ############################ Dependency checks ###############################
    api_doxygen = env.Dir(env['api_doxygen'])  
 except KeyError:  
    api_doxygen = None  
491    
492  try:  ######## Compiler
493     svn_pipe = os.popen("svnversion -n .")  env=checkCompiler(env)
494     global_revision = svn_pipe.readlines()  
495     svn_pipe.close()  ######## Python headers & library (required)
496     global_revision = re.sub(":.*", "", global_revision[0])  env=checkPython(env)
497     global_revision = re.sub("[^0-9]", "", global_revision)  
498  except:  ######## boost & boost-python (required)
499     global_revision = "0"  env=checkBoost(env)
500  env.Append(CPPDEFINES = "SVN_VERSION="+global_revision)  
501    ######## NVCC version (optional)
502  # Python install - esys __init__.py  if env['cuda']:
503  init_target = env.Command(pyinstall+'/__init__.py', None, Touch('$TARGET'))      env=checkCudaVersion(env)
504        env=checkCUDA(env)
505  # FIXME: exinstall and friends related to examples are not working.  
506  build_target = env.Alias('build',[libinstall,incinstall,pyinstall,init_target])  ######## numpy (required) and numpy headers (optional)
507    env=checkNumpy(env)
508  env.Default(build_target)  
509    ######## CppUnit (required for tests)
510  # Zipgets  env=checkCppUnit(env)
511  env.Alias('release_src',[ src_zipfile, src_tarfile ])  
512  env.Alias('release_tests',[ test_zipfile, test_tarfile])  ######## optional python modules (sympy, pyproj)
513  env.Alias('release_examples',[ examples_zipfile, examples_tarfile])  env=checkOptionalModules(env)
514  env.Alias('examples_zipfile',examples_zipfile)  
515  env.Alias('examples_tarfile',examples_tarfile)  ######## optional dependencies (netCDF, PAPI, MKL, UMFPACK, Lapack, Silo, ...)
516  env.Alias('api_epydoc',api_epydoc)  env=checkOptionalLibraries(env)
517  env.Alias('api_doxygen',api_doxygen)  
518  env.Alias('guide_html_index',guide_html_index)  #use gmsh info to set some defines
519  env.Alias('guide_pdf', guide_pdf)  if env['gmsh'] == 's':
520  env.Alias('docs',[ 'release_examples', 'guide_pdf', api_epydoc, api_doxygen, guide_html_index])      env.Append(CPPDEFINES=['GMSH'])
521  env.Alias('release', ['release_src', 'release_tests', 'docs'])  elif env['gmsh'] == 'm':
522        env.Append(CPPDEFINES=['GMSH','GMSH_MPI'])
523  env.Alias('build_tests',build_target)    # target to build all C++ tests  
524  env.Alias('build_py_tests',build_target) # target to build all python tests  ######## PDFLaTeX (for documentation)
525  env.Alias('build_all_tests', [ 'build_tests', 'build_py_tests' ] ) # target to build all python tests  env=checkPDFLatex(env)
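
The check* functions called above come from dependencies.py (imported at the top via "from dependencies import *") and are not shown in this diff. As a generic illustration only, not the actual escript code, such a check typically wraps SCons' Configure machinery:

    # Generic sketch of a dependency check; NOT the real dependencies.py code.
    def checkCppUnitSketch(env):
        conf = Configure(env.Clone())   # probe on a clone so env's LIBS stay clean
        if not conf.CheckLibWithHeader('cppunit', 'cppunit/TestFixture.h', 'c++'):
            env['warnings'].append("CppUnit not found - C++ unit tests will not run")
        conf.Finish()
        return env
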
526  env.Alias('run_tests', 'build_tests')   # target to run all C++ test  
527  env.Alias('py_tests', 'build_py_tests') # target to run all released python tests  # keep some of our install paths first in the list for the unit tests
528  env.Alias('all_tests', ['run_tests', 'py_tests']) # target to run all C++ and released python tests  env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])
529    env.PrependENVPath('PYTHONPATH', prefix)
530    env['ENV']['ESCRIPT_ROOT'] = prefix
531  # Allow sconscripts to see the env  
532  Export(["IS_WINDOWS_PLATFORM", "env", "incinstall", "libinstall", "pyinstall", "dodebug", "mkl_libs", "scsl_libs", "umf_libs", "blas_libs", "netCDF_libs", "useNetCDF", "mpi_run",  if not env['verbose']:
533      "boost_lib", "python_lib", "doxygen_path", "epydoc_path", "papi_libs",      env['CXXCOMSTR'] = "Compiling $TARGET"
534          "sys_libs", "test_zipfile", "src_zipfile", "test_tarfile", "src_tarfile", "examples_tarfile", "examples_zipfile", "trilinos_libs", "mpi_libs", "papi_instrument_solver",      env['SHCXXCOMSTR'] = "Compiling $TARGET"
535          "guide_pdf", "guide_html_index", "api_epydoc", "api_doxygen", "useMPI" ])      env['ARCOMSTR'] = "Linking $TARGET"
536        env['LINKCOMSTR'] = "Linking $TARGET"
537  # End initialisation section      env['SHLINKCOMSTR'] = "Linking $TARGET"
538  # Begin configuration section      env['PDFLATEXCOMSTR'] = "Building $TARGET from LaTeX input $SOURCES"
539  # adds this file and the scons option directory to the source tar      env['BIBTEXCOMSTR'] = "Generating bibliography $TARGET"
540  release_srcfiles=[env.File('SConstruct'),]+[ env.File(x) for x in glob.glob('scons/*.py') ]      env['MAKEINDEXCOMSTR'] = "Generating index $TARGET"
541  release_testfiles=[env.File('README_TESTS'),]      env['PDFLATEXCOMSTR'] = "Building $TARGET from LaTeX input $SOURCES"
542  env.Zip(src_zipfile, release_srcfiles)      #Progress(['Checking -\r', 'Checking \\\r', 'Checking |\r', 'Checking /\r'], interval=17)
543  env.Zip(test_zipfile, release_testfiles)  
544  try:  ####################### Configure the subdirectories #########################
545     env.Tar(src_tarfile, release_srcfiles)  
546     env.Tar(test_tarfile, release_testfiles)  # remove obsolete files
547  except AttributeError:  if not env['usempi']:
548     pass      Execute(Delete(os.path.join(env['libinstall'], 'pythonMPI')))
549  # Insert new components to be build here      Execute(Delete(os.path.join(env['bininstall'], 'escript-overlord')))
550  # FIXME: might be nice to replace this verbosity with a list of targets and some      Execute(Delete(os.path.join(env['libinstall'], 'pythonMPIredirect')))
551  # FIXME: nifty python to create the lengthy but very similar env.Sconscript lines  
552  # Third Party libraries  from grouptest import *
553  env.SConscript(dirs = ['tools/CppUnitTest/src'], build_dir='build/$PLATFORM/tools/CppUnitTest', duplicate=0)  TestGroups=[]
554  # C/C++ Libraries  
555  env.SConscript(dirs = ['paso/src'], build_dir='build/$PLATFORM/paso', duplicate=0)  # keep an environment without warnings-as-errors
556  # bruce is removed for now as it doesn't really do anything  dodgy_env=env.Clone()
557  # env.SConscript(dirs = ['bruce/src'], build_dir='build/$PLATFORM/bruce', duplicate=0)  
558  env.SConscript(dirs = ['escript/src'], build_dir='build/$PLATFORM/escript', duplicate=0)  # now add warnings-as-errors flags. This needs to be done after configuration
559  env.SConscript(dirs = ['esysUtils/src'], build_dir='build/$PLATFORM/esysUtils', duplicate=0)  # because the scons test files have warnings in them
560  env.SConscript(dirs = ['finley/src'], build_dir='build/$PLATFORM/finley', duplicate=0)  if ((fatalwarning != '') and (env['werror'])):
561  env.SConscript(dirs = ['modellib/py_src'], build_dir='build/$PLATFORM/modellib', duplicate=0)      env.Append(CCFLAGS = fatalwarning)
562  env.SConscript(dirs = ['doc'], build_dir='build/$PLATFORM/doc', duplicate=0)  
563  env.SConscript(dirs = ['pyvisi/py_src'], build_dir='build/$PLATFORM/pyvisi', duplicate=0)  Export(
564  env.SConscript(dirs = ['pycad/py_src'], build_dir='build/$PLATFORM/pycad', duplicate=0)    ['env',
565  env.SConscript(dirs = ['pythonMPI/src'], build_dir='build/$PLATFORM/pythonMPI', duplicate=0)     'dodgy_env',
566  #env.SConscript(dirs = ['../test'], build_dir='../test/build', duplicate=0)     'IS_WINDOWS',
567       'TestGroups'
568      ]
569    )
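The checkCppUnit(), checkOptionalModules() and checkOptionalLibraries() calls added above are defined outside this file, so only their effect is visible here: they record which optional pieces were found. As a hedged sketch of that pattern (the helper name and key handling below are illustrative assumptions, not the project's actual code), an optional Python module probe only needs to note whether an import succeeds:

# Illustrative sketch only, not the real checkOptionalModules().
def check_optional_module(env, name):
    # Record in the construction environment whether 'name' can be imported.
    try:
        __import__(name)
        env[name] = True       # e.g. env['sympy'] = True
    except ImportError:
        env[name] = False
    return env

The summary function at the end of this file then simply reads such boolean keys back (e.g. env['sympy'], env['pyproj']).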
570    
571    #do not auto build
572    env.SConscript(dirs = ['tools/escriptconvert'], variant_dir='$BUILD_DIR/$PLATFORM/tools/escriptconvert', duplicate=0)
573    env.SConscript(dirs = ['tools/overlord'], variant_dir='$BUILD_DIR/$PLATFORM/tools/overlord', duplicate=0)
574    env.SConscript(dirs = ['paso/src'], variant_dir='$BUILD_DIR/$PLATFORM/paso', duplicate=0)
575    env.SConscript(dirs = ['weipa/src'], variant_dir='$BUILD_DIR/$PLATFORM/weipa', duplicate=0)
576    env.SConscript(dirs = ['escript/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/escript', duplicate=0)
577    
578    env.SConscript(dirs = ['cusplibrary'])
579    
580    #This will pull in the escriptcore/py_src and escriptcore/test
581    env.SConscript(dirs = ['escriptcore/src'], variant_dir='$BUILD_DIR/$PLATFORM/escriptcore', duplicate=0)
582    if 'dudley' in env['domains']:
583        env.SConscript(dirs = ['dudley/src'], variant_dir='$BUILD_DIR/$PLATFORM/dudley', duplicate=0)
584    if 'finley' in env['domains']:
585        env.SConscript(dirs = ['finley/src'], variant_dir='$BUILD_DIR/$PLATFORM/finley', duplicate=0)
586    if 'ripley' in env['domains']:
587        env.SConscript(dirs = ['ripley/src'], variant_dir='$BUILD_DIR/$PLATFORM/ripley', duplicate=0)
588    if 'speckley' in env['domains']:
589        env.SConscript(dirs = ['speckley/src'], variant_dir='$BUILD_DIR/$PLATFORM/speckley', duplicate=0)
590    env.SConscript(dirs = ['downunder/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/downunder', duplicate=0)
591    env.SConscript(dirs = ['modellib/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/modellib', duplicate=0)
592    env.SConscript(dirs = ['pycad/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/pycad', duplicate=0)
593    env.SConscript(dirs = ['pythonMPI/src'], variant_dir='$BUILD_DIR/$PLATFORM/pythonMPI', duplicate=0)
594    env.SConscript(dirs = ['doc'], variant_dir='$BUILD_DIR/$PLATFORM/doc', duplicate=0)
595    env.SConscript(dirs = ['paso/profiling'], variant_dir='$BUILD_DIR/$PLATFORM/paso/profiling', duplicate=0)
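Each of the SConscript calls above passes an explicit variant_dir so that object files and other build products are kept under $BUILD_DIR/$PLATFORM rather than alongside the sources, and duplicate=0 tells SCons to compile from the original source tree instead of copying it into the variant directory.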
596    
597    
598    ######################## Populate the buildvars file #########################
599    
600    write_buildvars(env)
601    
602    write_launcher(env)
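write_buildvars() and write_launcher() are likewise defined outside this file; what matters here is their effect: a buildvars record under the library install directory (the summary below points users at <prefix>/lib/buildvars) and, presumably, the run-escript launcher that the sanity target invokes further down. A minimal sketch of what such a buildvars writer could look like, assuming it simply dumps selected configuration values as key=value lines (the field names are illustrative; the real helper may differ):

# Hypothetical sketch, not the project's write_buildvars().
def write_buildvars_sketch(env):
    # Dump a few configuration values for later inspection.
    path = os.path.join(env['libinstall'], 'buildvars')
    out = open(path, 'w')
    for key in ('prefix', 'mpi', 'lapack', 'domains'):
        out.write("%s=%s\n" % (key, env.get(key, '')))
    out.close()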
603    
604    ################### Targets to build and install libraries ###################
605    
606    target_init = env.Command(os.path.join(env['pyinstall'],'__init__.py'), None, Touch('$TARGET'))
607    env.Alias('target_init', [target_init])
608    # delete buildvars upon cleanup
609    env.Clean('target_init', os.path.join(env['libinstall'], 'buildvars'))
610    
611    # The headers have to be installed prior to build
612    
613    env.Alias('build_paso', ['install_paso_headers', 'build_paso_lib'])
614    env.Alias('install_paso', ['build_paso', 'install_paso_lib'])
615    
616    env.Alias('build_escript', ['install_escript_headers', 'build_escript_lib', 'build_escriptcpp_lib'])
617    env.Alias('install_escript', ['build_escript', 'install_escript_lib', 'install_escriptcpp_lib', 'install_escriptcore_py', 'install_escript_py'])
618    
619    if 'dudley' in env['domains']:
620        env.Alias('build_dudley', ['install_dudley_headers', 'build_dudley_lib', 'build_dudleycpp_lib'])
621        env.Alias('install_dudley', ['build_dudley', 'install_dudley_lib', 'install_dudleycpp_lib', 'install_dudley_py'])
622    
623    if 'finley' in env['domains']:
624        env.Alias('build_finley', ['install_finley_headers', 'build_finley_lib', 'build_finleycpp_lib'])
625        env.Alias('install_finley', ['build_finley', 'install_finley_lib', 'install_finleycpp_lib', 'install_finley_py'])
626    
627    if 'ripley' in env['domains']:
628        env.Alias('build_ripley', ['install_cusp_headers', 'install_ripley_headers', 'build_ripley_lib', 'build_ripleycpp_lib'])
629        env.Alias('install_ripley', ['build_ripley', 'install_ripley_lib', 'install_ripleycpp_lib', 'install_ripley_py'])
630    
631    if 'speckley' in env['domains']:
632        env.Alias('build_speckley', ['install_speckley_headers', 'build_speckley_lib', 'build_speckleycpp_lib'])
633        env.Alias('install_speckley', ['build_speckley', 'install_speckley_lib', 'install_speckleycpp_lib', 'install_speckley_py'])
634    
635    env.Alias('build_weipa', ['install_weipa_headers', 'build_weipa_lib', 'build_weipacpp_lib'])
636    env.Alias('install_weipa', ['build_weipa', 'install_weipa_lib', 'install_weipacpp_lib', 'install_weipa_py'])
637    
638    env.Alias('build_escriptreader', ['install_weipa_headers', 'build_escriptreader_lib'])
639    env.Alias('install_escriptreader', ['build_escriptreader', 'install_escriptreader_lib'])
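As the comment above notes, the headers must be installed before the libraries that use them are built; the alias pairs only group the relevant targets, so any strict ordering has to come from the dependency graph itself, or from an order-only Requires() call of the kind used for py_tests further down. A minimal illustration (the two names are existing aliases, but this particular call is only an example, not something this file adds):

# Illustrative only: an order-only prerequisite in the style of Requires('py_tests', 'install_all') below.
Requires('build_paso_lib', 'install_paso_headers')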
640    
641    # Now gather all the above into some easy targets: build_all and install_all
642    build_all_list = []
643    build_all_list += ['build_paso']
644    build_all_list += ['build_escript']
645    if 'dudley' in env['domains']: build_all_list += ['build_dudley']
646    if 'finley' in env['domains']: build_all_list += ['build_finley']
647    if 'ripley' in env['domains']: build_all_list += ['build_ripley']
648    if 'speckley' in env['domains']: build_all_list += ['build_speckley']
649    build_all_list += ['build_weipa']
650    if not IS_WINDOWS and 'finley' in env['domains']:
651        build_all_list += ['build_escriptreader']
652    if env['usempi']:
653        build_all_list += ['build_pythonMPI', 'build_overlord']
654    env.Alias('build_all', build_all_list)
655    
656    install_all_list = []
657    install_all_list += ['target_init']
658    install_all_list += ['install_paso']
659    install_all_list += ['install_escript']
660    if 'dudley' in env['domains']: install_all_list += ['install_dudley']
661    if 'finley' in env['domains']: install_all_list += ['install_finley']
662    if 'ripley' in env['domains']: install_all_list += ['install_ripley']
663    if 'speckley' in env['domains']: install_all_list += ['install_speckley']
664    install_all_list += ['install_weipa']
665    if not IS_WINDOWS and 'finley' in env['domains']:
666        install_all_list += ['install_escriptreader']
667    install_all_list += ['install_downunder_py']
668    install_all_list += ['install_modellib_py']
669    install_all_list += ['install_pycad_py']
670    if env['usempi']:
671        install_all_list += ['install_pythonMPI', 'install_overlord']
672    install_all_list += ['install_weipa_py']    
673    install_all_list += [env.Install(os.path.join(env['build_dir'],'scripts'), os.path.join('scripts', 'release_sanity.py'))]
674    
675    
676    if env['osx_dependency_fix']:
677        print("Require dependency fix")
678        install_all=env.Command('install_all',install_all_list,'scripts/moveall.sh')
679    else:
680        install_all=env.Alias('install_all', install_all_list)
681    
682    
683    
684    
685    # The default target is chosen below: 'sanity' when all domains are enabled, otherwise 'install_all'
686    #env.Default('install_all')
687    
688    
689    sanity=env.Alias('sanity', env.Command('dummy','',os.path.join(env['prefix'], 'bin', 'run-escript')+' '+os.path.join(env['build_dir'],'scripts', 'release_sanity.py')))
690    env.Depends('dummy', install_all)
691    if env['usempi']:
692       #env.Requires('dummy', ['build_pythonMPI', 'install_pythonMPI'])
693       #env.Requires('dummy', env['prefix']+"/lib/pythonMPI")
694       env.Depends('dummy', ['build_pythonMPI', 'install_pythonMPI'])
695       env.Depends('dummy', env['prefix']+"/lib/pythonMPI")  
696    
697    if 'install_dudley' in install_all_list and \
698       'install_finley' in install_all_list and \
699       'install_ripley' in install_all_list and \
700       'install_speckley' in install_all_list:
701           env.AlwaysBuild('sanity')
702           env.Default('sanity')
703    else:
704        env.Default('install_all')
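In practice these are the names given on the scons command line: scons build_all compiles every enabled component, scons install_all installs them under env['prefix'], and a bare scons run defaults to the sanity check when all four domains (dudley, finley, ripley, speckley) are enabled, falling back to install_all otherwise, as selected just above.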
705    
706    ################## Targets to build and run the test suite ###################
707    
708    if not env['cppunit']:
709        test_msg = env.Command('.dummy.', None, '@echo "Cannot run C++ unit tests, CppUnit not found!";exit 1')
710        env.Alias('run_tests', test_msg)
711        env.Alias('build_tests', '')
712    env.Alias('run_tests', ['install_all'])
713    env.Alias('all_tests', ['install_all', 'run_tests', 'py_tests'])
714    env.Alias('build_full',['install_all','build_tests','build_py_tests'])
715    env.Alias('build_PasoTests','$BUILD_DIR/$PLATFORM/paso/profiling/PasoTests')
716    Requires('py_tests', 'install_all')
717    
718    ##################### Targets to build the documentation #####################
719    
720    env.Alias('pdfdocs',['user_pdf', 'install_pdf', 'cookbook_pdf', 'inversion_pdf'])
721    env.Alias('basedocs', ['pdfdocs','examples_tarfile', 'examples_zipfile', 'api_doxygen'])
722    env.Alias('docs', ['basedocs', 'sphinxdoc'])
723    env.Alias('release_prep', ['docs', 'install_all'])
724    env.Alias('release_prep_old', ['basedocs', 'api_epydoc', 'install_all'])
725    
726    # The test scripts are always generated; this target allows us to
727    # generate the test scripts without doing a full build
728    env.Alias('testscripts',[])
729    
730    if not IS_WINDOWS:
731        generateTestScripts(env, TestGroups)
732    
733    
734    ######################## Summarize our environment ###########################
735    def print_summary():
736        print("")
737        print("*** Config Summary (see config.log and <prefix>/lib/buildvars for details) ***")
738        print("Escript/Finley revision %s"%global_revision)
739        print("  Install prefix:  %s"%env['prefix'])
740        print("          Python:  %s"%sysconfig.PREFIX)
741        print("           boost:  %s (Version %s)"%(env['boost_prefix'],env['boost_version']))
742        if env['numpy_h']:
743            print("           numpy:  YES (with headers)")
744        else:
745            print("           numpy:  YES (without headers)")
746        if env['usempi']:
747            print("             MPI:  YES (flavour: %s)"%env['mpi'])
748        else:
749            print("             MPI:  NO")
750        if env['parmetis']:
751            print("        ParMETIS:  %s (Version %s)"%(env['parmetis_prefix'],env['parmetis_version']))
752        else:
753            print("        ParMETIS:  NO")
754        if env['uselapack']:
755            print("          LAPACK:  YES (flavour: %s)"%env['lapack'])
756        else:
757            print("          LAPACK:  NO")
758        if env['cuda']:
759            print("            CUDA:  YES (nvcc: %s)"%env['nvcc_version'])
760        else:
761            print("            CUDA:  NO")
762        d_list=[]
763        e_list=[]
764        for i in 'debug','openmp','boomeramg','cppunit','gdal','mkl','netcdf','papi','pyproj','scipy','silo','sympy','umfpack','visit':
765            if env[i]: e_list.append(i)
766            else: d_list.append(i)
767        for i in e_list:
768            print("%16s:  YES"%i)
769        for i in d_list:
770            print("%16s:  NO"%i)
771        if env['gmshpy']:
772            gmshpy=" + python module"
773        else:
774            gmshpy=""
775        if env['gmsh']=='m':
776            print("            gmsh:  YES, MPI-ENABLED"+gmshpy)
777        elif env['gmsh']=='s':
778            print("            gmsh:  YES"+gmshpy)
779        else:
780            if env['gmshpy']:
781                print("            gmsh:  python module only")
782            else:
783                print("            gmsh:  NO")
784        print(    "            gzip:  " + ("YES" if env['compressed_files'] else "NO"))
785    
786        if ((fatalwarning != '') and (env['werror'])):
787            print("  Treating warnings as errors")
788        else:
789            print("  NOT treating warnings as errors")
790        print("")
791        for w in env['warnings']:
792            print("WARNING: %s"%w)
793        if len(GetBuildFailures()):
794            print("\nERROR: build stopped due to errors\n")
795        else:
796            print("\nSUCCESS: build complete\n")
797    
798    atexit.register(print_summary)
799

Removed from v.1324:

syslib_install_target = env.installDirectory(sys_libinstall,libinstall)
syspy_install_target = env.installDirectory(sys_pyinstall,pyinstall,recursive=True)

install_target = env.Alias("install", env.Flatten([syslib_install_target, syspy_install_target]) )
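Registering print_summary() with atexit rather than calling it inline is deliberate: GetBuildFailures() only has meaningful contents once SCons has finished building, so an exit handler guarantees the summary, and its success or failure verdict, is printed even when the build stops early.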
