/[escript]/trunk/SConstruct

Diff of /trunk/SConstruct


Diff between revision 1391 by gross, Mon Jan 14 03:54:57 2008 UTC and revision 4167 by jfenwick, Fri Jan 25 05:21:30 2013 UTC
# Line 1  Line 1 
1  #         Copyright 2006 by ACcESS MNRF  ##############################################################################
2  #  #
3  #              http://www.access.edu.au  # Copyright (c) 2003-2013 by University of Queensland
4  #       Primary Business: Queensland, Australia  # http://www.uq.edu.au
 #  Licensed under the Open Software License version 3.0  
 #     http://www.opensource.org/licenses/osl-3.0.php  
   
 # top-level Scons configuration file for all esys13 modules  
 # Begin initialisation Section  
 # all of this section just intialises default environments and helper  
 # scripts. You shouldn't need to modify this section.  
 EnsureSConsVersion(0,96,91)  
 EnsurePythonVersion(2,3)  
   
   
 #===============================================================  
 #   import tools:  
 import glob  
 import sys, os, re  
 # Add our extensions  
 if sys.path.count('scons')==0: sys.path.append('scons')  
 import scons_extensions  
   
 # We may also need to know where python's site-packages subdirectory lives  
 python_version = 'python%s.%s'%(sys.version_info[0],sys.version_info[1])  
   
 #===============================================================  
   
 tools_prefix="/usr"  
   
 #==============================================================================================      
 #      
 #    get the installation prefix  
5  #  #
6  prefix = ARGUMENTS.get('prefix', sys.prefix )  # Primary Business: Queensland, Australia
7    # Licensed under the Open Software License version 3.0
8  # We may also need to know where python's site-packages subdirectory lives  # http://www.opensource.org/licenses/osl-3.0.php
9  python_version = 'python%s.%s'%(sys.version_info[0],sys.version_info[1])  #
10  # Install as a standard python package in /usr/lib64 if available, else in /usr/lib  # Development until 2012 by Earth Systems Science Computational Center (ESSCC)
11  if os.path.isdir(  prefix+"/lib64/"+python_version+"/site-packages"):  # Development since 2012 by School of Earth Sciences
12     sys_dir_packages =  prefix+"/lib64/"+python_version+"/site-packages/esys"  #
13     sys_dir_libraries = prefix+"/lib64"  ##############################################################################
14  else:  
15     sys_dir_packages =  prefix+"/lib/"+python_version+"/site-packages/esys"  EnsureSConsVersion(0,98,1)
16     sys_dir_libraries = prefix+"/lib"  EnsurePythonVersion(2,5)
17    
18  sys_dir_examples = prefix+"/share/doc/esys"  import sys, os, platform, re
19    from distutils import sysconfig
20  source_root = Dir('#.').abspath  from site_init import *
21    from subprocess import PIPE, Popen
22  dir_packages = os.path.join(source_root,"esys")  
23  dir_examples = os.path.join(source_root,"examples")  # Version number to check for in options file. Increment when new features are
24  dir_libraries = os.path.join(source_root,"lib")  # added or existing options changed.
25    REQUIRED_OPTS_VERSION=201
26  print "Source root is : ",source_root  
27  print " Default packages local installation:    ", dir_packages  # MS Windows support, many thanks to PH
28  print " Default library local installation  ", dir_libraries  IS_WINDOWS = (os.name == 'nt')
29  print " Default example local  installation:    ", dir_examples  
30  print "Install prefix is: ", prefix  ########################## Determine options file ############################
31  print " Default packages system installation:   ", sys_dir_packages  # 1. command line
32  print " Default library system installation     ", sys_dir_libraries  # 2. scons/<hostname>_options.py
33  print " Default example system installation:    ", sys_dir_examples  # 3. name as part of a cluster
34    options_file=ARGUMENTS.get('options_file', None)
35  #==============================================================================================      if not options_file:
36        ext_dir = os.path.join(os.getcwd(), 'scons')
37  # Default options and options help text      hostname = platform.node().split('.')[0]
38  # These are defaults and can be overridden using command line arguments or an options file.      for name in hostname, effectiveName(hostname):
39  # if the options_file or ARGUMENTS do not exist then the ones listed as default here are used          mangledhostname = re.sub('[^0-9a-zA-Z]', '_', name)
40  # DO NOT CHANGE THEM HERE          options_file = os.path.join(ext_dir, mangledhostname+'_options.py')
41  # Where to install?          if os.path.isfile(options_file): break
42  #==============================================================================================      
43  #      if not os.path.isfile(options_file):
44  #    get the options file if present:      print("\nWARNING:\nOptions file %s" % options_file)
45  #      print("not found! Default options will be used which is most likely suboptimal.")
46  options_file = ARGUMENTS.get('options_file','')      print("It is recommended that you copy one of the TEMPLATE files in the scons/")
47        print("subdirectory and customize it to your needs.\n")
48  if not os.path.isfile(options_file) :      options_file = None
49      options_file = False  
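For reference, a minimal host-specific options file using the option names declared below could look like the following (the file name and the values shown are only hypothetical):

    # scons/myhost_options.py -- hypothetical example
    escript_opts_version = 201          # must match REQUIRED_OPTS_VERSION
    cc_optim = '-O3'
    openmp = True
    boost_prefix = '/usr/local'
    boost_libs = ['boost_python-mt']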
50    ############################### Build options ################################
51  if not options_file :  
52     import socket  default_prefix='/usr'
53     hostname = re.sub("[^0-9a-zA-Z]", "_", socket.gethostname().split('.')[0])  mpi_flavours=('no', 'none', 'MPT', 'MPICH', 'MPICH2', 'OPENMPI', 'INTELMPI')
54     tmp = os.path.join("scons",hostname+"_options.py")  lapack_flavours=('none', 'clapack', 'mkl')
55    
56     if os.path.isfile(tmp) :  vars = Variables(options_file, ARGUMENTS)
57        options_file = tmp  vars.AddVariables(
58      PathVariable('options_file', 'Path to options file', options_file, PathVariable.PathIsFile),
59  IS_WINDOWS_PLATFORM = (os.name== "nt")    PathVariable('prefix', 'Installation prefix', Dir('#.').abspath, PathVariable.PathIsDirCreate),
60      PathVariable('build_dir', 'Top-level build directory', Dir('#/build').abspath, PathVariable.PathIsDirCreate),
61  # If you're not going to tell me then......    BoolVariable('verbose', 'Output full compile/link lines', False),
62  # FIXME: add one for the altix too.  # Compiler/Linker options
63  if not options_file :    ('cc', 'Path to C compiler', 'default'),
64     if IS_WINDOWS_PLATFORM :    ('cxx', 'Path to C++ compiler', 'default'),
65        options_file = "scons/windows_mscv71_options.py"    ('cc_flags', 'Base C/C++ compiler flags', 'default'),
66     else:    ('cc_optim', 'Additional C/C++ flags for a non-debug build', 'default'),
67        options_file = "scons/linux_gcc_eg_options.py"    ('cc_debug', 'Additional C/C++ flags for a debug build', 'default'),
68      ('cc_extra', 'Extra C compiler flags', ''),
69  # and load it    ('cxx_extra', 'Extra C++ compiler flags', ''),
70  opts = Options(options_file, ARGUMENTS)    ('ld_extra', 'Extra linker flags', ''),
71  #================================================================    BoolVariable('werror','Treat compiler warnings as errors', True),
72  #    BoolVariable('debug', 'Compile with debug flags', False),
73  #   check if UMFPACK is installed on the system:    BoolVariable('openmp', 'Compile parallel version using OpenMP', False),
74  #    ('omp_flags', 'OpenMP compiler flags', 'default'),
75  uf_root=None    ('omp_ldflags', 'OpenMP linker flags', 'default'),
76  for i in [ 'UMFPACK', 'umfpack', 'ufsparse', 'UFSPARSE']:  # Mandatory libraries
77     if os.path.isdir(os.path.join(tools_prefix,'include',i)):    ('boost_prefix', 'Prefix/Paths of boost installation', default_prefix),
78         uf_root=i    ('boost_libs', 'Boost libraries to link with', ['boost_python-mt']),
79         print i," is used form ",tools_prefix  # Mandatory for tests
80         break    ('cppunit_prefix', 'Prefix/Paths of CppUnit installation', default_prefix),
81  if not uf_root==None:    ('cppunit_libs', 'CppUnit libraries to link with', ['cppunit']),
82     umf_path_default=os.path.join(tools_prefix,'include',uf_root)  # Optional libraries and options
83     umf_lib_path_default=os.path.join(tools_prefix,'lib')    EnumVariable('mpi', 'Compile parallel version using MPI flavour', 'none', allowed_values=mpi_flavours),
84     umf_libs_default=['umfpack']    ('mpi_prefix', 'Prefix/Paths of MPI installation', default_prefix),
85     amd_path_default=os.path.join(tools_prefix,'include',uf_root)    ('mpi_libs', 'MPI shared libraries to link with', ['mpi']),
86     amd_lib_path_default=os.path.join(tools_prefix,'lib')    BoolVariable('netcdf', 'Enable netCDF file support', False),
87     amd_libs_default=['amd']    ('netcdf_prefix', 'Prefix/Paths of netCDF installation', default_prefix),
88     ufc_path_default=os.path.join(tools_prefix,'include',uf_root)    ('netcdf_libs', 'netCDF libraries to link with', ['netcdf_c++', 'netcdf']),
89  else:    BoolVariable('parmetis', 'Enable ParMETIS (requires MPI)', False),
90     umf_path_default=None    ('parmetis_prefix', 'Prefix/Paths of ParMETIS installation', default_prefix),
91     umf_lib_path_default=None    ('parmetis_libs', 'ParMETIS libraries to link with', ['parmetis', 'metis']),
92     umf_libs_default=None    BoolVariable('papi', 'Enable PAPI', False),
93     amd_path_default=None    ('papi_prefix', 'Prefix/Paths to PAPI installation', default_prefix),
94     amd_lib_path_default=None    ('papi_libs', 'PAPI libraries to link with', ['papi']),
95     amd_libs_default=None    BoolVariable('papi_instrument_solver', 'Use PAPI to instrument each iteration of the solver', False),
96     ufc_path_default=None    BoolVariable('mkl', 'Enable the Math Kernel Library', False),
97  #    ('mkl_prefix', 'Prefix/Paths to MKL installation', default_prefix),
98  #==========================================================================    ('mkl_libs', 'MKL libraries to link with', ['mkl_solver','mkl_em64t','guide','pthread']),
99  #    BoolVariable('umfpack', 'Enable UMFPACK', False),
100  #    python installation:    ('umfpack_prefix', 'Prefix/Paths to UMFPACK installation', default_prefix),
101  #    ('umfpack_libs', 'UMFPACK libraries to link with', ['umfpack']),
102  python_path_default=os.path.join(tools_prefix,'include','python%s.%s'%(sys.version_info[0],sys.version_info[1]))    BoolVariable('boomeramg', 'Enable BoomerAMG', False),
103  python_lib_path_default=os.path.join(tools_prefix,'lib')    ('boomeramg_prefix', 'Prefix/Paths to BoomerAMG installation', default_prefix),
104  python_lib_default="python%s.%s"%(sys.version_info[0],sys.version_info[1])    ('boomeramg_libs', 'BoomerAMG libraries to link with', ['boomeramg']),
105      EnumVariable('lapack', 'Set LAPACK flavour', 'none', allowed_values=lapack_flavours),
106  #==========================================================================    ('lapack_prefix', 'Prefix/Paths to LAPACK installation', default_prefix),
107  #    ('lapack_libs', 'LAPACK libraries to link with', []),
108  #    boost installation:    BoolVariable('silo', 'Enable the Silo file format in weipa', False),
109  #    ('silo_prefix', 'Prefix/Paths to Silo installation', default_prefix),
110  boost_path_default=os.path.join(tools_prefix,'include')    ('silo_libs', 'Silo libraries to link with', ['siloh5', 'hdf5']),
111  boost_lib_path_default=os.path.join(tools_prefix,'lib')    BoolVariable('visit', 'Enable the VisIt simulation interface', False),
112  boost_lib_default=['boost_python']    ('visit_prefix', 'Prefix/Paths to VisIt installation', default_prefix),
113      ('visit_libs', 'VisIt libraries to link with', ['simV2']),
114      BoolVariable('vsl_random', 'Use VSL from intel for random data', False),
115    # Advanced settings
116      #dudley_assemble_flags = -funroll-loops      to actually do something
117      ('dudley_assemble_flags', 'compiler flags for some dudley optimisations', ''),
118      # To enable passing function pointers through python
119      BoolVariable('iknowwhatimdoing', 'Allow non-standard C', False),
120      # An option for specifying the compiler tools (see windows branch)
121      ('tools_names', 'Compiler tools to use', ['default']),
122      ('env_export', 'Environment variables to be passed to tools',[]),
123      EnumVariable('forcelazy', 'For testing use only - set the default value for autolazy', 'leave_alone', allowed_values=('leave_alone', 'on', 'off')),
124      EnumVariable('forcecollres', 'For testing use only - set the default value for force resolving collective ops', 'leave_alone', allowed_values=('leave_alone', 'on', 'off')),
125      # finer control over library building, intel aggressive global optimisation
126      # works with dynamic libraries on windows.
127      ('build_shared', 'Build dynamic libraries only', False),
128      ('sys_libs', 'Extra libraries to link with', []),
129      ('escript_opts_version', 'Version of options file (do not specify on command line)'),
130      ('SVN_VERSION', 'Do not use from options file', -2),
131      ('pythoncmd', 'which python to compile with','python'),
132      ('usepython3', 'Is this a python3 build? (experimental)', False),
133      ('pythonlibname', 'Name of the python library to link. (This is found automatically for python2.X.)', ''),
134      ('pythonlibpath', 'Path to the python library. (You should not need to set this unless your python has moved)',''),
135      ('pythonincpath','Path to python include files. (You should not need to set this unless your python has moved)',''),
136      BoolVariable('BADPYTHONMACROS','Extra \#include to get around a python bug.', True),
137    )
138    
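The same variables can also be set directly on the scons command line instead of in an options file; a hypothetical invocation (paths and values are placeholders) would be:

    scons -j4 options_file=scons/myhost_options.py prefix=/opt/escript openmp=1 verbose=1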
139  #==========================================================================  ##################### Create environment and help text #######################
 #  
 #    check if netCDF is installed on the system:  
 #  
 netCDF_path_default=os.path.join(tools_prefix,'include','netcdf-3')  
 netCDF_lib_path_default=os.path.join(tools_prefix,'lib')  
140    
141  if os.path.isdir(netCDF_path_default) and os.path.isdir(netCDF_lib_path_default):  # Intel's compiler uses regular expressions improperly and emits a warning
142       useNetCDF_default='yes'  # about failing to find the compilers. This warning can be safely ignored.
      netCDF_libs_default=[ 'netcdf_c++', 'netcdf' ]  
 else:  
      useNetCDF_default='no'  
      netCDF_path_default=None  
      netCDF_lib_path_default=None  
      netCDF_libs_default=None  
143    
144  #==========================================================================  # PATH is needed so the compiler, linker and tools are found if they are not
145  #  # in default locations.
146  #  MPI:  env = Environment(tools = ['default'], options = vars,
147  #                    ENV = {'PATH': os.environ['PATH']})
148  if IS_WINDOWS_PLATFORM:                    
149     useMPI_default='no'  
150     mpi_path_default=None  #set the vars for clang
151     mpi_lib_path_default=None  def mkclang(env):
152     mpi_libs_default=[]    env['CC']='clang'
153     mpi_run_default=None    env['CXX']='clang++'
154  else:                    
155     useMPI_default='no'                    
156     mpi_root='/usr/local'  if env['tools_names'] != 'default':
157     mpi_path_default=os.path.join(mpi_root,'include')      zz=env['tools_names']
158     mpi_lib_path_default=os.path.join(mpi_root,'lib')      if 'clang' in zz:
159     mpi_libs_default=[ 'mpich' , 'pthread', 'rt' ]          zz.remove('clang')
160     mpi_run_default='mpiexec -np 1'          zz.insert(0, mkclang)
161  #      env = Environment(tools = ['default'] + env['tools_names'], options = vars,
162  #==========================================================================                        ENV = {'PATH' : os.environ['PATH']})
163  #  
164  #    compile:  if options_file:
165  #      opts_valid=False
166  cc_flags_default='-O3 -std=c99 -ffast-math -fpic -Wno-unknown-pragmas -ansi'      if 'escript_opts_version' in env.Dictionary() and \
167  cc_flags_debug_default='-g -O0 -ffast-math -std=c99 -fpic -Wno-unknown-pragmas -ansi'          int(env['escript_opts_version']) >= REQUIRED_OPTS_VERSION:
168  cxx_flags_default='--no-warn -ansi'              opts_valid=True
169  cxx_flags_debug_default='--no-warn -ansi -DDOASSERT'      if opts_valid:
170            print("Using options in %s." % options_file)
171  #==============================================================================================          else:
172  # Default options and options help text          print("\nOptions file %s" % options_file)
173  # These are defaults and can be overridden using command line arguments or an options file.          print("is outdated! Please update the file by examining one of the TEMPLATE")
174  # if the options_file or ARGUMENTS do not exist then the ones listed as default here are used          print("files in the scons/ subdirectory and setting escript_opts_version to %d.\n"%REQUIRED_OPTS_VERSION)
175  # DO NOT CHANGE THEM HERE          Exit(1)
176  opts.AddOptions(  
177  # Where to install esys stuff  # Generate help text (scons -h)
178    ('incinstall', 'where the esys headers will be installed',             Dir('#.').abspath+'/include'),  Help(vars.GenerateHelpText(env))
179    ('libinstall', 'where the esys libraries will be installed',           dir_libraries),  
180    ('pyinstall', 'where the esys python modules will be installed',       dir_packages),  # Check for superfluous options
181    ('exinstall', 'where the esys examples will be installed',             dir_examples),  if len(vars.UnknownVariables())>0:
182    ('sys_libinstall', 'where the system esys libraries will be installed',       sys_dir_libraries),      for k in vars.UnknownVariables():
183    ('sys_pyinstall', 'where the system esys python modules will be installed',   sys_dir_packages),          print("Unknown option '%s'" % k)
184    ('sys_exinstall', 'where the system esys examples will be installed',         sys_dir_examples),      Exit(1)
185    ('src_zipfile', 'the source zip file will be installed.',              Dir('#.').abspath+"/release/escript_src.zip"),  
186    ('test_zipfile', 'the test zip file will be installed.',               Dir('#.').abspath+"/release/escript_tests.zip"),  #################### Make sure install directories exist #####################
187    ('src_tarfile', 'the source tar file will be installed.',              Dir('#.').abspath+"/release/escript_src.tar.gz"),  
188    ('test_tarfile', 'the test tar file will be installed.',               Dir('#.').abspath+"/release/escript_tests.tar.gz"),  env['BUILD_DIR']=Dir(env['build_dir']).abspath
189    ('examples_tarfile', 'the examples tar file will be installed.',       Dir('#.').abspath+"/release/doc/escript_examples.tar.gz"),  prefix=Dir(env['prefix']).abspath
190    ('examples_zipfile', 'the examples zip file will be installed.',       Dir('#.').abspath+"/release/doc/escript_examples.zip"),  env['incinstall'] = os.path.join(prefix, 'include')
191    ('guide_pdf', 'name of the user guide in pdf format',                  Dir('#.').abspath+"/release/doc/user/guide.pdf"),  env['bininstall'] = os.path.join(prefix, 'bin')
192    ('api_epydoc', 'name of the epydoc api docs directory',                Dir('#.').abspath+"/release/doc/epydoc"),  env['libinstall'] = os.path.join(prefix, 'lib')
193    ('guide_html', 'name of the directory for user guide in html format',  Dir('#.').abspath+"/release/doc/user/html"),  env['pyinstall']  = os.path.join(prefix, 'esys')
194    ('api_doxygen', 'name of the doxygen api docs directory',prefix+"/release/doc/doxygen"),  if not os.path.isdir(env['bininstall']):
195  # Compilation options      os.makedirs(env['bininstall'])
196    BoolOption('dodebug', 'Do you want a debug build?', 'no'),  if not os.path.isdir(env['libinstall']):
197    BoolOption('bounds_check', 'Do you want extra array bounds checking?', 'no'),      os.makedirs(env['libinstall'])
198    ('options_file', "Optional file containing preferred options. Ignored if it doesn't exist (default: scons/<hostname>_options.py)", options_file),  if not os.path.isdir(env['pyinstall']):
199    ('cc_defines','C/C++ defines to use', None),      os.makedirs(env['pyinstall'])
200    ('cc_flags','C compiler flags to use (Release build)', cc_flags_default),  
201    ('cc_flags_debug', 'C compiler flags to use (Debug build)', cc_flags_debug_default),  env.Append(CPPPATH = [env['incinstall']])
202    ('cxx_flags', 'C++ compiler flags to use (Release build)', cxx_flags_default),  env.Append(LIBPATH = [env['libinstall']])
203    ('cxx_flags_debug', 'C++ compiler flags to use (Debug build)', cxx_flags_debug_default),  
204    ('omp_flags', 'OpenMP compiler flags to use (Release build)', ''),  ################# Fill in compiler options if not set above ##################
205    ('omp_flags_debug', 'OpenMP compiler flags to use (Debug build)', ''),  
206    ('ar_flags', 'Static library archiver flags to use', None),  if env['cc'] != 'default': env['CC']=env['cc']
207    ('sys_libs', 'System libraries to link with', None),  if env['cxx'] != 'default': env['CXX']=env['cxx']
208    ('tar_flags','flags for zip files','-c -z'),  
209  # MKL  # version >=9 of intel C++ compiler requires use of icpc to link in C++
210    PathOption('mkl_path', 'Path to MKL includes', None),  # runtimes (icc does not)
211    PathOption('mkl_lib_path', 'Path to MKL libs', None),  if not IS_WINDOWS and os.uname()[4]=='ia64' and env['CXX']=='icpc':
212    ('mkl_libs', 'MKL libraries to link with', None),      env['LINK'] = env['CXX']
213  # SCSL  
214    PathOption('scsl_path', 'Path to SCSL includes', None),  # default compiler/linker options
215    PathOption('scsl_lib_path', 'Path to SCSL libs', None),  cc_flags = ''
216    ('scsl_libs', 'SCSL libraries to link with', None),  cc_optim = ''
217    ('scsl_libs_MPI', 'SCSL libraries to link with for MPI build', None),  cc_debug = ''
218  # UMFPACK  omp_flags = ''
219    PathOption('ufc_path', 'Path to UFconfig includes', ufc_path_default),  omp_ldflags = ''
220    PathOption('umf_path', 'Path to UMFPACK includes', umf_path_default),  fatalwarning = '' # switch to turn warnings into errors
221    PathOption('umf_lib_path', 'Path to UMFPACK libs', umf_lib_path_default),  sysheaderopt = '' # how to indicate that a header is a system header
222    ('umf_libs', 'UMFPACK libraries to link with', umf_libs_default),  
223  # AMD (used by UMFPACK)  # env['CC'] might be a full path
224    PathOption('amd_path', 'Path to AMD includes', amd_path_default),  cc_name=os.path.basename(env['CC'])
225    PathOption('amd_lib_path', 'Path to AMD libs', amd_lib_path_default),  
226    ('amd_libs', 'AMD libraries to link with', amd_libs_default),  if cc_name == 'icc':
227  # TRILINOS      # Intel compiler
228    PathOption('trilinos_path', 'Path to TRILINOS includes', None),      # #1875: offsetof applied to non-POD types is nonstandard (in boost)
229    PathOption('trilinos_lib_path', 'Path to TRILINOS libs', None),      cc_flags    = "-std=c99 -fPIC -w2 -wd1875 -Wno-unknown-pragmas -DBLOCKTIMER -DCORE_ID1"
230    ('trilinos_libs', 'TRILINOS libraries to link with', None),      cc_optim    = "-O3 -ftz -fno-alias -ipo -xHost"
231  # BLAS      cc_debug    = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
232    PathOption('blas_path', 'Path to BLAS includes', None),      omp_flags   = "-openmp"
233    PathOption('blas_lib_path', 'Path to BLAS libs', None),      omp_ldflags = "-openmp -openmp_report=1"
234    ('blas_libs', 'BLAS libraries to link with', None),      fatalwarning = "-Werror"
235  # netCDF  elif cc_name[:3] == 'gcc':
236    ('useNetCDF', 'switch on/off the usage of netCDF', useNetCDF_default),      # GNU C on any system
237    PathOption('netCDF_path', 'Path to netCDF includes', netCDF_path_default),      cc_flags     = "-pedantic -Wall -fPIC -ffast-math -Wno-unknown-pragmas -DBLOCKTIMER  -Wno-sign-compare -Wno-system-headers -Wno-long-long -Wno-strict-aliasing -finline-functions"
238    PathOption('netCDF_lib_path', 'Path to netCDF libs', netCDF_lib_path_default),      cc_optim     = "-O3"
239    ('netCDF_libs', 'netCDF C++ libraries to link with', netCDF_libs_default),      cc_debug     = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
240  # Python      omp_flags    = "-fopenmp"
241  # locations of include files for python      omp_ldflags  = "-fopenmp"
242  # FIXME: python_path should be python_inc_path and the same for boost etc.      fatalwarning = "-Werror"
243    PathOption('python_path', 'Path to Python includes', python_path_default),      sysheaderopt = "-isystem"
244    PathOption('python_lib_path', 'Path to Python libs', python_lib_path_default),  elif cc_name == 'cl':
245    ('python_lib', 'Python libraries to link with', python_lib_default),      # Microsoft Visual C on Windows
246    ('python_cmd', 'Python command', 'python'),      cc_flags     = "/EHsc /MD /GR /wd4068 /D_USE_MATH_DEFINES /DDLL_NETCDF"
247  # Boost      cc_optim     = "/O2 /Op /W3"
248    PathOption('boost_path', 'Path to Boost includes', boost_path_default),      cc_debug     = "/Od /RTCcsu /ZI /DBOUNDS_CHECK"
249    PathOption('boost_lib_path', 'Path to Boost libs', boost_lib_path_default),      fatalwarning = "/WX"
250    ('boost_lib', 'Boost libraries to link with', boost_lib_default),  elif cc_name == 'icl':
251  # Doc building      # Intel C on Windows
252  #  PathOption('doxygen_path', 'Path to Doxygen executable', None),      cc_flags     = '/EHsc /GR /MD'
253  #  PathOption('epydoc_path', 'Path to Epydoc executable', None),      cc_optim     = '/fast /Oi /W3 /Qssp /Qinline-factor- /Qinline-min-size=0 /Qunroll'
254  # PAPI      cc_debug     = '/Od /RTCcsu /Zi /Y- /debug:all /Qtrapuv'
255    PathOption('papi_path', 'Path to PAPI includes', None),      omp_flags    = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel'
256    PathOption('papi_lib_path', 'Path to PAPI libs', None),      omp_ldflags  = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel'
257    ('papi_libs', 'PAPI libraries to link with', None),  
258    ('papi_instrument_solver', 'use PAPI in Solver.c to instrument each iteration of the solver', None),  # set defaults if not otherwise specified
259  # MPI  if env['cc_flags']    == 'default': env['cc_flags'] = cc_flags
260    BoolOption('useMPI', 'Compile parallel version using MPI', useMPI_default),  if env['cc_optim']    == 'default': env['cc_optim'] = cc_optim
261    ('MPICH_IGNORE_CXX_SEEK', 'name of macro to ignore MPI settings of C++ SEEK macro (for MPICH)' , 'MPICH_IGNORE_CXX_SEEK'),  if env['cc_debug']    == 'default': env['cc_debug'] = cc_debug
262    PathOption('mpi_path', 'Path to MPI includes', mpi_path_default),  if env['omp_flags']   == 'default': env['omp_flags'] = omp_flags
263    ('mpi_run', 'mpirun name' , mpi_run_default),  if env['omp_ldflags'] == 'default': env['omp_ldflags'] = omp_ldflags
264    PathOption('mpi_lib_path', 'Path to MPI libs (needs to be added to the LD_LIBRARY_PATH)',mpi_lib_path_default),  if env['cc_extra']  != '': env.Append(CFLAGS = env['cc_extra'])
265    ('mpi_libs', 'MPI libraries to link with (needs to be shared!)', mpi_libs_default)  if env['cxx_extra'] != '': env.Append(CXXFLAGS = env['cxx_extra'])
266  )  if env['ld_extra']  != '': env.Append(LINKFLAGS = env['ld_extra'])
267  #=================================================================================================  
268  #  if env['BADPYTHONMACROS']: env.Append(CXXFLAGS = ' -DBADPYTHONMACROS')
269  #   Note: On the Altix the intel compilers are not automatically  
270  #   detected by scons intelc.py script. The Altix has a different directory  if env['usepython3']:
271  #   path and in some locations the "modules" facility is used to support      env.Append(CPPDEFINES=['ESPYTHON3'])
272  #   multiple compiler versions. This forces the need to import the users PATH  
273  #   environment which isn't the "scons way"  # set up the autolazy values
274  #   This doesn't impact linux and windows which will use the default compiler (g++ or msvc, or the intel compiler if it is installed on both platforms)  if env['forcelazy'] == 'on':
275  #   FIXME: Perhaps a modification to intelc.py will allow better support for ia64 on altix      env.Append(CPPDEFINES=['FAUTOLAZYON'])
276  #  elif env['forcelazy'] == 'off':
277        env.Append(CPPDEFINES=['FAUTOLAZYOFF'])
278  if IS_WINDOWS_PLATFORM:  
279        env = Environment(tools = ['default', 'msvc'], options = opts)  # set up the collective resolve values
280  else:  if env['forcecollres'] == 'on':
281     if os.uname()[4]=='ia64':      env.Append(CPPDEFINES=['FRESCOLLECTON'])
282        env = Environment(tools = ['default', 'intelc'], options = opts)  elif env['forcecollres'] == 'off':
283        if env['CXX'] == 'icpc':      env.Append(CPPDEFINES=['FRESCOLLECTOFF'])
284           env['LINK'] = env['CXX'] # version >=9 of intel c++ compiler requires use of icpc to link in C++ runtimes (icc does not). FIXME: this behaviour could be directly incorporated into scons intelc.py  
285     else:  # allow non-standard C if requested
286        env = Environment(tools = ['default'], options = opts)  if env['iknowwhatimdoing']:
287  Help(opts.GenerateHelpText(env))      env.Append(CPPDEFINES=['IKNOWWHATIMDOING'])
288    
289  if env['bounds_check']:  # Disable OpenMP if no flags provided
290     env.Append(CPPDEFINES = [ 'BOUNDS_CHECK' ])  if env['openmp'] and env['omp_flags'] == '':
291     env.Append(CXXDEFINES = [ 'BOUNDS_CHECK' ])     print("OpenMP requested but no flags provided - disabling OpenMP!")
292     bounds_check = env['bounds_check']     env['openmp'] = False
293    
294    if env['openmp']:
295        env.Append(CCFLAGS = env['omp_flags'])
296        if env['omp_ldflags'] != '': env.Append(LINKFLAGS = env['omp_ldflags'])
297    else:
298        env['omp_flags']=''
299        env['omp_ldflags']=''
300    
301    # add debug/non-debug compiler flags
302    if env['debug']:
303        env.Append(CCFLAGS = env['cc_debug'])
304    else:
305        env.Append(CCFLAGS = env['cc_optim'])
306    
307    # always add cc_flags
308    env.Append(CCFLAGS = env['cc_flags'])
309    
310    # add system libraries
311    env.AppendUnique(LIBS = env['sys_libs'])
312    
313    
314    global_revision=ARGUMENTS.get('SVN_VERSION', None)
315    if global_revision:
316        global_revision = re.sub(':.*', '', global_revision)
317        global_revision = re.sub('[^0-9]', '', global_revision)
318        if global_revision == '': global_revision='-2'
319    else:
320      # Get the global Subversion revision number for the getVersion() method
321      try:
322        global_revision = os.popen('svnversion -n .').read()
323        global_revision = re.sub(':.*', '', global_revision)
324        global_revision = re.sub('[^0-9]', '', global_revision)
325        if global_revision == '': global_revision='-2'
326      except:
327        global_revision = '-1'
328    env['svn_revision']=global_revision
329    env.Append(CPPDEFINES=['SVN_VERSION='+global_revision])
330    
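If the source tree carries no Subversion metadata, svnversion cannot report a number and the revision can instead be supplied on the command line; a hypothetical example:

    scons SVN_VERSION=4167 prefix=/opt/escript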
331    if IS_WINDOWS:
332        if not env['build_shared']:
333            env.Append(CPPDEFINES = ['ESYSUTILS_STATIC_LIB'])
334            env.Append(CPPDEFINES = ['PASO_STATIC_LIB'])
335    
336    ###################### Copy required environment vars ########################
337    
338    # Windows doesn't use LD_LIBRARY_PATH but PATH instead
339    if IS_WINDOWS:
340        LD_LIBRARY_PATH_KEY='PATH'
341        env['ENV']['LD_LIBRARY_PATH']=''
342  else:  else:
343     bounds_check = 0      LD_LIBRARY_PATH_KEY='LD_LIBRARY_PATH'
344    
345  #=================================================================================================  # the following env variables are exported for the unit tests
 #  
 #     Initialise Scons Build Environment  
 #     check for user environment variables we are interested in  
 try:  
    tmp = os.environ['PYTHONPATH']  
    env['ENV']['PYTHONPATH'] = tmp  
 except KeyError:  
    pass  
346    
347  env.PrependENVPath('PYTHONPATH', source_root)  for key in 'OMP_NUM_THREADS', 'ESCRIPT_NUM_PROCS', 'ESCRIPT_NUM_NODES':
348        try:
349            env['ENV'][key] = os.environ[key]
350        except KeyError:
351            env['ENV'][key] = 1
352    
353  try:  env_export=env['env_export']
354     omp_num_threads = os.environ['OMP_NUM_THREADS']  env_export.extend(['ESCRIPT_NUM_THREADS','ESCRIPT_HOSTFILE','DISPLAY','XAUTHORITY','PATH','HOME','KMP_MONITOR_STACKSIZE','TMPDIR','TEMP','TMP'])
355  except KeyError:  
356     omp_num_threads = 1  for key in set(env_export):
357  env['ENV']['OMP_NUM_THREADS'] = omp_num_threads      try:
358            env['ENV'][key] = os.environ[key]
359        except KeyError:
360            pass
361    
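Further variables can be forwarded to the build and test environment through the env_export option declared above; a hypothetical options-file entry (the variable names are only illustrative):

    env_export = ['LD_PRELOAD', 'MODULEPATH']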
362  try:  try:
363     path = os.environ['PATH']      env.PrependENVPath(LD_LIBRARY_PATH_KEY, os.environ[LD_LIBRARY_PATH_KEY])
    env['ENV']['PATH'] = path  
364  except KeyError:  except KeyError:
365     omp_num_threads = 1      pass
366    
367  env['ENV']['OMP_NUM_THREADS'] = omp_num_threads  # these shouldn't be needed
368    #for key in 'C_INCLUDE_PATH','CPLUS_INCLUDE_PATH','LIBRARY_PATH':
369    #    try:
370    #        env['ENV'][key] = os.environ[key]
371    #    except KeyError:
372    #        pass
373    
   
 # Copy some variables from the system environment to the build environment  
374  try:  try:
375     env['ENV']['DISPLAY'] = os.environ['DISPLAY']      env['ENV']['PYTHONPATH'] = os.environ['PYTHONPATH']
    env['ENV']['XAUTHORITY'] = os.environ['XAUTHORITY']  
    home_temp = os.environ['HOME']   # MPICH2's mpd needs $HOME to find $HOME/.mpd.conf  
    env['ENV']['HOME'] = home_temp  
376  except KeyError:  except KeyError:
377     pass      pass
378    
379  try:  ######################## Add some custom builders ############################
    tmp = os.environ['PATH']  
    env['ENV']['PATH'] = tmp  
 except KeyError:  
    pass  
380    
381  try:  if env['pythoncmd']=='python':
382     tmp = os.environ['LD_LIBRARY_PATH']      py_builder = Builder(action = build_py, suffix = '.pyc', src_suffix = '.py', single_source=True)
383     print tmp  else:
384     env['ENV']['LD_LIBRARY_PATH'] = tmp      py_builder = Builder(action = env['pythoncmd']+" scripts/py_comp.py $SOURCE $TARGET", suffix = '.pyc', src_suffix = '.py', single_source=True)
 except KeyError:  
    pass  
 #==========================================================================  
 #  
 #    Add some customer builders  
 #  
 py_builder = Builder(action = scons_extensions.build_py, suffix = '.pyc', src_suffix = '.py', single_source=True)  
385  env.Append(BUILDERS = {'PyCompile' : py_builder});  env.Append(BUILDERS = {'PyCompile' : py_builder});
386    
387  runUnitTest_builder = Builder(action = scons_extensions.runUnitTest, suffix = '.passed',  runUnitTest_builder = Builder(action = runUnitTest, suffix = '.passed', src_suffix=env['PROGSUFFIX'], single_source=True)
                               src_suffix=env['PROGSUFFIX'], single_source=True)  
   
388  env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder});  env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder});
389    
390  runPyUnitTest_builder = Builder(action = scons_extensions.runPyUnitTest, suffix = '.passed', src_suffix='.py', single_source=True)  runPyUnitTest_builder = Builder(action = runPyUnitTest, suffix = '.passed', src_suffix='.py', single_source=True)
391  env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder});  env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder});
392    
393  # Convert the options which are held in environment variable into python variables for ease of handling and configure compilation options  epstopdfbuilder = Builder(action = eps2pdf, suffix='.pdf', src_suffix='.eps', single_source=True)
394  try:  env.Append(BUILDERS = {'EpsToPDF' : epstopdfbuilder});
    incinstall = env['incinstall']  
    env.Append(CPPPATH = [incinstall,])  
 except KeyError:  
    incinstall = None  
 try:  
    libinstall = env['libinstall']  
    env.Append(LIBPATH = [libinstall,]) # Adds -L for building of libescript.so libfinley.so escriptcpp.so finleycpp.so  
    env.PrependENVPath('LD_LIBRARY_PATH', libinstall)  
    if IS_WINDOWS_PLATFORM :  
       env.PrependENVPath('PATH', libinstall)  
       env.PrependENVPath('PATH', env['boost_lib_path'])  
 except KeyError:  
    libinstall = None  
 try:  
    pyinstall = env['pyinstall'] # all targets will install into pyinstall/esys but PYTHONPATH points at straight pyinstall so you go import esys.escript etc  
 except KeyError:  
    pyinstall = None  
   
 try:  
    cc_defines = env['cc_defines']  
    env.Append(CPPDEFINES = cc_defines)  
 except KeyError:  
    pass  
 try:  
    flags = env['ar_flags']  
    env.Append(ARFLAGS = flags)  
 except KeyError:  
    ar_flags = None  
 try:  
    sys_libs = env['sys_libs']  
 except KeyError:  
    sys_libs = []  
   
 try:  
    tar_flags = env['tar_flags']  
    env.Replace(TARFLAGS = tar_flags)  
 except KeyError:  
    pass  
   
 try:  
    exinstall = env['exinstall']  
 except KeyError:  
    exinstall = None  
 try:  
    sys_libinstall = env['sys_libinstall']  
 except KeyError:  
    sys_libinstall = None  
 try:  
    sys_pyinstall = env['sys_pyinstall']  
 except KeyError:  
    sys_pyinstall = None  
 try:  
    sys_exinstall = env['sys_exinstall']  
 except KeyError:  
    sys_exinstall = None  
   
 # ====================== debugging ===================================  
 try:  
    dodebug = env['dodebug']  
 except KeyError:  
    dodebug = None  
395    
396  # === switch on omp ===================================================  ############################ Dependency checks ###############################
 try:  
   omp_flags = env['omp_flags']  
 except KeyError:  
   omp_flags = ''  
397    
398  try:  # Create a Configure() environment to check for compilers and python
399    omp_flags_debug = env['omp_flags_debug']  conf = Configure(env.Clone())
 except KeyError:  
   omp_flags_debug = ''  
   
 # ========= use mpi? =====================================================  
 try:  
    useMPI = env['useMPI']  
 except KeyError:  
    useMPI = None  
 # ========= set compiler flags ===========================================  
400    
401  # Can't use MPI and OpenMP simultaneously at this time  ######## Test that the compilers work
402  if useMPI:  
403      omp_flags=''  if 'CheckCC' in dir(conf): # exists since scons 1.1.0
404      omp_flags_debug=''      if not conf.CheckCC():
405            print("Cannot run C compiler '%s' (check config.log)" % (env['CC']))
406            Exit(1)
407        if not conf.CheckCXX():
408            print("Cannot run C++ compiler '%s' (check config.log)" % (env['CXX']))
409            Exit(1)
410    else:
411        if not conf.CheckFunc('printf', language='c'):
412            print("Cannot run C compiler '%s' (check config.log)" % (env['CC']))
413            Exit(1)
414        if not conf.CheckFunc('printf', language='c++'):
415            print("Cannot run C++ compiler '%s' (check config.log)" % (env['CXX']))
416            Exit(1)
417    
418    if conf.CheckFunc('gethostname'):
419        conf.env.Append(CPPDEFINES = ['HAVE_GETHOSTNAME'])
420    
421    ######## Python headers & library (required)
422    
423    # First we check to see if the config file has specified
424    # where to find the files. Ideally, this should be automatic
425    # but we need to deal with the case where python is not in its
426    # install directory.
427    # Use the python scons is running
428    if env['pythoncmd']=='python':
429        python_inc_path=sysconfig.get_python_inc()
430        if IS_WINDOWS:
431            python_lib_path=os.path.join(sysconfig.get_config_var('prefix'), 'libs')
432        elif env['PLATFORM']=='darwin':
433            python_lib_path=sysconfig.get_config_var('LIBPL')
434        else:
435            python_lib_path=sysconfig.get_config_var('LIBDIR')
436    
437        #python_libs=[sysconfig.get_config_var('LDLIBRARY')] # only on linux
438        if IS_WINDOWS:
439            python_libs=['python%s%s'%(sys.version_info[0], sys.version_info[1])]
440        else:
441            python_libs=['python'+sysconfig.get_python_version()]
442    
443    #if we want to use a python other than the one scons is running
444    else:
445        initstring='from __future__ import print_function;from distutils import sysconfig;'
446        if env['pythonlibname']!='':
447            python_libs=env['pythonlibname']
448        else:   # work it out by calling python    
449            if IS_WINDOWS:
450                cmd='print("python%s%s"%(sys.version_info[0], sys.version_info[1]))'
451            else:
452                cmd='print("python"+sysconfig.get_python_version())'
453            p=Popen([env['pythoncmd'], '-c', initstring+cmd], stdout=PIPE)
454            python_libs=p.stdout.readline()
455            if env['usepython3']:       # This is to convert unicode str into py2 string
456                python_libs=python_libs.encode() # If scons runs on py3 then this must be rethought
457            p.wait()
458            python_libs=python_libs.strip()
459    
 if dodebug:  
     try:  
       flags = env['cc_flags_debug'] + ' ' + omp_flags_debug  
       env.Append(CCFLAGS = flags)  
     except KeyError:  
       pass  
 else:  
    try:  
       flags = env['cc_flags'] + ' ' + omp_flags  
       env.Append(CCFLAGS = flags)  
    except KeyError:  
       pass  
 if dodebug:  
      try:  
         flags = env['cxx_flags_debug']  
         env.Append(CXXFLAGS = flags)  
      except KeyError:  
         pass  
 else:  
      try:  
         flags = env['cxx_flags']  
         env.Append(CXXFLAGS = flags)  
      except KeyError:  
         pass  
 try:  
      if env['CC'] == 'gcc': env.Append(CCFLAGS = "-pedantic-errors -Wno-long-long")  
 except:  
      pass  
   
 # ============= Remember what options were used in the compile =====================================  
 if not IS_WINDOWS_PLATFORM:  
   env.Execute("/bin/rm -f " + libinstall + "/Compiled.with.*")  
   if dodebug:       env.Execute("touch " + libinstall + "/Compiled.with.debug")  
   if useMPI:        env.Execute("touch " + libinstall + "/Compiled.with.mpi")  
   if omp_flags != '':   env.Execute("touch " + libinstall + "/Compiled.with.OpenMP")  
   if bounds_check:  env.Execute("touch " + libinstall + "/Compiled.with.bounds_check")  
   
 # ============= set mkl (but only of no MPI) =====================================  
 if not useMPI:  
    try:  
       includes = env['mkl_path']  
       env.Append(CPPPATH = [includes,])  
    except KeyError:  
       pass  
   
    try:  
       lib_path = env['mkl_lib_path']  
       env.Append(LIBPATH = [lib_path,])  
       env['ENV']['LD_LIBRARY_PATH']+=":"+lib_path  
    except KeyError:  
       pass  
   
    try:  
       mkl_libs = env['mkl_libs']  
    except KeyError:  
       mkl_libs = []  
 else:  
      mkl_libs = []  
   
 # ============= set scsl (but only of no MPI) =====================================  
 if not useMPI:  
    try:  
       includes = env['scsl_path']  
       env.Append(CPPPATH = [includes,])  
    except KeyError:  
       pass  
   
    try:  
       lib_path = env['scsl_lib_path']  
       env.Append(LIBPATH = [lib_path,])  
       env['ENV']['LD_LIBRARY_PATH']+=":"+lib_path  
    except KeyError:  
       pass  
460        
461     try:      # Now we know whether we are using python3 or not
462        scsl_libs = env['scsl_libs']      p=Popen([env['pythoncmd'], '-c',  initstring+'print(sysconfig.get_python_inc())'], stdout=PIPE)
463     except KeyError:      python_inc_path=p.stdout.readline()
464        scsl_libs = [ ]      if env['usepython3']:
465             python_inc_path=python_inc_path.encode()
466  else:      p.wait()  
467      scsl_libs =  []      python_inc_path=python_inc_path.strip()
468        if IS_WINDOWS:
469  # ============= set TRILINOS (but only with MPI) =====================================          cmd="os.path.join(sysconfig.get_config_var('prefix'), 'libs')"
470  if useMPI:      elif env['PLATFORM']=='darwin':
471     try:          cmd="sysconfig.get_config_var(\"LIBPL\")"
472        includes = env['trilinos_path']      else:
473        env.Append(CPPPATH = [includes,])          cmd="sysconfig.get_config_var(\"LIBDIR\")"
474     except KeyError:  
475        pass      p=Popen([env['pythoncmd'], '-c', initstring+'print('+cmd+')'], stdout=PIPE)
476        python_lib_path=p.stdout.readline()
477     try:      if env['usepython3']:
478        lib_path = env['trilinos_lib_path']          python_lib_path=python_lib_path.decode()
479        env.Append(LIBPATH = [lib_path,])      p.wait()
480     except KeyError:      python_lib_path=python_lib_path.strip()
481        pass  
482    #Check for an override from the config file.
483     try:  #Ideally, this should be automatic
484        trilinos_libs = env['trilinos_libs']  #But we need to deal with the case where python is not in its INSTALL
485     except KeyError:  #Directory
486        trilinos_libs = []  if env['pythonlibpath']!='':
487  else:      python_lib_path=env['pythonlibpath']
488       trilinos_libs = []  
489    if env['pythonincpath']!='':
490        python_inc_path=env['pythonincpath']
491  # ============= set umfpack (but only without MPI) =====================================  
492  umf_libs=[ ]  
493  if not useMPI:  if sysheaderopt == '':
494     try:      conf.env.AppendUnique(CPPPATH = [python_inc_path])
495        includes = env['umf_path']  else:
496        env.Append(CPPPATH = [includes,])      conf.env.Append(CCFLAGS = [sysheaderopt, python_inc_path])
497     except KeyError:  
498        pass  conf.env.AppendUnique(LIBPATH = [python_lib_path])
499    conf.env.AppendUnique(LIBS = python_libs)
500     try:  # The wrapper script needs to find the libs
501        lib_path = env['umf_lib_path']  conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, python_lib_path)
502        env.Append(LIBPATH = [lib_path,])  
503     except KeyError:  if not conf.CheckCHeader('Python.h'):
504        pass      print("Cannot find python include files (tried 'Python.h' in directory %s)" % (python_inc_path))
505        Exit(1)
506     try:  if not conf.CheckFunc('Py_Exit'):
507        umf_libs = env['umf_libs']      print("Cannot find python library method Py_Exit (tried %s in directory %s)" % (python_libs, python_lib_path))
508        umf_libs+=umf_libs      Exit(1)
509     except KeyError:  
510        pass  ## reuse conf to check for numpy header (optional)
511    if env['usepython3']:
512     try:      # FIXME: This is until we can work out how to make the checks in python 3
513        includes = env['ufc_path']      conf.env['numpy_h']=False
514        env.Append(CPPPATH = [includes,])  else:
515     except KeyError:      if conf.CheckCXXHeader(['Python.h','numpy/ndarrayobject.h']):
516        pass          conf.env.Append(CPPDEFINES = ['HAVE_NUMPY_H'])
517            conf.env['numpy_h']=True
518     try:      else:
519        includes = env['amd_path']          conf.env['numpy_h']=False
520        env.Append(CPPPATH = [includes,])  
521     except KeyError:  # Commit changes to environment
522        pass  env = conf.Finish()
523    
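The optional-library sections below all follow the same pattern. findLibWithHeader is a helper imported from site_init.py in this tree; the sketch below only illustrates the calling pattern as used here, with 'foo' standing in for netcdf, papi, mkl and so on, not the helper's actual implementation:

    if env['foo']:                                          # hypothetical placeholder option
        foo_inc, foo_lib = findLibWithHeader(env, env['foo_libs'], 'foo.h',
                                             env['foo_prefix'], lang='c')
        env.AppendUnique(CPPPATH = [foo_inc])               # header location
        env.AppendUnique(LIBPATH = [foo_lib])               # library location
        env.AppendUnique(LIBS = env['foo_libs'])
        env.PrependENVPath(LD_LIBRARY_PATH_KEY, foo_lib)    # so wrappers/tests find the libs
        env.Append(CPPDEFINES = ['USE_FOO'])                # enable the feature in the code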
524     try:  ######## boost (required)
525        lib_path = env['amd_lib_path']  
526        env.Append(LIBPATH = [lib_path,])  boost_inc_path,boost_lib_path=findLibWithHeader(env, env['boost_libs'], 'boost/python.hpp', env['boost_prefix'], lang='c++')
527     except KeyError:  if sysheaderopt == '':
528        pass      env.AppendUnique(CPPPATH = [boost_inc_path])
529    else:
530     try:      # This is required because we can't -isystem /usr/include since it breaks
531        amd_libs = env['amd_libs']      # std includes
532        umf_libs+=amd_libs      if os.path.normpath(boost_inc_path) == '/usr/include':
533     except KeyError:          conf.env.Append(CCFLAGS=[sysheaderopt, os.path.join(boost_inc_path,'boost')])
534        pass      else:
535            env.Append(CCFLAGS=[sysheaderopt, boost_inc_path])
536  # ============= set TRILINOS (but only with MPI) =====================================  
537  if useMPI:  env.AppendUnique(LIBPATH = [boost_lib_path])
538     try:  env.AppendUnique(LIBS = env['boost_libs'])
539        includes = env['trilinos_path']  env.PrependENVPath(LD_LIBRARY_PATH_KEY, boost_lib_path)
540        env.Append(CPPPATH = [includes,])  
541     except KeyError:  ######## numpy (required)
542        pass  
543    if not detectModule(env, 'numpy'):
544     try:      print("Cannot import numpy. If it is installed try setting your PYTHONPATH and probably %s"%LD_LIBRARY_PATH_KEY)
545        lib_path = env['trilinos_lib_path']      Exit(1)
546        env.Append(LIBPATH = [lib_path,])  
547     except KeyError:  ######## CppUnit (required for tests)
548        pass  
549    try:
550     try:      cppunit_inc_path,cppunit_lib_path=findLibWithHeader(env, env['cppunit_libs'], 'cppunit/TestFixture.h', env['cppunit_prefix'], lang='c++')
551        trilinos_libs = env['trilinos_libs']      env.AppendUnique(CPPPATH = [cppunit_inc_path])
552     except KeyError:      env.AppendUnique(LIBPATH = [cppunit_lib_path])
553        trilinos_libs = []      env.PrependENVPath(LD_LIBRARY_PATH_KEY, cppunit_lib_path)
554  else:      env['cppunit']=True
555       trilinos_libs = []  except:
556        env['cppunit']=False
 # ============= set blas =====================================  
 try:  
    includes = env['blas_path']  
    env.Append(CPPPATH = [includes,])  
 except KeyError:  
    pass  
   
 try:  
    lib_path = env['blas_lib_path']  
    env.Append(LIBPATH = [lib_path,])  
 except KeyError:  
    pass  
557    
558  try:  ######## sympy (optional)
    blas_libs = env['blas_libs']  
 except KeyError:  
    blas_libs = [ ]  
559    
560  # ========== netcdf ====================================  if detectModule(env, 'sympy'):
561  try:      env['sympy'] = True
    useNetCDF = env['useNetCDF']  
 except KeyError:  
    useNetCDF = 'yes'  
    pass  
       
 if useNetCDF == 'yes':  
    try:  
       netCDF_libs = env['netCDF_libs']  
    except KeyError:  
       pass  
   
    env.Append(LIBS = netCDF_libs)  
    env.Append(CPPDEFINES = [ 'USE_NETCDF' ])  
    try:  
       includes = env['netCDF_path']  
       env.Append(CPPPATH = [includes,])  
    except KeyError:  
       pass  
   
    try:  
       lib_path = env['netCDF_lib_path']  
       env.Append(LIBPATH = [ lib_path, ])  
       env['ENV']['LD_LIBRARY_PATH']+=":"+lib_path  
       if IS_WINDOWS_PLATFORM :  
          env.PrependENVPath('PATH', lib_path)  
    except KeyError:  
       pass  
562  else:  else:
563     print "Warning: Installation is not configured with netCDF. Some I/O function may not be available."      print("Cannot import sympy. Symbolic toolbox and nonlinear PDEs will not be available.")
564     netCDF_libs=[ ]      env['sympy'] = False
565    
566  # ====================== boost ======================================  ######## netCDF (optional)
567  try:  
568     includes = env['boost_path']  netcdf_inc_path=''
569     env.Append(CPPPATH = [includes,])  netcdf_lib_path=''
570  except KeyError:  if env['netcdf']:
571     pass      netcdf_inc_path,netcdf_lib_path=findLibWithHeader(env, env['netcdf_libs'], 'netcdf.h', env['netcdf_prefix'], lang='c++')
572  try:      env.AppendUnique(CPPPATH = [netcdf_inc_path])
573     lib_path = env['boost_lib_path']      env.AppendUnique(LIBPATH = [netcdf_lib_path])
574     env.Append(LIBPATH = [lib_path,])      env.AppendUnique(LIBS = env['netcdf_libs'])
575     env['ENV']['LD_LIBRARY_PATH']+=":"+lib_path      env.PrependENVPath(LD_LIBRARY_PATH_KEY, netcdf_lib_path)
576     if IS_WINDOWS_PLATFORM :      env.Append(CPPDEFINES = ['USE_NETCDF'])
577        env.PrependENVPath('PATH', lib_path)  
578  except KeyError:  ######## PAPI (optional)
579     pass  
580  try:  papi_inc_path=''
581     boost_lib = env['boost_lib']  papi_lib_path=''
582  except KeyError:  if env['papi']:
583     boost_lib = None      papi_inc_path,papi_lib_path=findLibWithHeader(env, env['papi_libs'], 'papi.h', env['papi_prefix'], lang='c')
584  # ====================== python ======================================      env.AppendUnique(CPPPATH = [papi_inc_path])
585  try:      env.AppendUnique(LIBPATH = [papi_lib_path])
586     includes = env['python_path']      env.AppendUnique(LIBS = env['papi_libs'])
587     env.Append(CPPPATH = [includes,])      env.PrependENVPath(LD_LIBRARY_PATH_KEY, papi_lib_path)
588  except KeyError:      env.Append(CPPDEFINES = ['BLOCKPAPI'])
589     pass  
590  try:  ######## MKL (optional)
591     lib_path = env['python_lib_path']  
592     env['ENV']['LD_LIBRARY_PATH']+=":"+lib_path  mkl_inc_path=''
593     env.Append(LIBPATH = [lib_path,])  mkl_lib_path=''
594  except KeyError:  if env['mkl']:
595     pass      mkl_inc_path,mkl_lib_path=findLibWithHeader(env, env['mkl_libs'], 'mkl_solver.h', env['mkl_prefix'], lang='c')
596  try:      env.AppendUnique(CPPPATH = [mkl_inc_path])
597     python_lib = env['python_lib']      env.AppendUnique(LIBPATH = [mkl_lib_path])
598  except KeyError:      env.AppendUnique(LIBS = env['mkl_libs'])
599     python_lib = None      env.PrependENVPath(LD_LIBRARY_PATH_KEY, mkl_lib_path)
600  # =============== documentation =======================================      env.Append(CPPDEFINES = ['MKL'])
601  try:  
602     doxygen_path = env['doxygen_path']  ######## UMFPACK (optional)
603  except KeyError:  
604     doxygen_path = None  umfpack_inc_path=''
605  try:  umfpack_lib_path=''
606     epydoc_path = env['epydoc_path']  if env['umfpack']:
607  except KeyError:      umfpack_inc_path,umfpack_lib_path=findLibWithHeader(env, env['umfpack_libs'], 'umfpack.h', env['umfpack_prefix'], lang='c')
608     epydoc_path = None      env.AppendUnique(CPPPATH = [umfpack_inc_path])
609  # =============== PAPI =======================================      env.AppendUnique(LIBPATH = [umfpack_lib_path])
610  try:      env.AppendUnique(LIBS = env['umfpack_libs'])
611     includes = env['papi_path']      env.PrependENVPath(LD_LIBRARY_PATH_KEY, umfpack_lib_path)
612     env.Append(CPPPATH = [includes,])      env.Append(CPPDEFINES = ['UMFPACK'])
613  except KeyError:  
614     pass  ######## LAPACK (optional)
615  try:  
616     lib_path = env['papi_lib_path']  if env['lapack']=='mkl' and not env['mkl']:
617     env.Append(LIBPATH = [lib_path,])      print("mkl_lapack requires MKL!")
618  except KeyError:      Exit(1)
619     pass  
620  try:  env['uselapack'] = env['lapack']!='none'
621     papi_libs = env['papi_libs']  lapack_inc_path=''
622  except KeyError:  lapack_lib_path=''
623     papi_libs = None  if env['uselapack']:
624  # ============= set mpi =====================================      header='clapack.h'
625  if useMPI:      if env['lapack']=='mkl':
626     env.Append(CPPDEFINES=['PASO_MPI',])          env.AppendUnique(CPPDEFINES = ['MKL_LAPACK'])
627     try:          header='mkl_lapack.h'
628        includes = env['mpi_path']      lapack_inc_path,lapack_lib_path=findLibWithHeader(env, env['lapack_libs'], header, env['lapack_prefix'], lang='c')
629        env.Append(CPPPATH = [includes,])      env.AppendUnique(CPPPATH = [lapack_inc_path])
630     except KeyError:      env.AppendUnique(LIBPATH = [lapack_lib_path])
631        pass      env.AppendUnique(LIBS = env['lapack_libs'])
632     try:      env.Append(CPPDEFINES = ['USE_LAPACK'])
633        lib_path = env['mpi_lib_path']  
######## Silo (optional)

silo_inc_path=''
silo_lib_path=''
if env['silo']:
    silo_inc_path,silo_lib_path=findLibWithHeader(env, env['silo_libs'], 'silo.h', env['silo_prefix'], lang='c')
    env.AppendUnique(CPPPATH = [silo_inc_path])
    env.AppendUnique(LIBPATH = [silo_lib_path])
    # Note that we do not add the libs since they are only needed for the
    # weipa library and tools.
    #env.AppendUnique(LIBS = [env['silo_libs']])

######## VSL random numbers (optional)
if env['vsl_random']:
    env.Append(CPPDEFINES = ['MKLRANDOM'])

######## VisIt (optional)

visit_inc_path=''
visit_lib_path=''
if env['visit']:
    visit_inc_path,visit_lib_path=findLibWithHeader(env, env['visit_libs'], 'VisItControlInterface_V2.h', env['visit_prefix'], lang='c')
    env.AppendUnique(CPPPATH = [visit_inc_path])
    env.AppendUnique(LIBPATH = [visit_lib_path])

######## MPI (optional)

if env['mpi']=='no':
    env['mpi']='none'

env['usempi'] = env['mpi']!='none'
mpi_inc_path=''
mpi_lib_path=''
if env['usempi']:
    mpi_inc_path,mpi_lib_path=findLibWithHeader(env, env['mpi_libs'], 'mpi.h', env['mpi_prefix'], lang='c')
    env.AppendUnique(CPPPATH = [mpi_inc_path])
    env.AppendUnique(LIBPATH = [mpi_lib_path])
    env.AppendUnique(LIBS = env['mpi_libs'])
    env.PrependENVPath(LD_LIBRARY_PATH_KEY, mpi_lib_path)
    env.Append(CPPDEFINES = ['ESYS_MPI', 'MPI_NO_CPPBIND', 'MPICH_IGNORE_CXX_SEEK'])
    # NetCDF 4.1 defines MPI_Comm et al. if MPI_INCLUDED is not defined!
    # On the other hand MPT and OpenMPI don't define the latter so we have to
    # do that here
    if env['netcdf'] and env['mpi'] in ['MPT','OPENMPI']:
        env.Append(CPPDEFINES = ['MPI_INCLUDED'])

######## BOOMERAMG (optional)

if env['mpi'] == 'none': env['boomeramg'] = False

boomeramg_inc_path=''
boomeramg_lib_path=''
if env['boomeramg']:
    boomeramg_inc_path,boomeramg_lib_path=findLibWithHeader(env, env['boomeramg_libs'], 'HYPRE.h', env['boomeramg_prefix'], lang='c')
    env.AppendUnique(CPPPATH = [boomeramg_inc_path])
    env.AppendUnique(LIBPATH = [boomeramg_lib_path])
    env.AppendUnique(LIBS = env['boomeramg_libs'])
    env.PrependENVPath(LD_LIBRARY_PATH_KEY, boomeramg_lib_path)
    env.Append(CPPDEFINES = ['BOOMERAMG'])

######## ParMETIS (optional)

if not env['usempi']: env['parmetis'] = False

parmetis_inc_path=''
parmetis_lib_path=''
if env['parmetis']:
    parmetis_inc_path,parmetis_lib_path=findLibWithHeader(env, env['parmetis_libs'], 'parmetis.h', env['parmetis_prefix'], lang='c')
    env.AppendUnique(CPPPATH = [parmetis_inc_path])
    env.AppendUnique(LIBPATH = [parmetis_lib_path])
    env.AppendUnique(LIBS = env['parmetis_libs'])
    env.PrependENVPath(LD_LIBRARY_PATH_KEY, parmetis_lib_path)
    env.Append(CPPDEFINES = ['USE_PARMETIS'])

######## gmsh (optional, for tests)

try:
    p=Popen(['gmsh', '-info'], stderr=PIPE)
    _,e=p.communicate()
    if e.split().count("MPI"):
        env['gmsh']='m'
    else:
        env['gmsh']='s'
except OSError:
    env['gmsh']=False
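
# Illustrative sketch (not part of the SConstruct in either revision): the
# probe above reads the stderr of 'gmsh -info' and looks for the "MPI" token.
# On Python 3, Popen.communicate() returns bytes rather than str, so a
# version-agnostic variant of the same check could decode explicitly:
def probe_gmsh():
    """Return 'm' for an MPI-enabled gmsh, 's' for a serial one, False if absent."""
    try:
        p = Popen(['gmsh', '-info'], stderr=PIPE)
        _, e = p.communicate()
        if isinstance(e, bytes):
            e = e.decode('utf-8', 'ignore')
        return 'm' if e.split().count('MPI') else 's'
    except OSError:
        return False
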
######## PDFLaTeX (for documentation)
if 'PDF' in dir(env) and '.tex' in env.PDF.builder.src_suffixes(env):
    env['pdflatex']=True
else:
    env['pdflatex']=False

######################## Summarize our environment ###########################

# keep some of our install paths first in the list for the unit tests
env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])
env.PrependENVPath('PYTHONPATH', prefix)
env['ENV']['ESCRIPT_ROOT'] = prefix

if not env['verbose']:
    env['CCCOMSTR'] = "Compiling $TARGET"
    env['CXXCOMSTR'] = "Compiling $TARGET"
    env['SHCCCOMSTR'] = "Compiling $TARGET"
    env['SHCXXCOMSTR'] = "Compiling $TARGET"
    env['ARCOMSTR'] = "Linking $TARGET"
    env['LINKCOMSTR'] = "Linking $TARGET"
    env['SHLINKCOMSTR'] = "Linking $TARGET"
    env['PDFLATEXCOMSTR'] = "Building $TARGET from LaTeX input $SOURCES"
    env['BIBTEXCOMSTR'] = "Generating bibliography $TARGET"
    env['MAKEINDEXCOMSTR'] = "Generating index $TARGET"
    env['PDFLATEXCOMSTR'] = "Building $TARGET from LaTeX input $SOURCES"
    #Progress(['Checking -\r', 'Checking \\\r', 'Checking |\r', 'Checking /\r'], interval=17)

print("")
print("*** Config Summary (see config.log and lib/buildvars for details) ***")
print("Escript/Finley revision %s"%global_revision)
print("  Install prefix:  %s"%env['prefix'])
print("          Python:  %s"%sysconfig.PREFIX)
print("           boost:  %s"%env['boost_prefix'])
print("           numpy:  YES")
if env['usempi']:
    print("             MPI:  YES (flavour: %s)"%env['mpi'])
else:
    print("             MPI:  DISABLED")
if env['uselapack']:
    print("          LAPACK:  YES (flavour: %s)"%env['lapack'])
else:
    print("          LAPACK:  DISABLED")
d_list=[]
e_list=[]
for i in 'debug','openmp','boomeramg','mkl','netcdf','papi','parmetis','silo','sympy','umfpack','visit','vsl_random':
    if env[i]: e_list.append(i)
    else: d_list.append(i)
for i in e_list:
    print("%16s:  YES"%i)
for i in d_list:
    print("%16s:  DISABLED"%i)
if env['cppunit']:
    print("         CppUnit:  FOUND")
else:
    print("         CppUnit:  NOT FOUND")
if env['gmsh']=='m':
    print("            gmsh:  FOUND, MPI-ENABLED")
elif env['gmsh']=='s':
    print("            gmsh:  FOUND")
else:
    print("            gmsh:  NOT FOUND")
if env['numpy_h']:
    print("   numpy headers:  FOUND")
else:
    print("   numpy headers:  NOT FOUND")
print("   vsl_random:  %s"%env['vsl_random'])

if ((fatalwarning != '') and (env['werror'])):
    print("  Treating warnings as errors")
else:
    print("  NOT treating warnings as errors")
print("")

####################### Configure the subdirectories #########################

from grouptest import *

TestGroups=[]

# keep an environment without warnings-as-errors
dodgy_env=env.Clone()

# now add warnings-as-errors flags. This needs to be done after configuration
# because the scons test files have warnings in them
if ((fatalwarning != '') and (env['werror'])):
    env.Append(CCFLAGS = fatalwarning)

Export(
  ['env',
   'dodgy_env',
   'IS_WINDOWS',
   'TestGroups'
  ]
)

env.SConscript(dirs = ['tools/escriptconvert'], variant_dir='$BUILD_DIR/$PLATFORM/tools/escriptconvert', duplicate=0)
env.SConscript(dirs = ['paso/src'], variant_dir='$BUILD_DIR/$PLATFORM/paso', duplicate=0)
env.SConscript(dirs = ['weipa/src'], variant_dir='$BUILD_DIR/$PLATFORM/weipa', duplicate=0)
env.SConscript(dirs = ['escript/src'], variant_dir='$BUILD_DIR/$PLATFORM/escript', duplicate=0)
env.SConscript(dirs = ['esysUtils/src'], variant_dir='$BUILD_DIR/$PLATFORM/esysUtils', duplicate=0)
env.SConscript(dirs = ['pasowrap/src'], variant_dir='$BUILD_DIR/$PLATFORM/pasowrap', duplicate=0)
env.SConscript(dirs = ['dudley/src'], variant_dir='$BUILD_DIR/$PLATFORM/dudley', duplicate=0)
env.SConscript(dirs = ['finley/src'], variant_dir='$BUILD_DIR/$PLATFORM/finley', duplicate=0)
env.SConscript(dirs = ['ripley/src'], variant_dir='$BUILD_DIR/$PLATFORM/ripley', duplicate=0)
env.SConscript(dirs = ['downunder/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/downunder', duplicate=0)
env.SConscript(dirs = ['modellib/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/modellib', duplicate=0)
env.SConscript(dirs = ['pycad/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/pycad', duplicate=0)
env.SConscript(dirs = ['pythonMPI/src'], variant_dir='$BUILD_DIR/$PLATFORM/pythonMPI', duplicate=0)
env.SConscript(dirs = ['doc'], variant_dir='$BUILD_DIR/$PLATFORM/doc', duplicate=0)
env.SConscript(dirs = ['paso/profiling'], variant_dir='$BUILD_DIR/$PLATFORM/paso/profiling', duplicate=0)
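
# Illustrative sketch (not part of the SConstruct in either revision): the
# r1391 code removed above carried a FIXME asking for "nifty python" to
# generate its very similar SConscript lines. The calls above still follow one
# pattern (a 'foo/src' or 'foo/py_src' source dir builds under
# '$BUILD_DIR/$PLATFORM/foo'), so a helper could generate them from a list:
def sconscript_all(env, subdirs):
    """Invoke env.SConscript for each subdir with the matching variant_dir."""
    for sdir in subdirs:
        variant = '$BUILD_DIR/$PLATFORM/' + re.sub(r'/(py_)?src$', '', sdir)
        env.SConscript(dirs=[sdir], variant_dir=variant, duplicate=0)
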
######################## Populate the buildvars file #########################

# remove obsolete file
if not env['usempi']:
    Execute(Delete(os.path.join(env['libinstall'], 'pythonMPI')))
    Execute(Delete(os.path.join(env['libinstall'], 'pythonMPIredirect')))

# Try to extract the boost version from version.hpp
boosthpp=open(os.path.join(boost_inc_path, 'boost', 'version.hpp'))
boostversion='unknown'
try:
    for line in boosthpp:
        ver=re.match(r'#define BOOST_VERSION (\d+)',line)
        if ver:
            boostversion=ver.group(1)
except StopIteration:
    pass
boosthpp.close()
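
# Illustrative sketch (not part of the SConstruct in either revision): the
# BOOST_VERSION macro captured above encodes the release as
# major*100000 + minor*100 + patch, so the raw number can be rendered as a
# dotted version string if that is ever preferred for reporting:
def boost_version_string(boost_version_number):
    """Convert e.g. '104900' into '1.49.0'."""
    v = int(boost_version_number)
    return "%d.%d.%d" % (v // 100000, (v // 100) % 1000, v % 100)
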
buildvars=open(os.path.join(env['libinstall'], 'buildvars'), 'w')
buildvars.write("svn_revision="+str(global_revision)+"\n")
buildvars.write("prefix="+prefix+"\n")
buildvars.write("cc="+env['CC']+"\n")
buildvars.write("cxx="+env['CXX']+"\n")
if env['pythoncmd']=='python':
    buildvars.write("python="+sys.executable+"\n")
    buildvars.write("python_version="+str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2])+"\n")
else:
    buildvars.write("python="+env['pythoncmd']+"\n")
    p=Popen([env['pythoncmd'], '-c', 'from __future__ import print_function;import sys;print(str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2]))'], stdout=PIPE)
    verstring=p.stdout.readline().strip()
    p.wait()
    buildvars.write("python_version="+verstring+"\n")
buildvars.write("boost_inc_path="+boost_inc_path+"\n")
buildvars.write("boost_lib_path="+boost_lib_path+"\n")
buildvars.write("boost_version="+boostversion+"\n")
buildvars.write("debug=%d\n"%int(env['debug']))
buildvars.write("openmp=%d\n"%int(env['openmp']))
buildvars.write("mpi=%s\n"%env['mpi'])
buildvars.write("mpi_inc_path=%s\n"%mpi_inc_path)
buildvars.write("mpi_lib_path=%s\n"%mpi_lib_path)
buildvars.write("lapack=%s\n"%env['lapack'])
buildvars.write("vsl_random=%d\n"%int(env['vsl_random']))
for i in 'netcdf','parmetis','papi','mkl','umfpack','boomeramg','silo','visit':
    buildvars.write("%s=%d\n"%(i, int(env[i])))
    if env[i]:
        buildvars.write("%s_inc_path=%s\n"%(i, eval(i+'_inc_path')))
        buildvars.write("%s_lib_path=%s\n"%(i, eval(i+'_lib_path')))
buildvars.close()
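
# Illustrative sketch (not part of the SConstruct in either revision):
# lib/buildvars is written above as plain 'key=value' lines, so external
# tooling can read it back with something as simple as:
def read_buildvars(path):
    """Parse a buildvars file into a {key: value} dictionary of strings."""
    info = {}
    with open(path) as f:
        for line in f:
            key, sep, value = line.rstrip('\n').partition('=')
            if sep:
                info[key] = value
    return info
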
################### Targets to build and install libraries ###################

target_init = env.Command(os.path.join(env['pyinstall'],'__init__.py'), None, Touch('$TARGET'))
env.Alias('target_init', [target_init])
# delete buildvars upon cleanup
env.Clean('target_init', os.path.join(env['libinstall'], 'buildvars'))

# The headers have to be installed prior to build in order to satisfy
# #include <paso/Common.h>
env.Alias('build_esysUtils', ['install_esysUtils_headers', 'build_esysUtils_lib'])
env.Alias('install_esysUtils', ['build_esysUtils', 'install_esysUtils_lib'])

env.Alias('build_paso', ['install_paso_headers', 'build_paso_lib'])
env.Alias('install_paso', ['build_paso', 'install_paso_lib'])

env.Alias('build_escript', ['install_escript_headers', 'build_escript_lib', 'build_escriptcpp_lib'])
env.Alias('install_escript', ['build_escript', 'install_escript_lib', 'install_escriptcpp_lib', 'install_escript_py'])

env.Alias('build_pasowrap', ['install_pasowrap_headers', 'build_pasowrap_lib', 'build_pasowrapcpp_lib'])
env.Alias('install_pasowrap', ['build_pasowrap', 'install_pasowrap_lib', 'install_pasowrapcpp_lib', 'install_pasowrap_py'])

env.Alias('build_dudley', ['install_dudley_headers', 'build_dudley_lib', 'build_dudleycpp_lib'])
env.Alias('install_dudley', ['build_dudley', 'install_dudley_lib', 'install_dudleycpp_lib', 'install_dudley_py'])

env.Alias('build_finley', ['install_finley_headers', 'build_finley_lib', 'build_finleycpp_lib'])
env.Alias('install_finley', ['build_finley', 'install_finley_lib', 'install_finleycpp_lib', 'install_finley_py'])

env.Alias('build_ripley', ['install_ripley_headers', 'build_ripley_lib', 'build_ripleycpp_lib'])
env.Alias('install_ripley', ['build_ripley', 'install_ripley_lib', 'install_ripleycpp_lib', 'install_ripley_py'])

env.Alias('build_weipa', ['install_weipa_headers', 'build_weipa_lib', 'build_weipacpp_lib'])
env.Alias('install_weipa', ['build_weipa', 'install_weipa_lib', 'install_weipacpp_lib', 'install_weipa_py'])

env.Alias('build_escriptreader', ['install_weipa_headers', 'build_escriptreader_lib'])
env.Alias('install_escriptreader', ['build_escriptreader', 'install_escriptreader_lib'])

# Now gather all the above into some easy targets: build_all and install_all
build_all_list = []
build_all_list += ['build_esysUtils']
build_all_list += ['build_paso']
build_all_list += ['build_escript']
build_all_list += ['build_pasowrap']
build_all_list += ['build_dudley']
build_all_list += ['build_finley']
build_all_list += ['build_ripley']
build_all_list += ['build_weipa']
if not IS_WINDOWS: build_all_list += ['build_escriptreader']
if env['usempi']:   build_all_list += ['build_pythonMPI']
build_all_list += ['build_escriptconvert']
env.Alias('build_all', build_all_list)

install_all_list = []
install_all_list += ['target_init']
install_all_list += ['install_esysUtils']
install_all_list += ['install_paso']
install_all_list += ['install_escript']
install_all_list += ['install_pasowrap']
install_all_list += ['install_dudley']
install_all_list += ['install_finley']
install_all_list += ['install_ripley']
install_all_list += ['install_weipa']
if not IS_WINDOWS: install_all_list += ['install_escriptreader']
install_all_list += ['install_downunder_py']
install_all_list += ['install_modellib_py']
install_all_list += ['install_pycad_py']
if env['usempi']:   install_all_list += ['install_pythonMPI']
install_all_list += ['install_escriptconvert']
env.Alias('install_all', install_all_list)

# Default target is install
env.Default('install_all')
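
# Usage note (editorial, not in either revision): because 'install_all' is the
# default target, running plain 'scons' builds and installs everything under
# the configured prefix. The finer-grained aliases defined above can be
# requested individually, for example:
#   scons -j4 build_all        # compile all libraries without installing
#   scons install_finley       # build and install just the finley module
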
################## Targets to build and run the test suite ###################

if not env['cppunit']:
    test_msg = env.Command('.dummy.', None, '@echo "Cannot run C/C++ unit tests, CppUnit not found!";exit 1')
    env.Alias('run_tests', test_msg)
env.Alias('run_tests', ['install_all'])
env.Alias('all_tests', ['install_all', 'run_tests', 'py_tests'])
env.Alias('build_full',['install_all','build_tests','build_py_tests'])
env.Alias('build_PasoTests','$BUILD_DIR/$PLATFORM/paso/profiling/PasoTests')

##################### Targets to build the documentation #####################

env.Alias('basedocs', ['examples_tarfile', 'examples_zipfile', 'api_doxygen', 'user_pdf', 'install_pdf', 'cookbook_pdf', 'inversion_pdf'])
env.Alias('docs', ['basedocs', 'sphinxdoc'])
env.Alias('release_prep', ['docs', 'install_all'])
env.Alias('release_prep_old', ['basedocs', 'api_epydoc', 'install_all'])


# The test scripts are always generated; this target allows us to
# generate the test scripts without doing a full build
env.Alias('testscripts',[])

if not IS_WINDOWS:
    try:
        utest=open('utest.sh','w')
        utest.write(GroupTest.makeHeader(env['PLATFORM'], prefix, False))
        for tests in TestGroups:
            utest.write(tests.makeString())
        utest.close()
        Execute(Chmod('utest.sh', 0o755))
        print("Generated utest.sh.")
        # This version contains only python tests - it is meant to be usable
        # from a binary-only install, provided you have the test files
        utest=open('itest.sh','w')
        utest.write(GroupTest.makeHeader(env['PLATFORM'], prefix, True))
        for tests in TestGroups:
            if tests.exec_cmd=='$PYTHONRUNNER ':
                utest.write(tests.makeString())
        utest.close()
        Execute(Chmod('itest.sh', 0o755))
        print("Generated itest.sh.")
    except IOError:
        print("Error attempting to write unittests file.")
        Exit(1)

    # delete utest.sh/itest.sh upon cleanup
    env.Clean('target_init', 'utest.sh')
    env.Clean('target_init', 'itest.sh')

    # Make sure that the escript wrapper is in place
    if not os.path.isfile(os.path.join(env['bininstall'], 'run-escript')):
        print("Copying escript wrapper.")
        Execute(Copy(os.path.join(env['bininstall'],'run-escript'), 'bin/run-escript'))
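
# Illustrative sketch (not part of the SConstruct in either revision): the
# utest.sh/itest.sh generation above only relies on the interface shown here;
# the real GroupTest class comes from the grouptest module imported earlier
# and is assumed to carry considerably more detail.
class _MinimalGroupTest(object):
    """Bare-bones stand-in exposing what the script generation uses."""
    def __init__(self, exec_cmd, test_script):
        self.exec_cmd = exec_cmd          # e.g. '$PYTHONRUNNER '
        self.test_script = test_script
    @staticmethod
    def makeHeader(platform, build_prefix, python_only):
        return "#!/bin/sh\n# tests for %s built under %s\n" % (platform, build_prefix)
    def makeString(self):
        return "%s%s\n" % (self.exec_cmd, self.test_script)
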
Removed from v.1391 (continued):

syslib_install_target = env.installDirectory(sys_libinstall,libinstall)
syspy_install_target = env.installDirectory(sys_pyinstall,pyinstall,recursive=True)

install_target = env.Alias("install", env.Flatten([syslib_install_target, syspy_install_target]) )

