/[escript]/branches/diaplayground/SConstruct

Diff of /branches/diaplayground/SConstruct


Diff between revision 1459 by ksteube, Thu Mar 27 01:49:10 2008 UTC and revision 4216 by caltinay, Tue Feb 19 05:15:38 2013 UTC. The file contents below are as of revision 4216.
##############################################################################
#
# Copyright (c) 2003-2013 by University of Queensland
# http://www.uq.edu.au
#
# Primary Business: Queensland, Australia
# Licensed under the Open Software License version 3.0
# http://www.opensource.org/licenses/osl-3.0.php
#
# Development until 2012 by Earth Systems Science Computational Center (ESSCC)
# Development since 2012 by School of Earth Sciences
#
##############################################################################

EnsureSConsVersion(0,98,1)
EnsurePythonVersion(2,5)

import sys, os, platform, re
from distutils import sysconfig
from site_init import *
from subprocess import PIPE, Popen

# Version number to check for in options file. Increment when new features are
# added or existing options changed.
REQUIRED_OPTS_VERSION=201

# MS Windows support, many thanks to PH
IS_WINDOWS = (os.name == 'nt')

########################## Determine options file ############################
# 1. command line
# 2. scons/<hostname>_options.py
# 3. name as part of a cluster
options_file=ARGUMENTS.get('options_file', None)
if not options_file:
    ext_dir = os.path.join(os.getcwd(), 'scons')
    hostname = platform.node().split('.')[0]
    for name in hostname, effectiveName(hostname):
        mangledhostname = re.sub('[^0-9a-zA-Z]', '_', hostname)
        options_file = os.path.join(ext_dir, mangledhostname+'_options.py')
        if os.path.isfile(options_file): break

if not os.path.isfile(options_file):
    print("\nWARNING:\nOptions file %s" % options_file)
    print("not found! Default options will be used which is most likely suboptimal.")
    print("It is recommended that you copy one of the TEMPLATE files in the scons/")
    print("subdirectory and customize it to your needs.\n")
    options_file = None

############################### Build options ################################

default_prefix='/usr'
mpi_flavours=('no', 'none', 'MPT', 'MPICH', 'MPICH2', 'OPENMPI', 'INTELMPI')
lapack_flavours=('none', 'clapack', 'mkl')

vars = Variables(options_file, ARGUMENTS)
vars.AddVariables(
  PathVariable('options_file', 'Path to options file', options_file, PathVariable.PathIsFile),
  PathVariable('prefix', 'Installation prefix', Dir('#.').abspath, PathVariable.PathIsDirCreate),
  PathVariable('build_dir', 'Top-level build directory', Dir('#/build').abspath, PathVariable.PathIsDirCreate),
  BoolVariable('verbose', 'Output full compile/link lines', False),
# Compiler/Linker options
  ('cc', 'Path to C compiler', 'default'),
  ('cxx', 'Path to C++ compiler', 'default'),
  ('cc_flags', 'Base C/C++ compiler flags', 'default'),
  ('cc_optim', 'Additional C/C++ flags for a non-debug build', 'default'),
  ('cc_debug', 'Additional C/C++ flags for a debug build', 'default'),
  ('cc_extra', 'Extra C compiler flags', ''),
  ('cxx_extra', 'Extra C++ compiler flags', ''),
  ('ld_extra', 'Extra linker flags', ''),
  BoolVariable('werror','Treat compiler warnings as errors', True),
  BoolVariable('debug', 'Compile with debug flags', False),
  BoolVariable('openmp', 'Compile parallel version using OpenMP', False),
  ('omp_flags', 'OpenMP compiler flags', 'default'),
  ('omp_ldflags', 'OpenMP linker flags', 'default'),
# Mandatory libraries
  ('boost_prefix', 'Prefix/Paths of boost installation', default_prefix),
  ('boost_libs', 'Boost libraries to link with', ['boost_python-mt']),
# Mandatory for tests
  ('cppunit_prefix', 'Prefix/Paths of CppUnit installation', default_prefix),
  ('cppunit_libs', 'CppUnit libraries to link with', ['cppunit']),
# Optional libraries and options
  EnumVariable('mpi', 'Compile parallel version using MPI flavour', 'none', allowed_values=mpi_flavours),
  ('mpi_prefix', 'Prefix/Paths of MPI installation', default_prefix),
  ('mpi_libs', 'MPI shared libraries to link with', ['mpi']),
  BoolVariable('netcdf', 'Enable netCDF file support', False),
  ('netcdf_prefix', 'Prefix/Paths of netCDF installation', default_prefix),
  ('netcdf_libs', 'netCDF libraries to link with', ['netcdf_c++', 'netcdf']),
  BoolVariable('parmetis', 'Enable ParMETIS (requires MPI)', False),
  ('parmetis_prefix', 'Prefix/Paths of ParMETIS installation', default_prefix),
  ('parmetis_libs', 'ParMETIS libraries to link with', ['parmetis', 'metis']),
  BoolVariable('papi', 'Enable PAPI', False),
  ('papi_prefix', 'Prefix/Paths to PAPI installation', default_prefix),
  ('papi_libs', 'PAPI libraries to link with', ['papi']),
  BoolVariable('papi_instrument_solver', 'Use PAPI to instrument each iteration of the solver', False),
  BoolVariable('mkl', 'Enable the Math Kernel Library', False),
  ('mkl_prefix', 'Prefix/Paths to MKL installation', default_prefix),
  ('mkl_libs', 'MKL libraries to link with', ['mkl_solver','mkl_em64t','guide','pthread']),
  BoolVariable('umfpack', 'Enable UMFPACK', False),
  ('umfpack_prefix', 'Prefix/Paths to UMFPACK installation', default_prefix),
  ('umfpack_libs', 'UMFPACK libraries to link with', ['umfpack']),
  BoolVariable('boomeramg', 'Enable BoomerAMG', False),
  ('boomeramg_prefix', 'Prefix/Paths to BoomerAMG installation', default_prefix),
  ('boomeramg_libs', 'BoomerAMG libraries to link with', ['boomeramg']),
  EnumVariable('lapack', 'Set LAPACK flavour', 'none', allowed_values=lapack_flavours),
  ('lapack_prefix', 'Prefix/Paths to LAPACK installation', default_prefix),
  ('lapack_libs', 'LAPACK libraries to link with', []),
  BoolVariable('silo', 'Enable the Silo file format in weipa', False),
  ('silo_prefix', 'Prefix/Paths to Silo installation', default_prefix),
  ('silo_libs', 'Silo libraries to link with', ['siloh5', 'hdf5']),
  BoolVariable('visit', 'Enable the VisIt simulation interface', False),
  ('visit_prefix', 'Prefix/Paths to VisIt installation', default_prefix),
  ('visit_libs', 'VisIt libraries to link with', ['simV2']),
  BoolVariable('vsl_random', 'Use VSL from intel for random data', False),
# Advanced settings
  #dudley_assemble_flags = -funroll-loops      to actually do something
  ('dudley_assemble_flags', 'compiler flags for some dudley optimisations', ''),
  # To enable passing function pointers through python
  BoolVariable('iknowwhatimdoing', 'Allow non-standard C', False),
  # An option for specifying the compiler tools (see windows branch)
  ('tools_names', 'Compiler tools to use', ['default']),
  ('env_export', 'Environment variables to be passed to tools',[]),
  EnumVariable('forcelazy', 'For testing use only - set the default value for autolazy', 'leave_alone', allowed_values=('leave_alone', 'on', 'off')),
  EnumVariable('forcecollres', 'For testing use only - set the default value for force resolving collective ops', 'leave_alone', allowed_values=('leave_alone', 'on', 'off')),
  # finer control over library building, intel aggressive global optimisation
  # works with dynamic libraries on windows.
  ('build_shared', 'Build dynamic libraries only', False),
  ('sys_libs', 'Extra libraries to link with', []),
  ('escript_opts_version', 'Version of options file (do not specify on command line)'),
  ('SVN_VERSION', 'Do not use from options file', -2),
  ('pythoncmd', 'which python to compile with','python'),
  ('usepython3', 'Is this a python3 build? (experimental)', False),
  ('pythonlibname', 'Name of the python library to link. (This is found automatically for python2.X.)', ''),
  ('pythonlibpath', 'Path to the python library. (You should not need to set this unless your python has moved)',''),
  ('pythonincpath','Path to python include files. (You should not need to set this unless your python has moved',''),
  BoolVariable('BADPYTHONMACROS','Extra \#include to get around a python bug.', True),
)

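# Example (illustrative sketch only, not part of this file): most of the
# variables above are normally set in a host-specific options file such as a
# hypothetical scons/myhost_options.py, which is picked up automatically via
# the mangled hostname or passed explicitly with
#   scons options_file=scons/myhost_options.py
# The names below are real option names from the list above; the values are
# assumptions and should be adapted from one of the TEMPLATE files in scons/.
#
#   escript_opts_version = 201        # must match REQUIRED_OPTS_VERSION
#   openmp = True
#   boost_prefix = '/usr/local'
#   boost_libs = ['boost_python']
#   netcdf = True
#   cc_optim = '-O3'
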
##################### Create environment and help text #######################

# Intel's compiler uses regular expressions improperly and emits a warning
# about failing to find the compilers. This warning can be safely ignored.

# PATH is needed so the compiler, linker and tools are found if they are not
# in default locations.
env = Environment(tools = ['default'], options = vars,
                  ENV = {'PATH': os.environ['PATH']})

#set the vars for clang
def mkclang(env):
    env['CC']='clang'
    env['CXX']='clang++'


if env['tools_names'] != 'default':
    zz=env['tools_names']
    if 'clang' in zz:
        zz.remove('clang')
        zz.insert(0, mkclang)
    env = Environment(tools = ['default'] + env['tools_names'], options = vars,
                      ENV = {'PATH' : os.environ['PATH']})

if options_file:
    opts_valid=False
    if 'escript_opts_version' in env.Dictionary() and \
        int(env['escript_opts_version']) >= REQUIRED_OPTS_VERSION:
            opts_valid=True
    if opts_valid:
        print("Using options in %s." % options_file)
    else:
        print("\nOptions file %s" % options_file)
        print("is outdated! Please update the file by examining one of the TEMPLATE")
        print("files in the scons/ subdirectory and setting escript_opts_version to %d.\n"%REQUIRED_OPTS_VERSION)
        Exit(1)

# Generate help text (scons -h)
Help(vars.GenerateHelpText(env))

# Check for superfluous options
if len(vars.UnknownVariables())>0:
    for k in vars.UnknownVariables():
        print("Unknown option '%s'" % k)
    Exit(1)

#################### Make sure install directories exist #####################

env['BUILD_DIR']=Dir(env['build_dir']).abspath
prefix=Dir(env['prefix']).abspath
env['incinstall'] = os.path.join(prefix, 'include')
env['bininstall'] = os.path.join(prefix, 'bin')
env['libinstall'] = os.path.join(prefix, 'lib')
env['pyinstall']  = os.path.join(prefix, 'esys')
if not os.path.isdir(env['bininstall']):
    os.makedirs(env['bininstall'])
if not os.path.isdir(env['libinstall']):
    os.makedirs(env['libinstall'])
if not os.path.isdir(env['pyinstall']):
    os.makedirs(env['pyinstall'])

env.Append(CPPPATH = [env['incinstall']])
env.Append(LIBPATH = [env['libinstall']])

################# Fill in compiler options if not set above ##################

if env['cc'] != 'default': env['CC']=env['cc']
if env['cxx'] != 'default': env['CXX']=env['cxx']

# version >=9 of intel C++ compiler requires use of icpc to link in C++
# runtimes (icc does not)
if not IS_WINDOWS and os.uname()[4]=='ia64' and env['CXX']=='icpc':
    env['LINK'] = env['CXX']

# default compiler/linker options
cc_flags = ''
cc_optim = ''
cc_debug = ''
omp_flags = ''
omp_ldflags = ''
fatalwarning = '' # switch to turn warnings into errors
sysheaderopt = '' # how to indicate that a header is a system header

# env['CC'] might be a full path
cc_name=os.path.basename(env['CC'])

if cc_name == 'icc':
    # Intel compiler
    # #1875: offsetof applied to non-POD types is nonstandard (in boost)
    cc_flags    = "-std=c99 -fPIC -w2 -wd1875 -Wno-unknown-pragmas -DBLOCKTIMER -DCORE_ID1"
    cc_optim    = "-O3 -ftz -fno-alias -ipo -xHost"
    cc_debug    = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
    omp_flags   = "-openmp"
    omp_ldflags = "-openmp -openmp_report=1"
    fatalwarning = "-Werror"
elif cc_name[:3] == 'gcc':
    # GNU C on any system
    # note that -ffast-math is not used because it breaks isnan(),
    # see mantis #691
    cc_flags     = "-pedantic -Wall -fPIC -Wno-unknown-pragmas -DBLOCKTIMER  -Wno-sign-compare -Wno-system-headers -Wno-long-long -Wno-strict-aliasing -finline-functions"
    cc_optim     = "-O3"
    cc_debug     = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
    omp_flags    = "-fopenmp"
    omp_ldflags  = "-fopenmp"
    fatalwarning = "-Werror"
    sysheaderopt = "-isystem"
elif cc_name == 'cl':
    # Microsoft Visual C on Windows
    cc_flags     = "/EHsc /MD /GR /wd4068 /D_USE_MATH_DEFINES /DDLL_NETCDF"
    cc_optim     = "/O2 /Op /W3"
    cc_debug     = "/Od /RTCcsu /ZI /DBOUNDS_CHECK"
    fatalwarning = "/WX"
elif cc_name == 'icl':
    # Intel C on Windows
    cc_flags     = '/EHsc /GR /MD'
    cc_optim     = '/fast /Oi /W3 /Qssp /Qinline-factor- /Qinline-min-size=0 /Qunroll'
    cc_debug     = '/Od /RTCcsu /Zi /Y- /debug:all /Qtrapuv'
    omp_flags    = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel'
    omp_ldflags  = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel'

# set defaults if not otherwise specified
if env['cc_flags']    == 'default': env['cc_flags'] = cc_flags
if env['cc_optim']    == 'default': env['cc_optim'] = cc_optim
if env['cc_debug']    == 'default': env['cc_debug'] = cc_debug
if env['omp_flags']   == 'default': env['omp_flags'] = omp_flags
if env['omp_ldflags'] == 'default': env['omp_ldflags'] = omp_ldflags
if env['cc_extra']  != '': env.Append(CFLAGS = env['cc_extra'])
if env['cxx_extra'] != '': env.Append(CXXFLAGS = env['cxx_extra'])
if env['ld_extra']  != '': env.Append(LINKFLAGS = env['ld_extra'])

if env['BADPYTHONMACROS']: env.Append(CXXFLAGS = ' -DBADPYTHONMACROS')

if env['usepython3']:
    env.Append(CPPDEFINES=['ESPYTHON3'])

# set up the autolazy values
if env['forcelazy'] == 'on':
    env.Append(CPPDEFINES=['FAUTOLAZYON'])
elif env['forcelazy'] == 'off':
    env.Append(CPPDEFINES=['FAUTOLAZYOFF'])

# set up the collective resolve values
if env['forcecollres'] == 'on':
    env.Append(CPPDEFINES=['FRESCOLLECTON'])
elif env['forcecollres'] == 'off':
    env.Append(CPPDEFINES=['FRESCOLLECTOFF'])

# allow non-standard C if requested
if env['iknowwhatimdoing']:
    env.Append(CPPDEFINES=['IKNOWWHATIMDOING'])

# Disable OpenMP if no flags provided
if env['openmp'] and env['omp_flags'] == '':
    print("OpenMP requested but no flags provided - disabling OpenMP!")
    env['openmp'] = False

if env['openmp']:
    env.Append(CCFLAGS = env['omp_flags'])
    if env['omp_ldflags'] != '': env.Append(LINKFLAGS = env['omp_ldflags'])
else:
    env['omp_flags']=''
    env['omp_ldflags']=''

# add debug/non-debug compiler flags
if env['debug']:
    env.Append(CCFLAGS = env['cc_debug'])
else:
    env.Append(CCFLAGS = env['cc_optim'])

# always add cc_flags
env.Append(CCFLAGS = env['cc_flags'])

# add system libraries
env.AppendUnique(LIBS = env['sys_libs'])


global_revision=ARGUMENTS.get('SVN_VERSION', None)
if global_revision:
    global_revision = re.sub(':.*', '', global_revision)
    global_revision = re.sub('[^0-9]', '', global_revision)
    if global_revision == '': global_revision='-2'
else:
  # Get the global Subversion revision number for the getVersion() method
  try:
    global_revision = os.popen('svnversion -n .').read()
    global_revision = re.sub(':.*', '', global_revision)
    global_revision = re.sub('[^0-9]', '', global_revision)
    if global_revision == '': global_revision='-2'
  except:
    global_revision = '-1'
env['svn_revision']=global_revision
env.Append(CPPDEFINES=['SVN_VERSION='+global_revision])

if IS_WINDOWS:
    if not env['build_shared']:
        env.Append(CPPDEFINES = ['ESYSUTILS_STATIC_LIB'])
        env.Append(CPPDEFINES = ['PASO_STATIC_LIB'])

###################### Copy required environment vars ########################

# Windows doesn't use LD_LIBRARY_PATH but PATH instead
if IS_WINDOWS:
    LD_LIBRARY_PATH_KEY='PATH'
    env['ENV']['LD_LIBRARY_PATH']=''
else:
    LD_LIBRARY_PATH_KEY='LD_LIBRARY_PATH'

# the following env variables are exported for the unit tests

for key in 'OMP_NUM_THREADS', 'ESCRIPT_NUM_PROCS', 'ESCRIPT_NUM_NODES':
    try:
        env['ENV'][key] = os.environ[key]
    except KeyError:
        env['ENV'][key] = 1

env_export=env['env_export']
env_export.extend(['ESCRIPT_NUM_THREADS','ESCRIPT_HOSTFILE','DISPLAY','XAUTHORITY','PATH','HOME','KMP_MONITOR_STACKSIZE','TMPDIR','TEMP','TMP'])

for key in set(env_export):
    try:
        env['ENV'][key] = os.environ[key]
    except KeyError:
        pass

try:
    env.PrependENVPath(LD_LIBRARY_PATH_KEY, os.environ[LD_LIBRARY_PATH_KEY])
except KeyError:
    pass

# these shouldn't be needed
#for key in 'C_INCLUDE_PATH','CPLUS_INCLUDE_PATH','LIBRARY_PATH':
#    try:
#        env['ENV'][key] = os.environ[key]
#    except KeyError:
#        pass

try:
    env['ENV']['PYTHONPATH'] = os.environ['PYTHONPATH']
except KeyError:
    pass

######################## Add some custom builders ############################

if env['pythoncmd']=='python':
    py_builder = Builder(action = build_py, suffix = '.pyc', src_suffix = '.py', single_source=True)
else:
    py_builder = Builder(action = env['pythoncmd']+" scripts/py_comp.py $SOURCE $TARGET", suffix = '.pyc', src_suffix = '.py', single_source=True)
env.Append(BUILDERS = {'PyCompile' : py_builder});

runUnitTest_builder = Builder(action = runUnitTest, suffix = '.passed', src_suffix=env['PROGSUFFIX'], single_source=True)
env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder});

runPyUnitTest_builder = Builder(action = runPyUnitTest, suffix = '.passed', src_suffic='.py', single_source=True)
env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder});

epstopdfbuilder = Builder(action = eps2pdf, suffix='.pdf', src_suffix='.eps', single_source=True)
env.Append(BUILDERS = {'EpsToPDF' : epstopdfbuilder});

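# Example (illustrative sketch only, not from this file): builders registered
# via BUILDERS above are invoked from SConscript files as environment methods.
# The target/source names below are hypothetical.
#
#   env.PyCompile('mymodule.py')                  # byte-compile a python file to .pyc
#   env.RunUnitTest('mytest.passed', 'mytest')    # run a compiled unit test, record a .passed stamp
#   env.RunPyUnitTest('run_mytest.py')            # run a python unit test
#   env.EpsToPDF('figure.eps')                    # convert an EPS figure to PDF
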
############################ Dependency checks ###############################

# Create a Configure() environment to check for compilers and python
conf = Configure(env.Clone())

######## Test that the compilers work

if 'CheckCC' in dir(conf): # exists since scons 1.1.0
    if not conf.CheckCC():
        print("Cannot run C compiler '%s' (check config.log)" % (env['CC']))
        Exit(1)
    if not conf.CheckCXX():
        print("Cannot run C++ compiler '%s' (check config.log)" % (env['CXX']))
        Exit(1)
else:
    if not conf.CheckFunc('printf', language='c'):
        print("Cannot run C compiler '%s' (check config.log)" % (env['CC']))
        Exit(1)
    if not conf.CheckFunc('printf', language='c++'):
        print("Cannot run C++ compiler '%s' (check config.log)" % (env['CXX']))
        Exit(1)

if conf.CheckFunc('gethostname'):
    conf.env.Append(CPPDEFINES = ['HAVE_GETHOSTNAME'])

######## Python headers & library (required)

#First we check to see if the config file has specified
##Where to find the file. Ideally, this should be automatic
#But we need to deal with the case where python is not in its INSTALL
#Directory
# Use the python scons is running
if env['pythoncmd']=='python':
    python_inc_path=sysconfig.get_python_inc()
    if IS_WINDOWS:
        python_lib_path=os.path.join(sysconfig.get_config_var('prefix'), 'libs')
    elif env['PLATFORM']=='darwin':
        python_lib_path=sysconfig.get_config_var('LIBPL')
    else:
        python_lib_path=sysconfig.get_config_var('LIBDIR')

    #python_libs=[sysconfig.get_config_var('LDLIBRARY')] # only on linux
    if IS_WINDOWS:
        python_libs=['python%s%s'%(sys.version_info[0], sys.version_info[1])]
    else:
        python_libs=['python'+sysconfig.get_python_version()]

#if we want to use a python other than the one scons is running
else:
    initstring='from __future__ import print_function;from distutils import sysconfig;'
    if env['pythonlibname']!='':
        python_libs=env['pythonlibname']
    else:   # work it out by calling python
        if IS_WINDOWS:
            cmd='print("python%s%s"%(sys.version_info[0], sys.version_info[1]))'
        else:
            cmd='print("python"+sysconfig.get_python_version())'
        p=Popen([env['pythoncmd'], '-c', initstring+cmd], stdout=PIPE)
        python_libs=p.stdout.readline()
        if env['usepython3']:       # This is to convert unicode str into py2 string
            python_libs=python_libs.encode() # If scons runs on py3 then this must be rethought
        p.wait()
        python_libs=python_libs.strip()

    # Now we know whether we are using python3 or not
    p=Popen([env['pythoncmd'], '-c',  initstring+'print(sysconfig.get_python_inc())'], stdout=PIPE)
    python_inc_path=p.stdout.readline()
    if env['usepython3']:
        python_inc_path=python_inc_path.encode()
    p.wait()
    python_inc_path=python_inc_path.strip()
    if IS_WINDOWS:
        cmd="os.path.join(sysconfig.get_config_var('prefix'), 'libs')"
    elif env['PLATFORM']=='darwin':
        cmd="sysconfig.get_config_var(\"LIBPL\")"
    else:
        cmd="sysconfig.get_config_var(\"LIBDIR\")"

    p=Popen([env['pythoncmd'], '-c', initstring+'print('+cmd+')'], stdout=PIPE)
    python_lib_path=p.stdout.readline()
    if env['usepython3']:
        python_lib_path=python_lib_path.decode()
    p.wait()
    python_lib_path=python_lib_path.strip()

#Check for an override from the config file.
#Ideally, this should be automatic
#But we need to deal with the case where python is not in its INSTALL
#Directory
if env['pythonlibpath']!='':
    python_lib_path=env['pythonlibpath']

if env['pythonincpath']!='':
    python_inc_path=env['pythonincpath']


if sysheaderopt == '':
    conf.env.AppendUnique(CPPPATH = [python_inc_path])
else:
    conf.env.Append(CCFLAGS = [sysheaderopt, python_inc_path])

conf.env.AppendUnique(LIBPATH = [python_lib_path])
conf.env.AppendUnique(LIBS = python_libs)
# The wrapper script needs to find the libs
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, python_lib_path)

if not conf.CheckCHeader('Python.h'):
    print("Cannot find python include files (tried 'Python.h' in directory %s)" % (python_inc_path))
    Exit(1)
if not conf.CheckFunc('Py_Exit'):
    print("Cannot find python library method Py_Main (tried %s in directory %s)" % (python_libs, python_lib_path))
    Exit(1)

## reuse conf to check for numpy header (optional)
if env['usepython3']:
    # FIXME: This is until we can work out how to make the checks in python 3
    conf.env['numpy_h']=False
else:
    if conf.CheckCXXHeader(['Python.h','numpy/ndarrayobject.h']):
        conf.env.Append(CPPDEFINES = ['HAVE_NUMPY_H'])
        conf.env['numpy_h']=True
    else:
        conf.env['numpy_h']=False

# Commit changes to environment
env = conf.Finish()

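# Example (hypothetical options-file entries): building against a python
# interpreter other than the one running scons, using the pythoncmd/pythonincpath/
# pythonlibpath/pythonlibname options checked above. The paths are assumptions
# and would have to match the local installation.
#
#   pythoncmd = '/opt/python/bin/python2.7'
#   pythonincpath = '/opt/python/include/python2.7'
#   pythonlibpath = '/opt/python/lib'
#   pythonlibname = 'python2.7'
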
######## boost (required)

boost_inc_path,boost_lib_path=findLibWithHeader(env, env['boost_libs'], 'boost/python.hpp', env['boost_prefix'], lang='c++')
if sysheaderopt == '':
    env.AppendUnique(CPPPATH = [boost_inc_path])
else:
    # This is required because we can't -isystem /usr/include since it breaks
    # std includes
    if os.path.normpath(boost_inc_path) == '/usr/include':
        conf.env.Append(CCFLAGS=[sysheaderopt, os.path.join(boost_inc_path,'boost')])
    else:
        env.Append(CCFLAGS=[sysheaderopt, boost_inc_path])

env.AppendUnique(LIBPATH = [boost_lib_path])
env.AppendUnique(LIBS = env['boost_libs'])
env.PrependENVPath(LD_LIBRARY_PATH_KEY, boost_lib_path)

######## numpy (required)

if not detectModule(env, 'numpy'):
    print("Cannot import numpy. If it is installed try setting your PYTHONPATH and probably %s"%LD_LIBRARY_PATH_KEY)
    Exit(1)

######## CppUnit (required for tests)

try:
    cppunit_inc_path,cppunit_lib_path=findLibWithHeader(env, env['cppunit_libs'], 'cppunit/TestFixture.h', env['cppunit_prefix'], lang='c++')
    env.AppendUnique(CPPPATH = [cppunit_inc_path])
    env.AppendUnique(LIBPATH = [cppunit_lib_path])
    env.PrependENVPath(LD_LIBRARY_PATH_KEY, cppunit_lib_path)
    env['cppunit']=True
except:
    env['cppunit']=False

######## sympy (optional)

if detectModule(env, 'sympy'):
    env['sympy'] = True
else:
    print("Cannot import sympy. Symbolic toolbox and nonlinear PDEs will not be available.")
    env['sympy'] = False

######## pyproj (optional)

if detectModule(env, 'pyproj'):
    env['pyproj'] = True
else:
    print("Cannot import pyproj. Inversions may not work.")
    env['pyproj'] = False

######## netCDF (optional)

netcdf_inc_path=''
netcdf_lib_path=''
if env['netcdf']:
    netcdf_inc_path,netcdf_lib_path=findLibWithHeader(env, env['netcdf_libs'], 'netcdf.h', env['netcdf_prefix'], lang='c++')
    env.AppendUnique(CPPPATH = [netcdf_inc_path])
    env.AppendUnique(LIBPATH = [netcdf_lib_path])
    env.AppendUnique(LIBS = env['netcdf_libs'])
    env.PrependENVPath(LD_LIBRARY_PATH_KEY, netcdf_lib_path)
    env.Append(CPPDEFINES = ['USE_NETCDF'])

######## PAPI (optional)

papi_inc_path=''
papi_lib_path=''
if env['papi']:
    papi_inc_path,papi_lib_path=findLibWithHeader(env, env['papi_libs'], 'papi.h', env['papi_prefix'], lang='c')
    env.AppendUnique(CPPPATH = [papi_inc_path])
    env.AppendUnique(LIBPATH = [papi_lib_path])
    env.AppendUnique(LIBS = env['papi_libs'])
    env.PrependENVPath(LD_LIBRARY_PATH_KEY, papi_lib_path)
    env.Append(CPPDEFINES = ['BLOCKPAPI'])

######## MKL (optional)

mkl_inc_path=''
mkl_lib_path=''
if env['mkl']:
    mkl_inc_path,mkl_lib_path=findLibWithHeader(env, env['mkl_libs'], 'mkl_solver.h', env['mkl_prefix'], lang='c')
    env.AppendUnique(CPPPATH = [mkl_inc_path])
    env.AppendUnique(LIBPATH = [mkl_lib_path])
    env.AppendUnique(LIBS = env['mkl_libs'])
    env.PrependENVPath(LD_LIBRARY_PATH_KEY, mkl_lib_path)
    env.Append(CPPDEFINES = ['MKL'])

######## UMFPACK (optional)

umfpack_inc_path=''
umfpack_lib_path=''
if env['umfpack']:
    umfpack_inc_path,umfpack_lib_path=findLibWithHeader(env, env['umfpack_libs'], 'umfpack.h', env['umfpack_prefix'], lang='c')
    env.AppendUnique(CPPPATH = [umfpack_inc_path])
    env.AppendUnique(LIBPATH = [umfpack_lib_path])
    env.AppendUnique(LIBS = env['umfpack_libs'])
    env.PrependENVPath(LD_LIBRARY_PATH_KEY, umfpack_lib_path)
    env.Append(CPPDEFINES = ['UMFPACK'])

######## LAPACK (optional)

if env['lapack']=='mkl' and not env['mkl']:
    print("mkl_lapack requires MKL!")
    Exit(1)

env['uselapack'] = env['lapack']!='none'
lapack_inc_path=''
lapack_lib_path=''
if env['uselapack']:
    header='clapack.h'
    if env['lapack']=='mkl':
        env.AppendUnique(CPPDEFINES = ['MKL_LAPACK'])
        header='mkl_lapack.h'
    lapack_inc_path,lapack_lib_path=findLibWithHeader(env, env['lapack_libs'], header, env['lapack_prefix'], lang='c')
    env.AppendUnique(CPPPATH = [lapack_inc_path])
    env.AppendUnique(LIBPATH = [lapack_lib_path])
    env.AppendUnique(LIBS = env['lapack_libs'])
    env.Append(CPPDEFINES = ['USE_LAPACK'])

######## Silo (optional)

silo_inc_path=''
silo_lib_path=''
if env['silo']:
    silo_inc_path,silo_lib_path=findLibWithHeader(env, env['silo_libs'], 'silo.h', env['silo_prefix'], lang='c')
    env.AppendUnique(CPPPATH = [silo_inc_path])
    env.AppendUnique(LIBPATH = [silo_lib_path])
    # Note that we do not add the libs since they are only needed for the
    # weipa library and tools.
    #env.AppendUnique(LIBS = [env['silo_libs']])

######## VSL random numbers (optional)
if env['vsl_random']:
    env.Append(CPPDEFINES = ['MKLRANDOM'])

######## VisIt (optional)

visit_inc_path=''
visit_lib_path=''
if env['visit']:
    visit_inc_path,visit_lib_path=findLibWithHeader(env, env['visit_libs'], 'VisItControlInterface_V2.h', env['visit_prefix'], lang='c')
    env.AppendUnique(CPPPATH = [visit_inc_path])
    env.AppendUnique(LIBPATH = [visit_lib_path])

######## MPI (optional)

if env['mpi']=='no':
    env['mpi']='none'

env['usempi'] = env['mpi']!='none'
mpi_inc_path=''
mpi_lib_path=''
if env['usempi']:
    mpi_inc_path,mpi_lib_path=findLibWithHeader(env, env['mpi_libs'], 'mpi.h', env['mpi_prefix'], lang='c')
    env.AppendUnique(CPPPATH = [mpi_inc_path])
    env.AppendUnique(LIBPATH = [mpi_lib_path])
    env.AppendUnique(LIBS = env['mpi_libs'])
    env.PrependENVPath(LD_LIBRARY_PATH_KEY, mpi_lib_path)
    env.Append(CPPDEFINES = ['ESYS_MPI', 'MPI_NO_CPPBIND', 'MPICH_IGNORE_CXX_SEEK'])
    # NetCDF 4.1 defines MPI_Comm et al. if MPI_INCLUDED is not defined!
    # On the other hand MPT and OpenMPI don't define the latter so we have to
    # do that here
    if env['netcdf'] and env['mpi'] in ['MPT','OPENMPI']:
        env.Append(CPPDEFINES = ['MPI_INCLUDED'])

######## BOOMERAMG (optional)

if env['mpi'] == 'none': env['boomeramg'] = False

boomeramg_inc_path=''
boomeramg_lib_path=''
if env['boomeramg']:
    boomeramg_inc_path,boomeramg_lib_path=findLibWithHeader(env, env['boomeramg_libs'], 'HYPRE.h', env['boomeramg_prefix'], lang='c')
    env.AppendUnique(CPPPATH = [boomeramg_inc_path])
    env.AppendUnique(LIBPATH = [boomeramg_lib_path])
    env.AppendUnique(LIBS = env['boomeramg_libs'])
701     epydoc_path = env['epydoc_path']      env.PrependENVPath(LD_LIBRARY_PATH_KEY, boomeramg_lib_path)
702  except KeyError:      env.Append(CPPDEFINES = ['BOOMERAMG'])
703     epydoc_path = None  
704  try:  ######## ParMETIS (optional)
705     src_zipfile = env.File(env['src_zipfile'])  
706  except KeyError:  if not env['usempi']: env['parmetis'] = False
707     src_zipfile = None  
708  try:  parmetis_inc_path=''
709     test_zipfile = env.File(env['test_zipfile'])  parmetis_lib_path=''
710  except KeyError:  if env['parmetis']:
711     test_zipfile = None      parmetis_inc_path,parmetis_lib_path=findLibWithHeader(env, env['parmetis_libs'], 'parmetis.h', env['parmetis_prefix'], lang='c')
712  try:      env.AppendUnique(CPPPATH = [parmetis_inc_path])
713     examples_zipfile = env.File(env['examples_zipfile'])      env.AppendUnique(LIBPATH = [parmetis_lib_path])
714  except KeyError:      env.AppendUnique(LIBS = env['parmetis_libs'])
715     examples_zipfile = None      env.PrependENVPath(LD_LIBRARY_PATH_KEY, parmetis_lib_path)
716        env.Append(CPPDEFINES = ['USE_PARMETIS'])
717  try:  
718     src_tarfile = env.File(env['src_tarfile'])  ######## gmsh (optional, for tests)
719  except KeyError:  
720     src_tarfile = None  try:
721  try:      p=Popen(['gmsh', '-info'], stderr=PIPE)
722     test_tarfile = env.File(env['test_tarfile'])      _,e=p.communicate()
723  except KeyError:      if e.split().count("MPI"):
724     test_tarfile = None          env['gmsh']='m'
725  try:      else:
726     examples_tarfile = env.File(env['examples_tarfile'])          env['gmsh']='s'
727  except KeyError:  except OSError:
728     examples_tarfile = None      env['gmsh']=False
729    
730  try:  ######## PDFLaTeX (for documentation)
731     guide_pdf = env.File(env['guide_pdf'])  if 'PDF' in dir(env) and '.tex' in env.PDF.builder.src_suffixes(env):
732  except KeyError:      env['pdflatex']=True
733     guide_pdf = None  else:
734        env['pdflatex']=False
735  try:  
736     guide_html_index = env.File('index.htm',env['guide_html'])  ######################## Summarize our environment ###########################
737  except KeyError:  
738     guide_html_index = None  # keep some of our install paths first in the list for the unit tests
739    env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])
740  try:  env.PrependENVPath('PYTHONPATH', prefix)
741     api_epydoc = env.Dir(env['api_epydoc'])  env['ENV']['ESCRIPT_ROOT'] = prefix
742  except KeyError:  
743     api_epydoc = None  if not env['verbose']:
744        env['CCCOMSTR'] = "Compiling $TARGET"
745  try:      env['CXXCOMSTR'] = "Compiling $TARGET"
746     api_doxygen = env.Dir(env['api_doxygen'])      env['SHCCCOMSTR'] = "Compiling $TARGET"
747  except KeyError:      env['SHCXXCOMSTR'] = "Compiling $TARGET"
748     api_doxygen = None      env['ARCOMSTR'] = "Linking $TARGET"
749        env['LINKCOMSTR'] = "Linking $TARGET"
750  try:      env['SHLINKCOMSTR'] = "Linking $TARGET"
751     svn_pipe = os.popen("svnversion -n .")      env['PDFLATEXCOMSTR'] = "Building $TARGET from LaTeX input $SOURCES"
752     global_revision = svn_pipe.readlines()      env['BIBTEXCOMSTR'] = "Generating bibliography $TARGET"
753     svn_pipe.close()      env['MAKEINDEXCOMSTR'] = "Generating index $TARGET"
754     global_revision = re.sub(":.*", "", global_revision[0])      env['PDFLATEXCOMSTR'] = "Building $TARGET from LaTeX input $SOURCES"
755     global_revision = re.sub("[^0-9]", "", global_revision)      #Progress(['Checking -\r', 'Checking \\\r', 'Checking |\r', 'Checking /\r'], interval=17)
756  except:  
757     global_revision="-1"  print("")
758     print "Warning: unable to recover global revsion number."  print("*** Config Summary (see config.log and lib/buildvars for details) ***")
759  if global_revision == "": global_revision="0"  print("Escript/Finley revision %s"%global_revision)
760  print "Revision number is %s."%global_revision  print("  Install prefix:  %s"%env['prefix'])
761  env.Append(CPPDEFINES = "SVN_VERSION="+global_revision)  print("          Python:  %s"%sysconfig.PREFIX)
762    print("           boost:  %s"%env['boost_prefix'])
763  # Python install - esys __init__.py  print("           numpy:  YES")
764  init_target = env.Command(pyinstall+'/__init__.py', None, Touch('$TARGET'))  if env['usempi']:
765        print("             MPI:  YES (flavour: %s)"%env['mpi'])
766  # FIXME: exinstall and friends related to examples are not working.  else:
767  build_target = env.Alias('build',[libinstall,incinstall,pyinstall,init_target])      print("             MPI:  DISABLED")
768    if env['uselapack']:
769  env.Default(build_target)      print("          LAPACK:  YES (flavour: %s)"%env['lapack'])
770    else:
771  # Zipgets      print("          LAPACK:  DISABLED")
772  env.Alias('release_src',[ src_zipfile, src_tarfile ])  d_list=[]
773  env.Alias('release_tests',[ test_zipfile, test_tarfile])  e_list=[]
774  env.Alias('release_examples',[ examples_zipfile, examples_tarfile])  for i in 'debug','openmp','boomeramg','mkl','netcdf','papi','parmetis','pyproj','silo','sympy','umfpack','visit','vsl_random':
775  env.Alias('examples_zipfile',examples_zipfile)      if env[i]: e_list.append(i)
776  env.Alias('examples_tarfile',examples_tarfile)      else: d_list.append(i)
777  env.Alias('api_epydoc',api_epydoc)  for i in e_list:
778  env.Alias('api_doxygen',api_doxygen)      print("%16s:  YES"%i)
779  env.Alias('guide_html_index',guide_html_index)  for i in d_list:
780  env.Alias('guide_pdf', guide_pdf)      print("%16s:  DISABLED"%i)
781  env.Alias('docs',[ 'release_examples', 'guide_pdf', api_epydoc, api_doxygen, guide_html_index])  if env['cppunit']:
782  env.Alias('release', ['release_src', 'release_tests', 'docs'])      print("         CppUnit:  FOUND")
783    else:
784  env.Alias('build_tests',build_target)    # target to build all C++ tests      print("         CppUnit:  NOT FOUND")
785  env.Alias('build_py_tests',build_target) # target to build all python tests  if env['gmsh']=='m':
786  env.Alias('build_all_tests', [ 'build_tests', 'build_py_tests' ] ) # target to build all python tests      print("            gmsh:  FOUND, MPI-ENABLED")
787  env.Alias('run_tests', 'build_tests')   # target to run all C++ test  elif env['gmsh']=='s':
788  env.Alias('py_tests', 'build_py_tests') # taget to run all released python tests      print("            gmsh:  FOUND")
789  env.Alias('all_tests', ['run_tests', 'py_tests']) # target to run all C++ and released python tests  else:
790        print("            gmsh:  NOT FOUND")
791    if env['numpy_h']:
792  # Allow sconscripts to see the env      print("   numpy headers:  FOUND")
793  Export(["IS_WINDOWS_PLATFORM", "env", "incinstall", "libinstall", "pyinstall", "dodebug", "mkl_libs", "scsl_libs", "umf_libs", "blas_libs", "netCDF_libs", "useNetCDF", "mpi_run",  else:
794      "boost_lib", "python_lib", "doxygen_path", "epydoc_path", "papi_libs",      print("   numpy headers:  NOT FOUND")
795          "sys_libs", "test_zipfile", "src_zipfile", "test_tarfile", "src_tarfile", "examples_tarfile", "examples_zipfile", "trilinos_libs", "mpi_libs", "papi_instrument_solver",  print("   vsl_random:  %s"%env['vsl_random'])
796          "guide_pdf", "guide_html_index", "api_epydoc", "api_doxygen", "useMPI" ])      
797    if ((fatalwarning != '') and (env['werror'])):
798  # End initialisation section      print("  Treating warnings as errors")
799  # Begin configuration section  else:
800  # adds this file and the scons option directore to the source tar      print("  NOT treating warnings as errors")
801  release_srcfiles=[env.File('SConstruct'),env.Dir('lib'),env.Dir('include'),]+[ env.File(x) for x in glob.glob('scons/*.py') ]  print("")
802  release_testfiles=[env.File('README_TESTS'),]  
803  env.Zip(src_zipfile, release_srcfiles)  ####################### Configure the subdirectories #########################
804  env.Zip(test_zipfile, release_testfiles)  
805  try:  from grouptest import *
806     env.Tar(src_tarfile, release_srcfiles)  
807     env.Tar(test_tarfile, release_testfiles)  TestGroups=[]
808  except AttributeError:  
809     pass  # keep an environment without warnings-as-errors
810  # Insert new components to be build here  dodgy_env=env.Clone()
811  # FIXME: might be nice to replace this verbosity with a list of targets and some  
812  # FIXME: nifty python to create the lengthy but very similar env.Sconscript lines  # now add warnings-as-errors flags. This needs to be done after configuration
813  # Third Party libraries  # because the scons test files have warnings in them
814  env.SConscript(dirs = ['tools/CppUnitTest/src'], build_dir='build/$PLATFORM/tools/CppUnitTest', duplicate=0)  if ((fatalwarning != '') and (env['werror'])):
815  # C/C++ Libraries      env.Append(CCFLAGS = fatalwarning)
816  env.SConscript(dirs = ['paso/src'], build_dir='build/$PLATFORM/paso', duplicate=0)  
817  # bruce is removed for now as it doesn't really do anything  Export(
818  # env.SConscript(dirs = ['bruce/src'], build_dir='build/$PLATFORM/bruce', duplicate=0)    ['env',
819  env.SConscript(dirs = ['escript/src'], build_dir='build/$PLATFORM/escript', duplicate=0)     'dodgy_env',
820  env.SConscript(dirs = ['esysUtils/src'], build_dir='build/$PLATFORM/esysUtils', duplicate=0)     'IS_WINDOWS',
821  env.SConscript(dirs = ['finley/src'], build_dir='build/$PLATFORM/finley', duplicate=0)     'TestGroups'
822  env.SConscript(dirs = ['modellib/py_src'], build_dir='build/$PLATFORM/modellib', duplicate=0)    ]
823  env.SConscript(dirs = ['doc'], build_dir='build/$PLATFORM/doc', duplicate=0)  )
 env.SConscript(dirs = ['pyvisi/py_src'], build_dir='build/$PLATFORM/pyvisi', duplicate=0)  
 env.SConscript(dirs = ['pycad/py_src'], build_dir='build/$PLATFORM/pycad', duplicate=0)  
 env.SConscript(dirs = ['pythonMPI/src'], build_dir='build/$PLATFORM/pythonMPI', duplicate=0)  
 #env.SConscript(dirs = ['../test'], build_dir='../test/build', duplicate=0)  
824    
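Every optional package in the added configuration is located the same way: findLibWithHeader() from site_init.py is handed a characteristic header and the configured library names under the package's *_prefix, and the include/library paths it returns are appended to the environment. site_init.py is not part of this diff, so the following is only a minimal sketch of the pattern such a helper can follow; the directory layout it assumes (<prefix>/include plus <prefix>/lib64 or <prefix>/lib) and its error handling are illustrative, not the real implementation.

# Illustrative sketch only -- NOT the real site_init.findLibWithHeader().
import glob, os

def findLibWithHeader_sketch(env, libs, header, prefix, lang='c'):
    """Return an (inc_path, lib_path) pair below 'prefix' providing 'header' and 'libs'."""
    # 'env' and 'lang' are accepted only to mirror the call sites above; this sketch ignores them.
    if isinstance(libs, str):
        libs = [libs]
    inc_path = os.path.join(prefix, 'include')
    if not os.path.isfile(os.path.join(inc_path, header)):
        raise RuntimeError("%s not found under %s" % (header, inc_path))
    for libdir in ('lib64', 'lib'):
        lib_path = os.path.join(prefix, libdir)
        # accept this directory if every requested library leaves some lib<name>.* file in it
        if all(glob.glob(os.path.join(lib_path, 'lib%s.*' % lib)) for lib in libs):
            return inc_path, lib_path
    raise RuntimeError("libraries %r not found under %s" % (libs, prefix))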
825    env.SConscript(dirs = ['tools/escriptconvert'], variant_dir='$BUILD_DIR/$PLATFORM/tools/escriptconvert', duplicate=0)
826    env.SConscript(dirs = ['paso/src'], variant_dir='$BUILD_DIR/$PLATFORM/paso', duplicate=0)
827    env.SConscript(dirs = ['weipa/src'], variant_dir='$BUILD_DIR/$PLATFORM/weipa', duplicate=0)
828    env.SConscript(dirs = ['escript/src'], variant_dir='$BUILD_DIR/$PLATFORM/escript', duplicate=0)
829    env.SConscript(dirs = ['esysUtils/src'], variant_dir='$BUILD_DIR/$PLATFORM/esysUtils', duplicate=0)
830    env.SConscript(dirs = ['pasowrap/src'], variant_dir='$BUILD_DIR/$PLATFORM/pasowrap', duplicate=0)
831    env.SConscript(dirs = ['dudley/src'], variant_dir='$BUILD_DIR/$PLATFORM/dudley', duplicate=0)
832    env.SConscript(dirs = ['finley/src'], variant_dir='$BUILD_DIR/$PLATFORM/finley', duplicate=0)
833    env.SConscript(dirs = ['ripley/src'], variant_dir='$BUILD_DIR/$PLATFORM/ripley', duplicate=0)
834    env.SConscript(dirs = ['downunder/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/downunder', duplicate=0)
835    env.SConscript(dirs = ['modellib/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/modellib', duplicate=0)
836    env.SConscript(dirs = ['pycad/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/pycad', duplicate=0)
837    env.SConscript(dirs = ['pythonMPI/src'], variant_dir='$BUILD_DIR/$PLATFORM/pythonMPI', duplicate=0)
838    env.SConscript(dirs = ['doc'], variant_dir='$BUILD_DIR/$PLATFORM/doc', duplicate=0)
839    env.SConscript(dirs = ['paso/profiling'], variant_dir='$BUILD_DIR/$PLATFORM/paso/profiling', duplicate=0)
840    
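Each env.SConscript() call above hands a component's own SConscript to SCons and redirects its build products into $BUILD_DIR/$PLATFORM/... through variant_dir, with duplicate=0 so sources are compiled in place rather than copied into the build tree. As a rough illustration (the component name and target names below are made up, not taken from this revision), a sub-SConscript that fits this pattern only needs to import the exported environment and declare its own build and install aliases:

# Hypothetical mylib/src/SConscript -- illustrative only, not part of this diff.
Import('env')                      # environment exported by the top-level SConstruct

local_env = env.Clone()
sources = Glob('*.cpp')            # resolved relative to this SConscript

mylib = local_env.SharedLibrary('mylib', sources)
installed = local_env.Install(local_env['libinstall'], mylib)

# aliases in the style of the top-level build_*/install_* targets
local_env.Alias('build_mylib_lib', mylib)
local_env.Alias('install_mylib_lib', installed)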
841    
842    ######################## Populate the buildvars file #########################
843    
844    # remove obsolete file
845    if not env['usempi']:
846        Execute(Delete(os.path.join(env['libinstall'], 'pythonMPI')))
847        Execute(Delete(os.path.join(env['libinstall'], 'pythonMPIredirect')))
848    
849    # Try to extract the boost version from version.hpp
850    boosthpp=open(os.path.join(boost_inc_path, 'boost', 'version.hpp'))
851    boostversion='unknown'
852    try:
853        for line in boosthpp:
854            ver=re.match(r'#define BOOST_VERSION (\d+)',line)
855            if ver:
856                boostversion=ver.group(1)
857    except StopIteration:
858        pass
859    boosthpp.close()
860    
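The value captured above is Boost's packed integer (BOOST_VERSION = major*100000 + minor*100 + patch, e.g. 104900 for Boost 1.49.0). If a dotted form is ever wanted, say for the config summary, it can be decoded in a couple of lines; this helper is illustrative and not part of the build:

def decode_boost_version(packed):
    # BOOST_VERSION packs major*100000 + minor*100 + patch, e.g. 104900 -> '1.49.0'
    try:
        v = int(packed)
    except (TypeError, ValueError):
        return str(packed)          # e.g. the 'unknown' fallback used above
    return "%d.%d.%d" % (v // 100000, (v // 100) % 1000, v % 100)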
861    
862    buildvars=open(os.path.join(env['libinstall'], 'buildvars'), 'w')
863    buildvars.write("svn_revision="+str(global_revision)+"\n")
864    buildvars.write("prefix="+prefix+"\n")
865    buildvars.write("cc="+env['CC']+"\n")
866    buildvars.write("cxx="+env['CXX']+"\n")
867    if env['pythoncmd']=='python':
868        buildvars.write("python="+sys.executable+"\n")
869        buildvars.write("python_version="+str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2])+"\n")
870    else:
871        buildvars.write("python="+env['pythoncmd']+"\n")
872        p=Popen([env['pythoncmd'], '-c', 'from __future__ import print_function;import sys;print(str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2]))'], stdout=PIPE)
873        verstring=p.stdout.readline().strip()
874        p.wait()
875        buildvars.write("python_version="+verstring+"\n")
876    buildvars.write("boost_inc_path="+boost_inc_path+"\n")
877    buildvars.write("boost_lib_path="+boost_lib_path+"\n")
878    buildvars.write("boost_version="+boostversion+"\n")
879    buildvars.write("debug=%d\n"%int(env['debug']))
880    buildvars.write("openmp=%d\n"%int(env['openmp']))
881    buildvars.write("mpi=%s\n"%env['mpi'])
882    buildvars.write("mpi_inc_path=%s\n"%mpi_inc_path)
883    buildvars.write("mpi_lib_path=%s\n"%mpi_lib_path)
884    buildvars.write("lapack=%s\n"%env['lapack'])
885    buildvars.write("vsl_random=%d\n"%int(env['vsl_random']))
886    for i in 'netcdf','parmetis','papi','mkl','umfpack','boomeramg','silo','visit':
887        buildvars.write("%s=%d\n"%(i, int(env[i])))
888        if env[i]:
889            buildvars.write("%s_inc_path=%s\n"%(i, eval(i+'_inc_path')))
890            buildvars.write("%s_lib_path=%s\n"%(i, eval(i+'_lib_path')))
891    buildvars.close()
892    
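The buildvars file written above therefore consists of plain key=value lines, one setting per line, so any consumer (a diagnostic script, or a launcher that wants to know the MPI flavour or install prefix) can recover the build configuration with a few lines of Python. A possible reader, purely illustrative:

def read_buildvars(path):
    """Parse the key=value lines written above into a dict of strings."""
    settings = {}
    with open(path) as f:
        for line in f:
            line = line.strip()
            if not line or '=' not in line:
                continue
            key, _, value = line.partition('=')
            settings[key] = value
    return settings

# e.g. read_buildvars('lib/buildvars').get('mpi')  ->  'none', 'OPENMPI', ...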
893    ################### Targets to build and install libraries ###################
894    
895    target_init = env.Command(os.path.join(env['pyinstall'],'__init__.py'), None, Touch('$TARGET'))
896    env.Alias('target_init', [target_init])
897    # delete buildvars upon cleanup
898    env.Clean('target_init', os.path.join(env['libinstall'], 'buildvars'))
899    
900    # The headers have to be installed prior to build in order to satisfy
901    # #include <paso/Common.h>
902    env.Alias('build_esysUtils', ['install_esysUtils_headers', 'build_esysUtils_lib'])
903    env.Alias('install_esysUtils', ['build_esysUtils', 'install_esysUtils_lib'])
904    
905    env.Alias('build_paso', ['install_paso_headers', 'build_paso_lib'])
906    env.Alias('install_paso', ['build_paso', 'install_paso_lib'])
907    
908    env.Alias('build_escript', ['install_escript_headers', 'build_escript_lib', 'build_escriptcpp_lib'])
909    env.Alias('install_escript', ['build_escript', 'install_escript_lib', 'install_escriptcpp_lib', 'install_escript_py'])
910    
911    env.Alias('build_pasowrap', ['install_pasowrap_headers', 'build_pasowrap_lib', 'build_pasowrapcpp_lib'])
912    env.Alias('install_pasowrap', ['build_pasowrap', 'install_pasowrap_lib', 'install_pasowrapcpp_lib', 'install_pasowrap_py'])
913    
914    env.Alias('build_dudley', ['install_dudley_headers', 'build_dudley_lib', 'build_dudleycpp_lib'])
915    env.Alias('install_dudley', ['build_dudley', 'install_dudley_lib', 'install_dudleycpp_lib', 'install_dudley_py'])
916    
917    env.Alias('build_finley', ['install_finley_headers', 'build_finley_lib', 'build_finleycpp_lib'])
918    env.Alias('install_finley', ['build_finley', 'install_finley_lib', 'install_finleycpp_lib', 'install_finley_py'])
919    
920    env.Alias('build_ripley', ['install_ripley_headers', 'build_ripley_lib', 'build_ripleycpp_lib'])
921    env.Alias('install_ripley', ['build_ripley', 'install_ripley_lib', 'install_ripleycpp_lib', 'install_ripley_py'])
922    
923    env.Alias('build_weipa', ['install_weipa_headers', 'build_weipa_lib', 'build_weipacpp_lib'])
924    env.Alias('install_weipa', ['build_weipa', 'install_weipa_lib', 'install_weipacpp_lib', 'install_weipa_py'])
925    
926    env.Alias('build_escriptreader', ['install_weipa_headers', 'build_escriptreader_lib'])
927    env.Alias('install_escriptreader', ['build_escriptreader', 'install_escriptreader_lib'])
928    
929    # Now gather all the above into some easy targets: build_all and install_all
930    build_all_list = []
931    build_all_list += ['build_esysUtils']
932    build_all_list += ['build_paso']
933    build_all_list += ['build_escript']
934    build_all_list += ['build_pasowrap']
935    build_all_list += ['build_dudley']
936    build_all_list += ['build_finley']
937    build_all_list += ['build_ripley']
938    build_all_list += ['build_weipa']
939    if not IS_WINDOWS: build_all_list += ['build_escriptreader']
940    if env['usempi']:   build_all_list += ['build_pythonMPI']
941    build_all_list += ['build_escriptconvert']
942    env.Alias('build_all', build_all_list)
943    
944    install_all_list = []
945    install_all_list += ['target_init']
946    install_all_list += ['install_esysUtils']
947    install_all_list += ['install_paso']
948    install_all_list += ['install_escript']
949    install_all_list += ['install_pasowrap']
950    install_all_list += ['install_dudley']
951    install_all_list += ['install_finley']
952    install_all_list += ['install_ripley']
953    install_all_list += ['install_weipa']
954    if not IS_WINDOWS: install_all_list += ['install_escriptreader']
955    install_all_list += ['install_downunder_py']
956    install_all_list += ['install_modellib_py']
957    install_all_list += ['install_pycad_py']
958    if env['usempi']:   install_all_list += ['install_pythonMPI']
959    install_all_list += ['install_escriptconvert']
960    env.Alias('install_all', install_all_list)
961    
962    # Default target is install
963    env.Default('install_all')
964    
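Because install_all is the default target, a bare scons run builds and installs everything into the configured prefix, while the per-component aliases defined above remain available for partial rebuilds. Typical invocations (illustrative):

  scons -j4                   # default target: install_all
  scons build_finley          # headers plus the finley libraries only
  scons install_escript       # build and install the escript component
  scons docs                  # documentation targets defined further below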
965    ################## Targets to build and run the test suite ###################
966    
967    if not env['cppunit']:
968        test_msg = env.Command('.dummy.', None, '@echo "Cannot run C/C++ unit tests, CppUnit not found!";exit 1')
969        env.Alias('run_tests', test_msg)
970    env.Alias('run_tests', ['install_all'])
971    env.Alias('all_tests', ['install_all', 'run_tests', 'py_tests'])
972    env.Alias('build_full',['install_all','build_tests','build_py_tests'])
973    env.Alias('build_PasoTests','$BUILD_DIR/$PLATFORM/paso/profiling/PasoTests')
974    
975    ##################### Targets to build the documentation #####################
976    
977    env.Alias('basedocs', ['examples_tarfile', 'examples_zipfile', 'api_doxygen', 'user_pdf', 'install_pdf', 'cookbook_pdf', 'inversion_pdf'])
978    env.Alias('docs', ['basedocs', 'sphinxdoc'])
979    env.Alias('release_prep', ['docs', 'install_all'])
980    env.Alias('release_prep_old', ['basedocs', 'api_epydoc', 'install_all'])
981    
982    
983    # The test scripts are always generated, this target allows us to
984    # generate the testscripts without doing a full build
985    env.Alias('testscripts',[])
986    
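The block below assembles utest.sh from nothing more than the exported TestGroups list: GroupTest.makeHeader() supplies a platform-specific preamble and each group contributes whatever its makeString() returns, while itest.sh keeps only the groups whose exec_cmd is the Python runner. The only contract a group object has to meet here is an exec_cmd attribute and a makeString() method, as in this deliberately simplified stand-in (illustrative only; the real class lives in grouptest.py and takes different constructor arguments):

class ExampleTestGroup(object):
    # Minimal stand-in showing the interface consumed when writing utest.sh/itest.sh.
    def __init__(self, name, exec_cmd, cases):
        self.name = name
        self.exec_cmd = exec_cmd        # e.g. '$PYTHONRUNNER ' for python test groups
        self.cases = cases              # list of test script names
    def makeString(self):
        lines = ["# %s" % self.name]
        for case in self.cases:
            lines.append(self.exec_cmd + case)
        return "\n".join(lines) + "\n"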
987    if not IS_WINDOWS:
988        try:
989            utest=open('utest.sh','w')
990            utest.write(GroupTest.makeHeader(env['PLATFORM'], prefix, False))
991            for tests in TestGroups:
992                utest.write(tests.makeString())
993            utest.close()
994            Execute(Chmod('utest.sh', 0o755))
995            print("Generated utest.sh.")
996            # This version contains only python tests - I want this to be usable
997            # From a binary only install if you have the test files
998            utest=open('itest.sh','w')
999            utest.write(GroupTest.makeHeader(env['PLATFORM'], prefix, True))
1000            for tests in TestGroups:
1001              if tests.exec_cmd=='$PYTHONRUNNER ':
1002                utest.write(tests.makeString())
1003            utest.close()
1004            Execute(Chmod('itest.sh', 0o755))
1005            print("Generated itest.sh.")        
1006        except IOError:
1007            print("Error attempting to write unittests file.")
1008            Exit(1)
1009    
1010        # delete utest.sh upon cleanup
1011        env.Clean('target_init', 'utest.sh')
1012        env.Clean('target_init', 'itest.sh')
1013    
1014        # Make sure that the escript wrapper is in place
1015        if not os.path.isfile(os.path.join(env['bininstall'], 'run-escript')):
1016            print("Copying escript wrapper.")
1017            Execute(Copy(os.path.join(env['bininstall'],'run-escript'), 'bin/run-escript'))
1018    
Removed from v.1459:

syslib_install_target = env.installDirectory(sys_libinstall,libinstall)
syspy_install_target = env.installDirectory(sys_pyinstall,pyinstall,recursive=True)

install_target = env.Alias("install", env.Flatten([syslib_install_target, syspy_install_target]) )
