
Diff of /branches/diaplayground/SConstruct


Comparing revision 1459 by ksteube, Thu Mar 27 01:49:10 2008 UTC, with revision 4173 by caltinay, Wed Jan 30 03:05:01 2013 UTC
# Line 1
1  #         Copyright 2006 by ACcESS MNRF  ##############################################################################
2  #  #
3  #              http://www.access.edu.au  # Copyright (c) 2003-2013 by University of Queensland
4  #       Primary Business: Queensland, Australia  # http://www.uq.edu.au
 #  Licensed under the Open Software License version 3.0  
 #     http://www.opensource.org/licenses/osl-3.0.php  
   
 # top-level Scons configuration file for all esys13 modules  
 # Begin initialisation Section  
 # all of this section just intialises default environments and helper  
 # scripts. You shouldn't need to modify this section.  
 EnsureSConsVersion(0,96,91)  
 EnsurePythonVersion(2,3)  
   
 #===============================================================  
 #   import tools:  
 import glob  
 import sys, os, re  
 # Add our extensions  
 if sys.path.count('scons')==0: sys.path.append('scons')  
 import scons_extensions  
   
 # We may also need to know where python's site-packages subdirectory lives  
 python_version = 'python%s.%s'%(sys.version_info[0],sys.version_info[1])  
   
 #===============================================================  
   
 tools_prefix="/usr"  
   
 #==============================================================================================      
 #      
 #    get the installation prefix  
5  #  #
6  prefix = ARGUMENTS.get('prefix', sys.prefix )  # Primary Business: Queensland, Australia
7    # Licensed under the Open Software License version 3.0
8  # We may also need to know where python's site-packages subdirectory lives  # http://www.opensource.org/licenses/osl-3.0.php
9  python_version = 'python%s.%s'%(sys.version_info[0],sys.version_info[1])  #
10  # Install as a standard python package in /usr/lib64 if available, else in /usr/lib  # Development until 2012 by Earth Systems Science Computational Center (ESSCC)
11  if os.path.isdir(  prefix+"/lib64/"+python_version+"/site-packages"):  # Development since 2012 by School of Earth Sciences
12     sys_dir_packages =  prefix+"/lib64/"+python_version+"/site-packages/esys"  #
13     sys_dir_libraries = prefix+"/lib64"  ##############################################################################
14  else:  
15     sys_dir_packages =  prefix+"/lib/"+python_version+"/site-packages/esys"  EnsureSConsVersion(0,98,1)
16     sys_dir_libraries = prefix+"/lib"  EnsurePythonVersion(2,5)
17    
18  sys_dir_examples = prefix+"/share/doc/esys"  import sys, os, platform, re
19    from distutils import sysconfig
20  source_root = Dir('#.').abspath  from site_init import *
21    from subprocess import PIPE, Popen
22  dir_packages = os.path.join(source_root,"esys")  
23  dir_examples = os.path.join(source_root,"examples")  # Version number to check for in options file. Increment when new features are
24  dir_libraries = os.path.join(source_root,"lib")  # added or existing options changed.
25    REQUIRED_OPTS_VERSION=201
26  print "Source root is : ",source_root  
27  print " Default packages local installation:    ", dir_packages  # MS Windows support, many thanks to PH
28  print " Default library local installation  ", dir_libraries  IS_WINDOWS = (os.name == 'nt')
29  print " Default example local  installation:    ", dir_examples  
30  print "Install prefix is: ", prefix  ########################## Determine options file ############################
31  print " Default packages system installation:   ", sys_dir_packages  # 1. command line
32  print " Default library system installation     ", sys_dir_libraries  # 2. scons/<hostname>_options.py
33  print " Default example system installation:    ", sys_dir_examples  # 3. name as part of a cluster
34    options_file=ARGUMENTS.get('options_file', None)
35  #==============================================================================================      if not options_file:
36        ext_dir = os.path.join(os.getcwd(), 'scons')
37  # Default options and options help text      hostname = platform.node().split('.')[0]
38  # These are defaults and can be overridden using command line arguments or an options file.      for name in hostname, effectiveName(hostname):
39  # if the options_file or ARGUMENTS do not exist then the ones listed as default here are used          mangledhostname = re.sub('[^0-9a-zA-Z]', '_', name)
40  # DO NOT CHANGE THEM HERE          options_file = os.path.join(ext_dir, mangledhostname+'_options.py')
41  # Where to install?          if os.path.isfile(options_file): break
42  #==============================================================================================      
43  #      if not os.path.isfile(options_file):
44  #    get the options file if present:      print("\nWARNING:\nOptions file %s" % options_file)
45  #      print("not found! Default options will be used which is most likely suboptimal.")
46  options_file = ARGUMENTS.get('options_file','')      print("It is recommended that you copy one of the TEMPLATE files in the scons/")
47        print("subdirectory and customize it to your needs.\n")
48  if not os.path.isfile(options_file) :      options_file = None
49      options_file = False  
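The loop above builds the candidate options-file name by mangling the short host name; a standalone sketch of that step (the host name is made up):

import re

hostname = 'savanna-gw.example.com'.split('.')[0]    # hypothetical host -> 'savanna-gw'
mangled = re.sub('[^0-9a-zA-Z]', '_', hostname)      # -> 'savanna_gw'
print(mangled + '_options.py')                       # scons/savanna_gw_options.py would be picked up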
50    ############################### Build options ################################
51  if not options_file :  
52     import socket  default_prefix='/usr'
53     hostname = re.sub("[^0-9a-zA-Z]", "_", socket.gethostname().split('.')[0])  mpi_flavours=('no', 'none', 'MPT', 'MPICH', 'MPICH2', 'OPENMPI', 'INTELMPI')
54     tmp = os.path.join("scons",hostname+"_options.py")  lapack_flavours=('none', 'clapack', 'mkl')
55    
56     if os.path.isfile(tmp) :  vars = Variables(options_file, ARGUMENTS)
57        options_file = tmp  vars.AddVariables(
58      PathVariable('options_file', 'Path to options file', options_file, PathVariable.PathIsFile),
59  IS_WINDOWS_PLATFORM = (os.name== "nt")    PathVariable('prefix', 'Installation prefix', Dir('#.').abspath, PathVariable.PathIsDirCreate),
60      PathVariable('build_dir', 'Top-level build directory', Dir('#/build').abspath, PathVariable.PathIsDirCreate),
61  # If you're not going to tell me then......    BoolVariable('verbose', 'Output full compile/link lines', False),
62  # FIXME: add one for the altix too.  # Compiler/Linker options
63  if not options_file :    ('cc', 'Path to C compiler', 'default'),
64     if IS_WINDOWS_PLATFORM :    ('cxx', 'Path to C++ compiler', 'default'),
65        options_file = "scons/windows_mscv71_options.py"    ('cc_flags', 'Base C/C++ compiler flags', 'default'),
66     else:    ('cc_optim', 'Additional C/C++ flags for a non-debug build', 'default'),
67        options_file = "scons/linux_gcc_eg_options.py"    ('cc_debug', 'Additional C/C++ flags for a debug build', 'default'),
68      ('cc_extra', 'Extra C compiler flags', ''),
69  # and load it    ('cxx_extra', 'Extra C++ compiler flags', ''),
70  opts = Options(options_file, ARGUMENTS)    ('ld_extra', 'Extra linker flags', ''),
71  #================================================================    BoolVariable('werror','Treat compiler warnings as errors', True),
72  #    BoolVariable('debug', 'Compile with debug flags', False),
73  #   check if UMFPACK is installed on the system:    BoolVariable('openmp', 'Compile parallel version using OpenMP', False),
74  #    ('omp_flags', 'OpenMP compiler flags', 'default'),
75  uf_root=None    ('omp_ldflags', 'OpenMP linker flags', 'default'),
76  for i in [ 'UMFPACK', 'umfpack', 'ufsparse', 'UFSPARSE']:  # Mandatory libraries
77     if os.path.isdir(os.path.join(tools_prefix,'include',i)):    ('boost_prefix', 'Prefix/Paths of boost installation', default_prefix),
78         uf_root=i    ('boost_libs', 'Boost libraries to link with', ['boost_python-mt']),
79         print i," is used from ",tools_prefix  # Mandatory for tests
80         break    ('cppunit_prefix', 'Prefix/Paths of CppUnit installation', default_prefix),
81  if not uf_root==None:    ('cppunit_libs', 'CppUnit libraries to link with', ['cppunit']),
82     umf_path_default=os.path.join(tools_prefix,'include',uf_root)  # Optional libraries and options
83     umf_lib_path_default=os.path.join(tools_prefix,'lib')    EnumVariable('mpi', 'Compile parallel version using MPI flavour', 'none', allowed_values=mpi_flavours),
84     umf_libs_default=['umfpack']    ('mpi_prefix', 'Prefix/Paths of MPI installation', default_prefix),
85     amd_path_default=os.path.join(tools_prefix,'include',uf_root)    ('mpi_libs', 'MPI shared libraries to link with', ['mpi']),
86     amd_lib_path_default=os.path.join(tools_prefix,'lib')    BoolVariable('netcdf', 'Enable netCDF file support', False),
87     amd_libs_default=['amd']    ('netcdf_prefix', 'Prefix/Paths of netCDF installation', default_prefix),
88     ufc_path_default=os.path.join(tools_prefix,'include',uf_root)    ('netcdf_libs', 'netCDF libraries to link with', ['netcdf_c++', 'netcdf']),
89  else:    BoolVariable('parmetis', 'Enable ParMETIS (requires MPI)', False),
90     umf_path_default=None    ('parmetis_prefix', 'Prefix/Paths of ParMETIS installation', default_prefix),
91     umf_lib_path_default=None    ('parmetis_libs', 'ParMETIS libraries to link with', ['parmetis', 'metis']),
92     umf_libs_default=None    BoolVariable('papi', 'Enable PAPI', False),
93     amd_path_default=None    ('papi_prefix', 'Prefix/Paths to PAPI installation', default_prefix),
94     amd_lib_path_default=None    ('papi_libs', 'PAPI libraries to link with', ['papi']),
95     amd_libs_default=None    BoolVariable('papi_instrument_solver', 'Use PAPI to instrument each iteration of the solver', False),
96     ufc_path_default=None    BoolVariable('mkl', 'Enable the Math Kernel Library', False),
97  #    ('mkl_prefix', 'Prefix/Paths to MKL installation', default_prefix),
98  #==========================================================================    ('mkl_libs', 'MKL libraries to link with', ['mkl_solver','mkl_em64t','guide','pthread']),
99  #    BoolVariable('umfpack', 'Enable UMFPACK', False),
100  #    python installation:    ('umfpack_prefix', 'Prefix/Paths to UMFPACK installation', default_prefix),
101  #    ('umfpack_libs', 'UMFPACK libraries to link with', ['umfpack']),
102  python_path_default=os.path.join(tools_prefix,'include','python%s.%s'%(sys.version_info[0],sys.version_info[1]))    BoolVariable('boomeramg', 'Enable BoomerAMG', False),
103  python_lib_path_default=os.path.join(tools_prefix,'lib')    ('boomeramg_prefix', 'Prefix/Paths to BoomerAMG installation', default_prefix),
104  python_lib_default="python%s.%s"%(sys.version_info[0],sys.version_info[1])    ('boomeramg_libs', 'BoomerAMG libraries to link with', ['boomeramg']),
105      EnumVariable('lapack', 'Set LAPACK flavour', 'none', allowed_values=lapack_flavours),
106  #==========================================================================    ('lapack_prefix', 'Prefix/Paths to LAPACK installation', default_prefix),
107  #    ('lapack_libs', 'LAPACK libraries to link with', []),
108  #    boost installation:    BoolVariable('silo', 'Enable the Silo file format in weipa', False),
109  #    ('silo_prefix', 'Prefix/Paths to Silo installation', default_prefix),
110  boost_path_default=os.path.join(tools_prefix,'include')    ('silo_libs', 'Silo libraries to link with', ['siloh5', 'hdf5']),
111  boost_lib_path_default=os.path.join(tools_prefix,'lib')    BoolVariable('visit', 'Enable the VisIt simulation interface', False),
112  boost_lib_default=['boost_python']    ('visit_prefix', 'Prefix/Paths to VisIt installation', default_prefix),
113      ('visit_libs', 'VisIt libraries to link with', ['simV2']),
114      BoolVariable('vsl_random', 'Use VSL from intel for random data', False),
115    # Advanced settings
116      #dudley_assemble_flags = -funroll-loops      to actually do something
117      ('dudley_assemble_flags', 'compiler flags for some dudley optimisations', ''),
118      # To enable passing function pointers through python
119      BoolVariable('iknowwhatimdoing', 'Allow non-standard C', False),
120      # An option for specifying the compiler tools (see windows branch)
121      ('tools_names', 'Compiler tools to use', ['default']),
122      ('env_export', 'Environment variables to be passed to tools',[]),
123      EnumVariable('forcelazy', 'For testing use only - set the default value for autolazy', 'leave_alone', allowed_values=('leave_alone', 'on', 'off')),
124      EnumVariable('forcecollres', 'For testing use only - set the default value for force resolving collective ops', 'leave_alone', allowed_values=('leave_alone', 'on', 'off')),
125      # finer control over library building, intel aggressive global optimisation
126      # works with dynamic libraries on windows.
127      ('build_shared', 'Build dynamic libraries only', False),
128      ('sys_libs', 'Extra libraries to link with', []),
129      ('escript_opts_version', 'Version of options file (do not specify on command line)'),
130      ('SVN_VERSION', 'Do not use from options file', -2),
131      ('pythoncmd', 'which python to compile with','python'),
132      ('usepython3', 'Is this a python3 build? (experimental)', False),
133      ('pythonlibname', 'Name of the python library to link. (This is found automatically for python2.X.)', ''),
134      ('pythonlibpath', 'Path to the python library. (You should not need to set this unless your python has moved)',''),
135      ('pythonincpath','Path to python include files. (You should not need to set this unless your python has moved)',''),
136      BoolVariable('BADPYTHONMACROS','Extra \#include to get around a python bug.', True),
137    )
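The variables declared above are normally collected in a per-host options file under scons/ rather than passed on the command line. A minimal sketch of such a file, with a made-up host name and values that are assumptions rather than recommendations:

# scons/myhost_options.py -- illustrative only
escript_opts_version = 201            # must match REQUIRED_OPTS_VERSION above
prefix = '/opt/escript'
boost_prefix = '/usr/local'
boost_libs = ['boost_python-mt']
openmp = True
netcdf = True
netcdf_prefix = '/usr/local'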
138    
139  #==========================================================================  ##################### Create environment and help text #######################
 #  
 #    check if netCDF is installed on the system:  
 #  
 netCDF_path_default=os.path.join(tools_prefix,'include','netcdf-3')  
 netCDF_lib_path_default=os.path.join(tools_prefix,'lib')  
140    
141  if os.path.isdir(netCDF_path_default) and os.path.isdir(netCDF_lib_path_default):  # Intel's compiler uses regular expressions improperly and emits a warning
142       useNetCDF_default='yes'  # about failing to find the compilers. This warning can be safely ignored.
      netCDF_libs_default=[ 'netcdf_c++', 'netcdf' ]  
 else:  
      useNetCDF_default='no'  
      netCDF_path_default=None  
      netCDF_lib_path_default=None  
      netCDF_libs_default=None  
143    
144  #==========================================================================  # PATH is needed so the compiler, linker and tools are found if they are not
145  #  # in default locations.
146  #  MPI:  env = Environment(tools = ['default'], options = vars,
147  #                    ENV = {'PATH': os.environ['PATH']})
148  if IS_WINDOWS_PLATFORM:                    
149     useMPI_default='no'  
150     mpi_path_default=None  #set the vars for clang
151     mpi_lib_path_default=None  def mkclang(env):
152     mpi_libs_default=[]    env['CC']='clang'
153     mpi_run_default=None    env['CXX']='clang++'
154  else:                    
155     useMPI_default='no'                    
156     mpi_root='/usr/local'  if env['tools_names'] != 'default':
157     mpi_path_default=os.path.join(mpi_root,'include')      zz=env['tools_names']
158     mpi_lib_path_default=os.path.join(mpi_root,'lib')      if 'clang' in zz:
159     mpi_libs_default=[ 'mpich' , 'pthread', 'rt' ]          zz.remove('clang')
160     mpi_run_default='mpiexec -np 1'          zz.insert(0, mkclang)
161  #      env = Environment(tools = ['default'] + env['tools_names'], options = vars,
162  #==========================================================================                        ENV = {'PATH' : os.environ['PATH']})
163  #  
164  #    compile:  if options_file:
165  #      opts_valid=False
166  cc_flags_default='-O3 -std=c99 -ffast-math -fpic -Wno-unknown-pragmas -ansi'      if 'escript_opts_version' in env.Dictionary() and \
167  cc_flags_debug_default='-g -O0 -ffast-math -std=c99 -fpic -Wno-unknown-pragmas -ansi'          int(env['escript_opts_version']) >= REQUIRED_OPTS_VERSION:
168  cxx_flags_default='--no-warn -ansi'              opts_valid=True
169  cxx_flags_debug_default='--no-warn -ansi -DDOASSERT'      if opts_valid:
170            print("Using options in %s." % options_file)
171  #==============================================================================================          else:
172  # Default options and options help text          print("\nOptions file %s" % options_file)
173  # These are defaults and can be overridden using command line arguments or an options file.          print("is outdated! Please update the file by examining one of the TEMPLATE")
174  # if the options_file or ARGUMENTS do not exist then the ones listed as default here are used          print("files in the scons/ subdirectory and setting escript_opts_version to %d.\n"%REQUIRED_OPTS_VERSION)
175  # DO NOT CHANGE THEM HERE          Exit(1)
176  opts.AddOptions(  
177  # Where to install esys stuff  # Generate help text (scons -h)
178    ('incinstall', 'where the esys headers will be installed',             Dir('#.').abspath+'/include'),  Help(vars.GenerateHelpText(env))
179    ('libinstall', 'where the esys libraries will be installed',           dir_libraries),  
180    ('pyinstall', 'where the esys python modules will be installed',       dir_packages),  # Check for superfluous options
181    ('exinstall', 'where the esys examples will be installed',             dir_examples),  if len(vars.UnknownVariables())>0:
182    ('sys_libinstall', 'where the system esys libraries will be installed',       sys_dir_libraries),      for k in vars.UnknownVariables():
183    ('sys_pyinstall', 'where the system esys python modules will be installed',   sys_dir_packages),          print("Unknown option '%s'" % k)
184    ('sys_exinstall', 'where the system esys examples will be installed',         sys_dir_examples),      Exit(1)
185    ('src_zipfile', 'the source zip file will be installed.',              Dir('#.').abspath+"/release/escript_src.zip"),  
186    ('test_zipfile', 'the test zip file will be installed.',               Dir('#.').abspath+"/release/escript_tests.zip"),  #################### Make sure install directories exist #####################
187    ('src_tarfile', 'the source tar file will be installed.',              Dir('#.').abspath+"/release/escript_src.tar.gz"),  
188    ('test_tarfile', 'the test tar file will be installed.',               Dir('#.').abspath+"/release/escript_tests.tar.gz"),  env['BUILD_DIR']=Dir(env['build_dir']).abspath
189    ('examples_tarfile', 'the examples tar file will be installed.',       Dir('#.').abspath+"/release/doc/escript_examples.tar.gz"),  prefix=Dir(env['prefix']).abspath
190    ('examples_zipfile', 'the examples zip file will be installed.',       Dir('#.').abspath+"/release/doc/escript_examples.zip"),  env['incinstall'] = os.path.join(prefix, 'include')
191    ('guide_pdf', 'name of the user guide in pdf format',                  Dir('#.').abspath+"/release/doc/user/guide.pdf"),  env['bininstall'] = os.path.join(prefix, 'bin')
192    ('api_epydoc', 'name of the epydoc api docs directory',                Dir('#.').abspath+"/release/doc/epydoc"),  env['libinstall'] = os.path.join(prefix, 'lib')
193    ('guide_html', 'name of the directory for user guide in html format',  Dir('#.').abspath+"/release/doc/user/html"),  env['pyinstall']  = os.path.join(prefix, 'esys')
194    ('api_doxygen', 'name of the doxygen api docs directory',prefix+"/release/doc/doxygen"),  if not os.path.isdir(env['bininstall']):
195  # Compilation options      os.makedirs(env['bininstall'])
196    BoolOption('dodebug', 'Do you want a debug build?', 'no'),  if not os.path.isdir(env['libinstall']):
197    BoolOption('bounds_check', 'Do you want extra array bounds checking?', 'no'),      os.makedirs(env['libinstall'])
198    ('options_file', "Optional file containing preferred options. Ignored if it doesn't exist (default: scons/<hostname>_options.py)", options_file),  if not os.path.isdir(env['pyinstall']):
199    ('cc_defines','C/C++ defines to use', None),      os.makedirs(env['pyinstall'])
200    ('cc_flags','C compiler flags to use (Release build)', cc_flags_default),  
201    ('cc_flags_debug', 'C compiler flags to use (Debug build)', cc_flags_debug_default),  env.Append(CPPPATH = [env['incinstall']])
202    ('cxx_flags', 'C++ compiler flags to use (Release build)', cxx_flags_default),  env.Append(LIBPATH = [env['libinstall']])
203    ('cxx_flags_debug', 'C++ compiler flags to use (Debug build)', cxx_flags_debug_default),  
204    ('omp_flags', 'OpenMP compiler flags to use (Release build)', ''),  ################# Fill in compiler options if not set above ##################
205    ('omp_flags_debug', 'OpenMP compiler flags to use (Debug build)', ''),  
206    ('ar_flags', 'Static library archiver flags to use', None),  if env['cc'] != 'default': env['CC']=env['cc']
207    ('sys_libs', 'System libraries to link with', None),  if env['cxx'] != 'default': env['CXX']=env['cxx']
208    ('tar_flags','flags for zip files','-c -z'),  
209  # MKL  # version >=9 of intel C++ compiler requires use of icpc to link in C++
210    PathOption('mkl_path', 'Path to MKL includes', None),  # runtimes (icc does not)
211    PathOption('mkl_lib_path', 'Path to MKL libs', None),  if not IS_WINDOWS and os.uname()[4]=='ia64' and env['CXX']=='icpc':
212    ('mkl_libs', 'MKL libraries to link with', None),      env['LINK'] = env['CXX']
213  # SCSL  
214    PathOption('scsl_path', 'Path to SCSL includes', None),  # default compiler/linker options
215    PathOption('scsl_lib_path', 'Path to SCSL libs', None),  cc_flags = ''
216    ('scsl_libs', 'SCSL libraries to link with', None),  cc_optim = ''
217    ('scsl_libs_MPI', 'SCSL libraries to link with for MPI build', None),  cc_debug = ''
218  # UMFPACK  omp_flags = ''
219    PathOption('ufc_path', 'Path to UFconfig includes', ufc_path_default),  omp_ldflags = ''
220    PathOption('umf_path', 'Path to UMFPACK includes', umf_path_default),  fatalwarning = '' # switch to turn warnings into errors
221    PathOption('umf_lib_path', 'Path to UMFPACK libs', umf_lib_path_default),  sysheaderopt = '' # how to indicate that a header is a system header
222    ('umf_libs', 'UMFPACK libraries to link with', umf_libs_default),  
223  # AMD (used by UMFPACK)  # env['CC'] might be a full path
224    PathOption('amd_path', 'Path to AMD includes', amd_path_default),  cc_name=os.path.basename(env['CC'])
225    PathOption('amd_lib_path', 'Path to AMD libs', amd_lib_path_default),  
226    ('amd_libs', 'AMD libraries to link with', amd_libs_default),  if cc_name == 'icc':
227  # ParMETIS      # Intel compiler
228    ('parmetis_path', 'Path to ParMETIS includes', ''),      # #1875: offsetof applied to non-POD types is nonstandard (in boost)
229    ('parmetis_lib_path', 'Path to ParMETIS library', ''),      cc_flags    = "-std=c99 -fPIC -w2 -wd1875 -Wno-unknown-pragmas -DBLOCKTIMER -DCORE_ID1"
230    ('parmetis_lib', 'ParMETIS library to link with', []),      cc_optim    = "-O3 -ftz -fno-alias -ipo -xHost"
231  # TRILINOS      cc_debug    = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
232    PathOption('trilinos_path', 'Path to TRILINOS includes', None),      omp_flags   = "-openmp"
233    PathOption('trilinos_lib_path', 'Path to TRILINOS libs', None),      omp_ldflags = "-openmp -openmp_report=1"
234    ('trilinos_libs', 'TRILINOS libraries to link with', None),      fatalwarning = "-Werror"
235  # BLAS  elif cc_name[:3] == 'gcc':
236    PathOption('blas_path', 'Path to BLAS includes', None),      # GNU C on any system
237    PathOption('blas_lib_path', 'Path to BLAS libs', None),      # note that -ffast-math is not used because it breaks isnan(),
238    ('blas_libs', 'BLAS libraries to link with', None),      # see mantis #691
239  # netCDF      cc_flags     = "-pedantic -Wall -fPIC -Wno-unknown-pragmas -DBLOCKTIMER  -Wno-sign-compare -Wno-system-headers -Wno-long-long -Wno-strict-aliasing -finline-functions"
240    ('useNetCDF', 'switch on/off the usage of netCDF', useNetCDF_default),      cc_optim     = "-O3"
241    PathOption('netCDF_path', 'Path to netCDF includes', netCDF_path_default),      cc_debug     = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
242    PathOption('netCDF_lib_path', 'Path to netCDF libs', netCDF_lib_path_default),      omp_flags    = "-fopenmp"
243    ('netCDF_libs', 'netCDF C++ libraries to link with', netCDF_libs_default),      omp_ldflags  = "-fopenmp"
244  # Python      fatalwarning = "-Werror"
245  # locations of include files for python      sysheaderopt = "-isystem"
246  # FIXME: python_path should be python_inc_path and the same for boost etc.  elif cc_name == 'cl':
247    PathOption('python_path', 'Path to Python includes', python_path_default),      # Microsoft Visual C on Windows
248    PathOption('python_lib_path', 'Path to Python libs', python_lib_path_default),      cc_flags     = "/EHsc /MD /GR /wd4068 /D_USE_MATH_DEFINES /DDLL_NETCDF"
249    ('python_lib', 'Python libraries to link with', python_lib_default),      cc_optim     = "/O2 /Op /W3"
250    ('python_cmd', 'Python command', 'python'),      cc_debug     = "/Od /RTCcsu /ZI /DBOUNDS_CHECK"
251  # Boost      fatalwarning = "/WX"
252    PathOption('boost_path', 'Path to Boost includes', boost_path_default),  elif cc_name == 'icl':
253    PathOption('boost_lib_path', 'Path to Boost libs', boost_lib_path_default),      # Intel C on Windows
254    ('boost_lib', 'Boost libraries to link with', boost_lib_default),      cc_flags     = '/EHsc /GR /MD'
255  # Doc building      cc_optim     = '/fast /Oi /W3 /Qssp /Qinline-factor- /Qinline-min-size=0 /Qunroll'
256  #  PathOption('doxygen_path', 'Path to Doxygen executable', None),      cc_debug     = '/Od /RTCcsu /Zi /Y- /debug:all /Qtrapuv'
257  #  PathOption('epydoc_path', 'Path to Epydoc executable', None),      omp_flags    = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel'
258  # PAPI      omp_ldflags  = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel'
259    PathOption('papi_path', 'Path to PAPI includes', None),  
260    PathOption('papi_lib_path', 'Path to PAPI libs', None),  # set defaults if not otherwise specified
261    ('papi_libs', 'PAPI libraries to link with', None),  if env['cc_flags']    == 'default': env['cc_flags'] = cc_flags
262    ('papi_instrument_solver', 'use PAPI in Solver.c to instrument each iteration of the solver', None),  if env['cc_optim']    == 'default': env['cc_optim'] = cc_optim
263  # MPI  if env['cc_debug']    == 'default': env['cc_debug'] = cc_debug
264    BoolOption('useMPI', 'Compile parallel version using MPI', useMPI_default),  if env['omp_flags']   == 'default': env['omp_flags'] = omp_flags
265    ('MPICH_IGNORE_CXX_SEEK', 'name of macro to ignore MPI settings of C++ SEEK macro (for MPICH)' , 'MPICH_IGNORE_CXX_SEEK'),  if env['omp_ldflags'] == 'default': env['omp_ldflags'] = omp_ldflags
266    PathOption('mpi_path', 'Path to MPI includes', mpi_path_default),  if env['cc_extra']  != '': env.Append(CFLAGS = env['cc_extra'])
267    ('mpi_run', 'mpirun name' , mpi_run_default),  if env['cxx_extra'] != '': env.Append(CXXFLAGS = env['cxx_extra'])
268    PathOption('mpi_lib_path', 'Path to MPI libs (needs to be added to the LD_LIBRARY_PATH)',mpi_lib_path_default),  if env['ld_extra']  != '': env.Append(LINKFLAGS = env['ld_extra'])
269    ('mpi_libs', 'MPI libraries to link with (needs to be shared!)', mpi_libs_default)  
270  )  if env['BADPYTHONMACROS']: env.Append(CXXFLAGS = ' -DBADPYTHONMACROS')
271  #=================================================================================================  
272  #  if env['usepython3']:
273  #   Note: On the Altix the intel compilers are not automatically      env.Append(CPPDEFINES=['ESPYTHON3'])
274  #   detected by scons intelc.py script. The Altix has a different directory  
275  #   path and in some locations the "modules" facility is used to support  # set up the autolazy values
276  #   multiple compiler versions. This forces the need to import the users PATH  if env['forcelazy'] == 'on':
277  #   environment which isn't the "scons way"      env.Append(CPPDEFINES=['FAUTOLAZYON'])
278  #   This doesn't impact linux and windows which will use the default compiler (g++ or msvc, or the intel compiler if it is installed on both platforms)  elif env['forcelazy'] == 'off':
279  #   FIXME: Perhaps a modification to intelc.py will allow better support for ia64 on altix      env.Append(CPPDEFINES=['FAUTOLAZYOFF'])
280  #  
281    # set up the collective resolve values
282  if IS_WINDOWS_PLATFORM:  if env['forcecollres'] == 'on':
283        env = Environment(tools = ['default', 'msvc'], options = opts)      env.Append(CPPDEFINES=['FRESCOLLECTON'])
284        #env = Environment(tools = ['default', 'intelc'], options = opts)  elif env['forcecollres'] == 'off':
285  else:      env.Append(CPPDEFINES=['FRESCOLLECTOFF'])
286     if os.uname()[4]=='ia64':  
287        env = Environment(tools = ['default', 'intelc'], options = opts)  # allow non-standard C if requested
288        if env['CXX'] == 'icpc':  if env['iknowwhatimdoing']:
289           env['LINK'] = env['CXX'] # version >=9 of intel c++ compiler requires use of icpc to link in C++ runtimes (icc does not). FIXME: this behaviour could be directly incorporated into scons intelc.py      env.Append(CPPDEFINES=['IKNOWWHATIMDOING'])
290     else:  
291        env = Environment(tools = ['default'], options = opts)  # Disable OpenMP if no flags provided
292  Help(opts.GenerateHelpText(env))  if env['openmp'] and env['omp_flags'] == '':
293       print("OpenMP requested but no flags provided - disabling OpenMP!")
294  if env['bounds_check']:     env['openmp'] = False
295     env.Append(CPPDEFINES = [ 'BOUNDS_CHECK' ])  
296     env.Append(CXXDEFINES = [ 'BOUNDS_CHECK' ])  if env['openmp']:
297     bounds_check = env['bounds_check']      env.Append(CCFLAGS = env['omp_flags'])
298        if env['omp_ldflags'] != '': env.Append(LINKFLAGS = env['omp_ldflags'])
299    else:
300        env['omp_flags']=''
301        env['omp_ldflags']=''
302    
303    # add debug/non-debug compiler flags
304    if env['debug']:
305        env.Append(CCFLAGS = env['cc_debug'])
306    else:
307        env.Append(CCFLAGS = env['cc_optim'])
308    
309    # always add cc_flags
310    env.Append(CCFLAGS = env['cc_flags'])
311    
312    # add system libraries
313    env.AppendUnique(LIBS = env['sys_libs'])
314    
315    
316    global_revision=ARGUMENTS.get('SVN_VERSION', None)
317    if global_revision:
318        global_revision = re.sub(':.*', '', global_revision)
319        global_revision = re.sub('[^0-9]', '', global_revision)
320        if global_revision == '': global_revision='-2'
321    else:
322      # Get the global Subversion revision number for the getVersion() method
323      try:
324        global_revision = os.popen('svnversion -n .').read()
325        global_revision = re.sub(':.*', '', global_revision)
326        global_revision = re.sub('[^0-9]', '', global_revision)
327        if global_revision == '': global_revision='-2'
328      except:
329        global_revision = '-1'
330    env['svn_revision']=global_revision
331    env.Append(CPPDEFINES=['SVN_VERSION='+global_revision])
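The two re.sub() calls above reduce the output of 'svnversion -n .' to a bare revision number; the same sanitisation as a standalone sketch (the example output string is made up):

import re

raw = '4150:4173M'                  # hypothetical 'svnversion -n .' output
rev = re.sub(':.*', '', raw)        # drop the mixed-revision part -> '4150'
rev = re.sub('[^0-9]', '', rev)     # drop any status letters such as M or S
if rev == '':
    rev = '-2'                      # same fallback as above
print(rev)                          # -> 4150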
332    
333    if IS_WINDOWS:
334        if not env['build_shared']:
335            env.Append(CPPDEFINES = ['ESYSUTILS_STATIC_LIB'])
336            env.Append(CPPDEFINES = ['PASO_STATIC_LIB'])
337    
338    ###################### Copy required environment vars ########################
339    
340    # Windows doesn't use LD_LIBRARY_PATH but PATH instead
341    if IS_WINDOWS:
342        LD_LIBRARY_PATH_KEY='PATH'
343        env['ENV']['LD_LIBRARY_PATH']=''
344  else:  else:
345     bounds_check = 0      LD_LIBRARY_PATH_KEY='LD_LIBRARY_PATH'
346    
347  #=================================================================================================  # the following env variables are exported for the unit tests
 #  
 #     Initialise Scons Build Environment  
 #     check for user environment variables we are interested in  
 try:  
    tmp = os.environ['PYTHONPATH']  
    env['ENV']['PYTHONPATH'] = tmp  
 except KeyError:  
    pass  
348    
349  env.PrependENVPath('PYTHONPATH', source_root)  for key in 'OMP_NUM_THREADS', 'ESCRIPT_NUM_PROCS', 'ESCRIPT_NUM_NODES':
350        try:
351            env['ENV'][key] = os.environ[key]
352        except KeyError:
353            env['ENV'][key] = 1
354    
355  try:  env_export=env['env_export']
356     omp_num_threads = os.environ['OMP_NUM_THREADS']  env_export.extend(['ESCRIPT_NUM_THREADS','ESCRIPT_HOSTFILE','DISPLAY','XAUTHORITY','PATH','HOME','KMP_MONITOR_STACKSIZE','TMPDIR','TEMP','TMP'])
357  except KeyError:  
358     omp_num_threads = 1  for key in set(env_export):
359  env['ENV']['OMP_NUM_THREADS'] = omp_num_threads      try:
360            env['ENV'][key] = os.environ[key]
361        except KeyError:
362            pass
363    
364  try:  try:
365     path = os.environ['PATH']      env.PrependENVPath(LD_LIBRARY_PATH_KEY, os.environ[LD_LIBRARY_PATH_KEY])
    env['ENV']['PATH'] = path  
366  except KeyError:  except KeyError:
367     omp_num_threads = 1      pass
   
 env['ENV']['OMP_NUM_THREADS'] = omp_num_threads  
368    
369    # these shouldn't be needed
370    #for key in 'C_INCLUDE_PATH','CPLUS_INCLUDE_PATH','LIBRARY_PATH':
371    #    try:
372    #        env['ENV'][key] = os.environ[key]
373    #    except KeyError:
374    #        pass
375    
 # Copy some variables from the system environment to the build environment  
376  try:  try:
377     env['ENV']['DISPLAY'] = os.environ['DISPLAY']      env['ENV']['PYTHONPATH'] = os.environ['PYTHONPATH']
    env['ENV']['XAUTHORITY'] = os.environ['XAUTHORITY']  
    home_temp = os.environ['HOME']   # MPICH2's mpd needs $HOME to find $HOME/.mpd.conf  
    env['ENV']['HOME'] = home_temp  
378  except KeyError:  except KeyError:
379     pass      pass
380    
381  try:  ######################## Add some custom builders ############################
    tmp = os.environ['PATH']  
    env['ENV']['PATH'] = tmp  
 except KeyError:  
    pass  
382    
383  try:  if env['pythoncmd']=='python':
384     tmp = os.environ['LD_LIBRARY_PATH']      py_builder = Builder(action = build_py, suffix = '.pyc', src_suffix = '.py', single_source=True)
385     print tmp  else:
386     env['ENV']['LD_LIBRARY_PATH'] = tmp      py_builder = Builder(action = env['pythoncmd']+" scripts/py_comp.py $SOURCE $TARGET", suffix = '.pyc', src_suffix = '.py', single_source=True)
 except KeyError:  
    pass  
 #==========================================================================  
 #  
 #    Add some customer builders  
 #  
 py_builder = Builder(action = scons_extensions.build_py, suffix = '.pyc', src_suffix = '.py', single_source=True)  
387  env.Append(BUILDERS = {'PyCompile' : py_builder});  env.Append(BUILDERS = {'PyCompile' : py_builder});
388    
389  runUnitTest_builder = Builder(action = scons_extensions.runUnitTest, suffix = '.passed',  runUnitTest_builder = Builder(action = runUnitTest, suffix = '.passed', src_suffix=env['PROGSUFFIX'], single_source=True)
                               src_suffix=env['PROGSUFFIX'], single_source=True)  
   
390  env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder});  env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder});
391    
392  runPyUnitTest_builder = Builder(action = scons_extensions.runPyUnitTest, suffix = '.passed', src_suffix='.py', single_source=True)  runPyUnitTest_builder = Builder(action = runPyUnitTest, suffix = '.passed', src_suffix='.py', single_source=True)
393  env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder});  env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder});
394    
395  # Convert the options which are held in environment variable into python variables for ease of handling and configure compilation options  epstopdfbuilder = Builder(action = eps2pdf, suffix='.pdf', src_suffix='.eps', single_source=True)
396  try:  env.Append(BUILDERS = {'EpsToPDF' : epstopdfbuilder});
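Once registered, these builders are called on the environment like any other SCons builder; a sketch of how a SConscript might use them (all file names are hypothetical):

# illustrative use of the custom builders registered above
env.PyCompile('esys/escript/util.py')       # produces util.pyc
env.RunPyUnitTest('run_simplesolve.py')     # touches run_simplesolve.passed on success
env.EpsToPDF('doc/figures/mesh.eps')        # converts mesh.eps to mesh.pdf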
    incinstall = env['incinstall']  
    env.Append(CPPPATH = [incinstall,])  
 except KeyError:  
    incinstall = None  
 try:  
    libinstall = env['libinstall']  
    env.Append(LIBPATH = [libinstall,]) # Adds -L for building of libescript.so libfinley.so escriptcpp.so finleycpp.so  
    env.PrependENVPath('LD_LIBRARY_PATH', libinstall)  
    if IS_WINDOWS_PLATFORM :  
       env.PrependENVPath('PATH', libinstall)  
       env.PrependENVPath('PATH', env['boost_lib_path'])  
 except KeyError:  
    libinstall = None  
 try:  
    pyinstall = env['pyinstall'] # all targets will install into pyinstall/esys but PYTHONPATH points at straight pyinstall so you go import esys.escript etc  
 except KeyError:  
    pyinstall = None  
   
 try:  
    cc_defines = env['cc_defines']  
    env.Append(CPPDEFINES = cc_defines)  
 except KeyError:  
    pass  
 try:  
    flags = env['ar_flags']  
    env.Append(ARFLAGS = flags)  
 except KeyError:  
    ar_flags = None  
 try:  
    sys_libs = env['sys_libs']  
 except KeyError:  
    sys_libs = []  
   
 try:  
    tar_flags = env['tar_flags']  
    env.Replace(TARFLAGS = tar_flags)  
 except KeyError:  
    pass  
   
 try:  
    exinstall = env['exinstall']  
 except KeyError:  
    exinstall = None  
 try:  
    sys_libinstall = env['sys_libinstall']  
 except KeyError:  
    sys_libinstall = None  
 try:  
    sys_pyinstall = env['sys_pyinstall']  
 except KeyError:  
    sys_pyinstall = None  
 try:  
    sys_exinstall = env['sys_exinstall']  
 except KeyError:  
    sys_exinstall = None  
   
 # ====================== debugging ===================================  
 try:  
    dodebug = env['dodebug']  
 except KeyError:  
    dodebug = None  
397    
398  # === switch on omp ===================================================  ############################ Dependency checks ###############################
 try:  
   omp_flags = env['omp_flags']  
 except KeyError:  
   omp_flags = ''  
399    
400  try:  # Create a Configure() environment to check for compilers and python
401    omp_flags_debug = env['omp_flags_debug']  conf = Configure(env.Clone())
 except KeyError:  
   omp_flags_debug = ''  
402    
403  # ========= use mpi? =====================================================  ######## Test that the compilers work
404  try:  
405     useMPI = env['useMPI']  if 'CheckCC' in dir(conf): # exists since scons 1.1.0
406  except KeyError:      if not conf.CheckCC():
407     useMPI = None          print("Cannot run C compiler '%s' (check config.log)" % (env['CC']))
408  # ========= set compiler flags ===========================================          Exit(1)
409        if not conf.CheckCXX():
410            print("Cannot run C++ compiler '%s' (check config.log)" % (env['CXX']))
411            Exit(1)
412    else:
413        if not conf.CheckFunc('printf', language='c'):
414            print("Cannot run C compiler '%s' (check config.log)" % (env['CC']))
415            Exit(1)
416        if not conf.CheckFunc('printf', language='c++'):
417            print("Cannot run C++ compiler '%s' (check config.log)" % (env['CXX']))
418            Exit(1)
419    
420    if conf.CheckFunc('gethostname'):
421        conf.env.Append(CPPDEFINES = ['HAVE_GETHOSTNAME'])
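The Configure()/Check*() calls used in this section follow SCons' standard autoconf-style probing; a minimal self-contained sketch of the pattern (the header name is only an example):

# minimal configure-check sketch; 'zlib.h' is an arbitrary example header
conf_demo = Configure(env.Clone())
if conf_demo.CheckCHeader('zlib.h'):
    conf_demo.env.Append(CPPDEFINES=['HAVE_ZLIB_H'])
env_demo = conf_demo.Finish()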
422    
423    ######## Python headers & library (required)
424    
425    # First we check to see if the config file has specified where to find
426    # the files. Ideally, this should be automatic, but we need to deal with
427    # the case where python is not in its INSTALL directory.
428    #
429    # Use the python scons is running
430    if env['pythoncmd']=='python':
431        python_inc_path=sysconfig.get_python_inc()
432        if IS_WINDOWS:
433            python_lib_path=os.path.join(sysconfig.get_config_var('prefix'), 'libs')
434        elif env['PLATFORM']=='darwin':
435            python_lib_path=sysconfig.get_config_var('LIBPL')
436        else:
437            python_lib_path=sysconfig.get_config_var('LIBDIR')
438    
439        #python_libs=[sysconfig.get_config_var('LDLIBRARY')] # only on linux
440        if IS_WINDOWS:
441            python_libs=['python%s%s'%(sys.version_info[0], sys.version_info[1])]
442        else:
443            python_libs=['python'+sysconfig.get_python_version()]
444    
445    #if we want to use a python other than the one scons is running
446    else:
447        initstring='from __future__ import print_function;from distutils import sysconfig;'
448        if env['pythonlibname']!='':
449            python_libs=env['pythonlibname']
450        else:   # work it out by calling python    
451            if IS_WINDOWS:
452              cmd='import sys;print("python%s%s"%(sys.version_info[0], sys.version_info[1]))'
453            else:
454                cmd='print("python"+sysconfig.get_python_version())'
455            p=Popen([env['pythoncmd'], '-c', initstring+cmd], stdout=PIPE)
456            python_libs=p.stdout.readline()
457            if env['usepython3']:       # This is to convert unicode str into py2 string
458                python_libs=python_libs.encode() # If scons runs on py3 then this must be rethought
459            p.wait()
460            python_libs=python_libs.strip()
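The Popen call above asks the configured interpreter, which may differ from the python running scons, for its own library name; the equivalent as a standalone sketch (the interpreter path is an assumption):

from subprocess import PIPE, Popen

# mirrors the query performed above; '/usr/bin/python2.7' stands in for pythoncmd
query = ('from __future__ import print_function;'
         'from distutils import sysconfig;'
         'print("python"+sysconfig.get_python_version())')
p = Popen(['/usr/bin/python2.7', '-c', query], stdout=PIPE)
libname = p.stdout.readline().strip()
p.wait()
print(libname)                      # e.g. python2.7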
461    
 if dodebug:  
     try:  
       flags = env['cc_flags_debug'] + ' ' + omp_flags_debug  
       env.Append(CCFLAGS = flags)  
     except KeyError:  
       pass  
 else:  
    try:  
       flags = env['cc_flags'] + ' ' + omp_flags  
       env.Append(CCFLAGS = flags)  
    except KeyError:  
       pass  
 if dodebug:  
      try:  
         flags = env['cxx_flags_debug']  
         env.Append(CXXFLAGS = flags)  
      except KeyError:  
         pass  
 else:  
      try:  
         flags = env['cxx_flags']  
         env.Append(CXXFLAGS = flags)  
      except KeyError:  
         pass  
 try:  
      if env['CC'] == 'gcc': env.Append(CCFLAGS = "-pedantic-errors -Wno-long-long")  
 except:  
      pass  
   
 # ============= Remember what options were used in the compile =====================================  
 if not IS_WINDOWS_PLATFORM:  
   env.Execute("/bin/rm -f " + libinstall + "/Compiled.with.*")  
   if dodebug:       env.Execute("touch " + libinstall + "/Compiled.with.debug")  
   if useMPI:        env.Execute("touch " + libinstall + "/Compiled.with.mpi")  
   if omp_flags != '':   env.Execute("touch " + libinstall + "/Compiled.with.OpenMP")  
   if bounds_check:  env.Execute("touch " + libinstall + "/Compiled.with.bounds_check")  
   
 # ============= set mkl (but only of no MPI) =====================================  
 if not useMPI:  
    try:  
       includes = env['mkl_path']  
       env.Append(CPPPATH = [includes,])  
    except KeyError:  
       pass  
   
    try:  
       lib_path = env['mkl_lib_path']  
       env.Append(LIBPATH = [lib_path,])  
       env['ENV']['LD_LIBRARY_PATH']+=":"+lib_path  
    except KeyError:  
       pass  
   
    try:  
       mkl_libs = env['mkl_libs']  
    except KeyError:  
       mkl_libs = []  
 else:  
      mkl_libs = []  
   
 # ============= set scsl (but only of no MPI) =====================================  
 if not useMPI:  
    try:  
       includes = env['scsl_path']  
       env.Append(CPPPATH = [includes,])  
    except KeyError:  
       pass  
   
    try:  
       lib_path = env['scsl_lib_path']  
       env.Append(LIBPATH = [lib_path,])  
       env['ENV']['LD_LIBRARY_PATH']+=":"+lib_path  
    except KeyError:  
       pass  
462        
463     try:      # Now we know whether we are using python3 or not
464        scsl_libs = env['scsl_libs']      p=Popen([env['pythoncmd'], '-c',  initstring+'print(sysconfig.get_python_inc())'], stdout=PIPE)
465     except KeyError:      python_inc_path=p.stdout.readline()
466        scsl_libs = [ ]      if env['usepython3']:
467             python_inc_path=python_inc_path.encode()
468  else:      p.wait()  
469      scsl_libs =  []      python_inc_path=python_inc_path.strip()
470        if IS_WINDOWS:
471  # ============= set TRILINOS (but only with MPI) =====================================          cmd="os.path.join(sysconfig.get_config_var('prefix'), 'libs')"
472  if useMPI:      elif env['PLATFORM']=='darwin':
473     try:          cmd="sysconfig.get_config_var(\"LIBPL\")"
474        includes = env['trilinos_path']      else:
475        env.Append(CPPPATH = [includes,])          cmd="sysconfig.get_config_var(\"LIBDIR\")"
476     except KeyError:  
477        pass      p=Popen([env['pythoncmd'], '-c', initstring+'print('+cmd+')'], stdout=PIPE)
478        python_lib_path=p.stdout.readline()
479     try:      if env['usepython3']:
480        lib_path = env['trilinos_lib_path']          python_lib_path=python_lib_path.decode()
481        env.Append(LIBPATH = [lib_path,])      p.wait()
482     except KeyError:      python_lib_path=python_lib_path.strip()
483        pass  
484    #Check for an override from the config file.
485     try:  #Ideally, this should be automatic
486        trilinos_libs = env['trilinos_libs']  #But we need to deal with the case where python is not in its INSTALL
487     except KeyError:  #Directory
488        trilinos_libs = []  if env['pythonlibpath']!='':
489  else:      python_lib_path=env['pythonlibpath']
490       trilinos_libs = []  
491    if env['pythonincpath']!='':
492        python_inc_path=env['pythonincpath']
493  # ============= set umfpack (but only without MPI) =====================================  
494  umf_libs=[ ]  
495  if not useMPI:  if sysheaderopt == '':
496     try:      conf.env.AppendUnique(CPPPATH = [python_inc_path])
497        includes = env['umf_path']  else:
498        env.Append(CPPPATH = [includes,])      conf.env.Append(CCFLAGS = [sysheaderopt, python_inc_path])
499     except KeyError:  
500        pass  conf.env.AppendUnique(LIBPATH = [python_lib_path])
501    conf.env.AppendUnique(LIBS = python_libs)
502     try:  # The wrapper script needs to find the libs
503        lib_path = env['umf_lib_path']  conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, python_lib_path)
504        env.Append(LIBPATH = [lib_path,])  
505     except KeyError:  if not conf.CheckCHeader('Python.h'):
506        pass      print("Cannot find python include files (tried 'Python.h' in directory %s)" % (python_inc_path))
507        Exit(1)
508     try:  if not conf.CheckFunc('Py_Exit'):
509        umf_libs = env['umf_libs']      print("Cannot find python library method Py_Exit (tried %s in directory %s)" % (python_libs, python_lib_path))
510        umf_libs+=umf_libs      Exit(1)
511     except KeyError:  
512        pass  ## reuse conf to check for numpy header (optional)
513    if env['usepython3']:
514     try:      # FIXME: This is until we can work out how to make the checks in python 3
515        includes = env['ufc_path']      conf.env['numpy_h']=False
516        env.Append(CPPPATH = [includes,])  else:
517     except KeyError:      if conf.CheckCXXHeader(['Python.h','numpy/ndarrayobject.h']):
518        pass          conf.env.Append(CPPDEFINES = ['HAVE_NUMPY_H'])
519            conf.env['numpy_h']=True
520     try:      else:
521        includes = env['amd_path']          conf.env['numpy_h']=False
522        env.Append(CPPPATH = [includes,])  
523     except KeyError:  # Commit changes to environment
524        pass  env = conf.Finish()
525    
526     try:  ######## boost (required)
527        lib_path = env['amd_lib_path']  
528        env.Append(LIBPATH = [lib_path,])  boost_inc_path,boost_lib_path=findLibWithHeader(env, env['boost_libs'], 'boost/python.hpp', env['boost_prefix'], lang='c++')
529     except KeyError:  if sysheaderopt == '':
530        pass      env.AppendUnique(CPPPATH = [boost_inc_path])
531    else:
532     try:      # This is required because we can't -isystem /usr/include since it breaks
533        amd_libs = env['amd_libs']      # std includes
534        umf_libs+=amd_libs      if os.path.normpath(boost_inc_path) == '/usr/include':
535     except KeyError:          conf.env.Append(CCFLAGS=[sysheaderopt, os.path.join(boost_inc_path,'boost')])
536        pass      else:
537            env.Append(CCFLAGS=[sysheaderopt, boost_inc_path])
538  # ============= set TRILINOS (but only with MPI) =====================================  
539  if useMPI:  env.AppendUnique(LIBPATH = [boost_lib_path])
540     try:  env.AppendUnique(LIBS = env['boost_libs'])
541        includes = env['trilinos_path']  env.PrependENVPath(LD_LIBRARY_PATH_KEY, boost_lib_path)
542        env.Append(CPPPATH = [includes,])  
543     except KeyError:  ######## numpy (required)
544        pass  
545    if not detectModule(env, 'numpy'):
546     try:      print("Cannot import numpy. If it is installed try setting your PYTHONPATH and probably %s"%LD_LIBRARY_PATH_KEY)
547        lib_path = env['trilinos_lib_path']      Exit(1)
548        env.Append(LIBPATH = [lib_path,])  
549     except KeyError:  ######## CppUnit (required for tests)
550        pass  
551    try:
552     try:      cppunit_inc_path,cppunit_lib_path=findLibWithHeader(env, env['cppunit_libs'], 'cppunit/TestFixture.h', env['cppunit_prefix'], lang='c++')
553        trilinos_libs = env['trilinos_libs']      env.AppendUnique(CPPPATH = [cppunit_inc_path])
554     except KeyError:      env.AppendUnique(LIBPATH = [cppunit_lib_path])
555        trilinos_libs = []      env.PrependENVPath(LD_LIBRARY_PATH_KEY, cppunit_lib_path)
556  else:      env['cppunit']=True
557       trilinos_libs = []  except:
558        env['cppunit']=False
 # ============= set blas =====================================  
 try:  
    includes = env['blas_path']  
    env.Append(CPPPATH = [includes,])  
 except KeyError:  
    pass  
559    
560  try:  ######## sympy (optional)
    lib_path = env['blas_lib_path']  
    env.Append(LIBPATH = [lib_path,])  
 except KeyError:  
    pass  
561    
562  try:  if detectModule(env, 'sympy'):
563     blas_libs = env['blas_libs']      env['sympy'] = True
 except KeyError:  
    blas_libs = [ ]  
   
 # ========== netcdf ====================================  
 try:  
    useNetCDF = env['useNetCDF']  
 except KeyError:  
    useNetCDF = 'yes'  
    pass  
       
 if useNetCDF == 'yes':  
    try:  
       netCDF_libs = env['netCDF_libs']  
    except KeyError:  
       pass  
   
    env.Append(LIBS = netCDF_libs)  
    env.Append(CPPDEFINES = [ 'USE_NETCDF' ])  
    try:  
       includes = env['netCDF_path']  
       env.Append(CPPPATH = [includes,])  
    except KeyError:  
       pass  
   
    try:  
       lib_path = env['netCDF_lib_path']  
       env.Append(LIBPATH = [ lib_path, ])  
       env['ENV']['LD_LIBRARY_PATH']+=":"+lib_path  
       if IS_WINDOWS_PLATFORM :  
          env.PrependENVPath('PATH', lib_path)  
    except KeyError:  
       pass  
564  else:  else:
565     print "Warning: Installation is not configured with netCDF. Some I/O functions may not be available."  print("Cannot import sympy. Symbolic toolbox and nonlinear PDEs will not be available.")
566     netCDF_libs=[ ]      env['sympy'] = False
567    
568  # ====================== boost ======================================  ######## netCDF (optional)
569  try:  
570     includes = env['boost_path']  netcdf_inc_path=''
571     env.Append(CPPPATH = [includes,])  netcdf_lib_path=''
572  except KeyError:  if env['netcdf']:
573     pass      netcdf_inc_path,netcdf_lib_path=findLibWithHeader(env, env['netcdf_libs'], 'netcdf.h', env['netcdf_prefix'], lang='c++')
574  try:      env.AppendUnique(CPPPATH = [netcdf_inc_path])
575     lib_path = env['boost_lib_path']      env.AppendUnique(LIBPATH = [netcdf_lib_path])
576     env.Append(LIBPATH = [lib_path,])      env.AppendUnique(LIBS = env['netcdf_libs'])
577     env['ENV']['LD_LIBRARY_PATH']+=":"+lib_path      env.PrependENVPath(LD_LIBRARY_PATH_KEY, netcdf_lib_path)
578     if IS_WINDOWS_PLATFORM :      env.Append(CPPDEFINES = ['USE_NETCDF'])
579        env.PrependENVPath('PATH', lib_path)  
580  except KeyError:  ######## PAPI (optional)
581     pass  
582  try:  papi_inc_path=''
583     boost_lib = env['boost_lib']  papi_lib_path=''
584  except KeyError:  if env['papi']:
585     boost_lib = None      papi_inc_path,papi_lib_path=findLibWithHeader(env, env['papi_libs'], 'papi.h', env['papi_prefix'], lang='c')
586  # ====================== python ======================================      env.AppendUnique(CPPPATH = [papi_inc_path])
587  try:      env.AppendUnique(LIBPATH = [papi_lib_path])
588     includes = env['python_path']      env.AppendUnique(LIBS = env['papi_libs'])
589     env.Append(CPPPATH = [includes,])      env.PrependENVPath(LD_LIBRARY_PATH_KEY, papi_lib_path)
590  except KeyError:      env.Append(CPPDEFINES = ['BLOCKPAPI'])
591     pass  
592  try:  ######## MKL (optional)
593     lib_path = env['python_lib_path']  
594     env['ENV']['LD_LIBRARY_PATH']+=":"+lib_path  mkl_inc_path=''
595     env.Append(LIBPATH = [lib_path,])  mkl_lib_path=''
596  except KeyError:  if env['mkl']:
597     pass      mkl_inc_path,mkl_lib_path=findLibWithHeader(env, env['mkl_libs'], 'mkl_solver.h', env['mkl_prefix'], lang='c')
598  try:      env.AppendUnique(CPPPATH = [mkl_inc_path])
599     python_lib = env['python_lib']      env.AppendUnique(LIBPATH = [mkl_lib_path])
600  except KeyError:      env.AppendUnique(LIBS = env['mkl_libs'])
601     python_lib = None      env.PrependENVPath(LD_LIBRARY_PATH_KEY, mkl_lib_path)
602  # =============== documentation =======================================      env.Append(CPPDEFINES = ['MKL'])
603  try:  
604     doxygen_path = env['doxygen_path']  ######## UMFPACK (optional)
605  except KeyError:  
606     doxygen_path = None  umfpack_inc_path=''
607  try:  umfpack_lib_path=''
608     epydoc_path = env['epydoc_path']  if env['umfpack']:
609  except KeyError:      umfpack_inc_path,umfpack_lib_path=findLibWithHeader(env, env['umfpack_libs'], 'umfpack.h', env['umfpack_prefix'], lang='c')
610     epydoc_path = None      env.AppendUnique(CPPPATH = [umfpack_inc_path])
611  # =============== ParMETIS =======================================      env.AppendUnique(LIBPATH = [umfpack_lib_path])
612  try:      env.AppendUnique(LIBS = env['umfpack_libs'])
613     parmetis_path = env['parmetis_path']      env.PrependENVPath(LD_LIBRARY_PATH_KEY, umfpack_lib_path)
614     parmetis_lib_path = env['parmetis_lib_path']      env.Append(CPPDEFINES = ['UMFPACK'])
615     parmetis_lib = env['parmetis_lib']  
616  except KeyError:  ######## LAPACK (optional)
617     parmetis_path = ''  
618     parmetis_lib_path = ''  if env['lapack']=='mkl' and not env['mkl']:
619     parmetis_lib = ''      print("mkl_lapack requires MKL!")
620        Exit(1)
621  if useMPI and os.path.isdir(parmetis_lib_path):  
622     env.Append(CPPDEFINES = [ 'PARMETIS' ])  env['uselapack'] = env['lapack']!='none'
623     env.Append(CXXDEFINES = [ 'PARMETIS' ])  lapack_inc_path=''
624     env.Append(CPPPATH = [parmetis_path])  lapack_lib_path=''
625     env.Append(LIBPATH = [parmetis_lib_path])  if env['uselapack']:
626     env.Append(LIBS = parmetis_lib)      header='clapack.h'
627  # =============== PAPI =======================================      if env['lapack']=='mkl':
628  try:          env.AppendUnique(CPPDEFINES = ['MKL_LAPACK'])
629     includes = env['papi_path']          header='mkl_lapack.h'
630     env.Append(CPPPATH = [includes,])      lapack_inc_path,lapack_lib_path=findLibWithHeader(env, env['lapack_libs'], header, env['lapack_prefix'], lang='c')
631  except KeyError:      env.AppendUnique(CPPPATH = [lapack_inc_path])
632     pass      env.AppendUnique(LIBPATH = [lapack_lib_path])
633  try:      env.AppendUnique(LIBS = env['lapack_libs'])
634     lib_path = env['papi_lib_path']      env.Append(CPPDEFINES = ['USE_LAPACK'])
635     env.Append(LIBPATH = [lib_path,])  
636  except KeyError:  ######## Silo (optional)
637     pass  
638  try:  silo_inc_path=''
639     papi_libs = env['papi_libs']  silo_lib_path=''
640  except KeyError:  if env['silo']:
641     papi_libs = None      silo_inc_path,silo_lib_path=findLibWithHeader(env, env['silo_libs'], 'silo.h', env['silo_prefix'], lang='c')
642  # ============= set mpi =====================================      env.AppendUnique(CPPPATH = [silo_inc_path])
643  if useMPI:      env.AppendUnique(LIBPATH = [silo_lib_path])
644     env.Append(CPPDEFINES=['PASO_MPI',])      # Note that we do not add the libs since they are only needed for the
645     try:      # weipa library and tools.
646        includes = env['mpi_path']      #env.AppendUnique(LIBS = [env['silo_libs']])
647        env.Append(CPPPATH = [includes,])  
648     except KeyError:  ######## VSL random numbers (optional)
649        pass  if env['vsl_random']:
650     try:      env.Append(CPPDEFINES = ['MKLRANDOM'])
651        lib_path = env['mpi_lib_path']  
652        env.Append(LIBPATH = [lib_path,])  ######## VisIt (optional)
653        env['ENV']['LD_LIBRARY_PATH']+=":"+lib_path  
654     except KeyError:  visit_inc_path=''
655        pass  visit_lib_path=''
656     try:  if env['visit']:
657        mpi_libs = env['mpi_libs']      visit_inc_path,visit_lib_path=findLibWithHeader(env, env['visit_libs'], 'VisItControlInterface_V2.h', env['visit_prefix'], lang='c')
658     except KeyError:      env.AppendUnique(CPPPATH = [visit_inc_path])
659        mpi_libs = []      env.AppendUnique(LIBPATH = [visit_lib_path])
660    
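
Each enabled package also prepends its library directory to the runtime loader path via env.PrependENVPath(LD_LIBRARY_PATH_KEY, ...). LD_LIBRARY_PATH_KEY is defined earlier in the file, outside this excerpt; the following is only an assumption about the usual per-platform choice, not the project's actual definition:

    import os, platform

    # Assumption: the real definition earlier in SConstruct may differ.
    if os.name == 'nt':
        LD_LIBRARY_PATH_KEY = 'PATH'              # Windows has no LD_LIBRARY_PATH
    elif platform.system() == 'Darwin':
        LD_LIBRARY_PATH_KEY = 'DYLD_LIBRARY_PATH'
    else:
        LD_LIBRARY_PATH_KEY = 'LD_LIBRARY_PATH'
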
661     try:  ######## MPI (optional)
662        mpi_run = env['mpi_run']  
663     except KeyError:  if env['mpi']=='no':
664        mpi_run = ''      env['mpi']='none'
665    
666     try:  env['usempi'] = env['mpi']!='none'
667         mpich_ignore_cxx_seek=env['MPICH_IGNORE_CXX_SEEK']  mpi_inc_path=''
668         env.Append(CPPDEFINES = [ mpich_ignore_cxx_seek ] )  mpi_lib_path=''
669     except KeyError:  if env['usempi']:
670        pass      mpi_inc_path,mpi_lib_path=findLibWithHeader(env, env['mpi_libs'], 'mpi.h', env['mpi_prefix'], lang='c')
671  else:      env.AppendUnique(CPPPATH = [mpi_inc_path])
672    mpi_libs=[]      env.AppendUnique(LIBPATH = [mpi_lib_path])
673    mpi_run = mpi_run_default      env.AppendUnique(LIBS = env['mpi_libs'])
674  # =========== zip files ===========================================      env.PrependENVPath(LD_LIBRARY_PATH_KEY, mpi_lib_path)
675  try:      env.Append(CPPDEFINES = ['ESYS_MPI', 'MPI_NO_CPPBIND', 'MPICH_IGNORE_CXX_SEEK'])
676     includes = env['papi_path']      # NetCDF 4.1 defines MPI_Comm et al. if MPI_INCLUDED is not defined!
677     env.Append(CPPPATH = [includes,])      # On the other hand MPT and OpenMPI don't define the latter so we have to
678  except KeyError:      # do that here
679     pass      if env['netcdf'] and env['mpi'] in ['MPT','OPENMPI']:
680  try:          env.Append(CPPDEFINES = ['MPI_INCLUDED'])
681     lib_path = env['papi_lib_path']  
682     env.Append(LIBPATH = [lib_path,])  ######## BOOMERAMG (optional)
683  except KeyError:  
684     pass  if env['mpi'] == 'none': env['boomeramg'] = False
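
The MPI block above always adds ESYS_MPI, MPI_NO_CPPBIND and MPICH_IGNORE_CXX_SEEK, and adds MPI_INCLUDED only for MPT or OpenMPI builds that also use NetCDF. A small stand-alone illustration of that decision (not part of the build):

    def mpi_defines(flavour, netcdf):
        # Mirrors the logic above: 'no'/'none' means MPI is disabled.
        if flavour in ('no', 'none'):
            return []
        defines = ['ESYS_MPI', 'MPI_NO_CPPBIND', 'MPICH_IGNORE_CXX_SEEK']
        if netcdf and flavour in ('MPT', 'OPENMPI'):
            defines.append('MPI_INCLUDED')
        return defines

    # e.g. mpi_defines('OPENMPI', True)
    # -> ['ESYS_MPI', 'MPI_NO_CPPBIND', 'MPICH_IGNORE_CXX_SEEK', 'MPI_INCLUDED']
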
685  try:  
686     papi_libs = env['papi_libs']  boomeramg_inc_path=''
687  except KeyError:  boomeramg_lib_path=''
688     papi_libs = None  if env['boomeramg']:
689  try:      boomeramg_inc_path,boomeramg_lib_path=findLibWithHeader(env, env['boomeramg_libs'], 'HYPRE.h', env['boomeramg_prefix'], lang='c')
690     papi_instrument_solver = env['papi_instrument_solver']      env.AppendUnique(CPPPATH = [boomeramg_inc_path])
691  except KeyError:      env.AppendUnique(LIBPATH = [boomeramg_lib_path])
692     papi_instrument_solver = None      env.AppendUnique(LIBS = env['boomeramg_libs'])
693        env.PrependENVPath(LD_LIBRARY_PATH_KEY, boomeramg_lib_path)
694        env.Append(CPPDEFINES = ['BOOMERAMG'])
695  # ============= and some helpers =====================================  
696  try:  ######## ParMETIS (optional)
697     doxygen_path = env['doxygen_path']  
698  except KeyError:  if not env['usempi']: env['parmetis'] = False
699     doxygen_path = None  
700  try:  parmetis_inc_path=''
701     epydoc_path = env['epydoc_path']  parmetis_lib_path=''
702  except KeyError:  if env['parmetis']:
703     epydoc_path = None      parmetis_inc_path,parmetis_lib_path=findLibWithHeader(env, env['parmetis_libs'], 'parmetis.h', env['parmetis_prefix'], lang='c')
704  try:      env.AppendUnique(CPPPATH = [parmetis_inc_path])
705     src_zipfile = env.File(env['src_zipfile'])      env.AppendUnique(LIBPATH = [parmetis_lib_path])
706  except KeyError:      env.AppendUnique(LIBS = env['parmetis_libs'])
707     src_zipfile = None      env.PrependENVPath(LD_LIBRARY_PATH_KEY, parmetis_lib_path)
708  try:      env.Append(CPPDEFINES = ['USE_PARMETIS'])
709     test_zipfile = env.File(env['test_zipfile'])  
710  except KeyError:  ######## gmsh (optional, for tests)
711     test_zipfile = None  
712  try:  try:
713     examples_zipfile = env.File(env['examples_zipfile'])      p=Popen(['gmsh', '-info'], stderr=PIPE)
714  except KeyError:      _,e=p.communicate()
715     examples_zipfile = None      if e.split().count("MPI"):
716            env['gmsh']='m'
717  try:      else:
718     src_tarfile = env.File(env['src_tarfile'])          env['gmsh']='s'
719  except KeyError:  except OSError:
720     src_tarfile = None      env['gmsh']=False
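
The gmsh probe above reads the stderr of `gmsh -info` and looks for the token MPI to decide between an MPI-enabled ('m') and a serial ('s') binary. Under Python 3 communicate() returns bytes, so a slightly more defensive variant (a sketch, not the project's code) would decode first:

    from subprocess import PIPE, Popen

    def detect_gmsh():
        try:
            p = Popen(['gmsh', '-info'], stderr=PIPE)
            _, err = p.communicate()
            if not isinstance(err, str):
                err = err.decode('utf-8', 'replace')
            return 'm' if 'MPI' in err.split() else 's'
        except OSError:
            return False        # gmsh not installed
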
721  try:  
722     test_tarfile = env.File(env['test_tarfile'])  ######## PDFLaTeX (for documentation)
723  except KeyError:  if 'PDF' in dir(env) and '.tex' in env.PDF.builder.src_suffixes(env):
724     test_tarfile = None      env['pdflatex']=True
725  try:  else:
726     examples_tarfile = env.File(env['examples_tarfile'])      env['pdflatex']=False
727  except KeyError:  
728     examples_tarfile = None  ######################## Summarize our environment ###########################
729    
730  try:  # keep some of our install paths first in the list for the unit tests
731     guide_pdf = env.File(env['guide_pdf'])  env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])
732  except KeyError:  env.PrependENVPath('PYTHONPATH', prefix)
733     guide_pdf = None  env['ENV']['ESCRIPT_ROOT'] = prefix
734    
735  try:  if not env['verbose']:
736     guide_html_index = env.File('index.htm',env['guide_html'])      env['CCCOMSTR'] = "Compiling $TARGET"
737  except KeyError:      env['CXXCOMSTR'] = "Compiling $TARGET"
738     guide_html_index = None      env['SHCCCOMSTR'] = "Compiling $TARGET"
739        env['SHCXXCOMSTR'] = "Compiling $TARGET"
740  try:      env['ARCOMSTR'] = "Linking $TARGET"
741     api_epydoc = env.Dir(env['api_epydoc'])      env['LINKCOMSTR'] = "Linking $TARGET"
742  except KeyError:      env['SHLINKCOMSTR'] = "Linking $TARGET"
743     api_epydoc = None      env['PDFLATEXCOMSTR'] = "Building $TARGET from LaTeX input $SOURCES"
744        env['BIBTEXCOMSTR'] = "Generating bibliography $TARGET"
745  try:      env['MAKEINDEXCOMSTR'] = "Generating index $TARGET"
746     api_doxygen = env.Dir(env['api_doxygen'])      env['PDFLATEXCOMSTR'] = "Building $TARGET from LaTeX input $SOURCES"
747  except KeyError:      #Progress(['Checking -\r', 'Checking \\\r', 'Checking |\r', 'Checking /\r'], interval=17)
748     api_doxygen = None  
749    print("")
750  try:  print("*** Config Summary (see config.log and lib/buildvars for details) ***")
751     svn_pipe = os.popen("svnversion -n .")  print("Escript/Finley revision %s"%global_revision)
752     global_revision = svn_pipe.readlines()  print("  Install prefix:  %s"%env['prefix'])
753     svn_pipe.close()  print("          Python:  %s"%sysconfig.PREFIX)
754     global_revision = re.sub(":.*", "", global_revision[0])  print("           boost:  %s"%env['boost_prefix'])
755     global_revision = re.sub("[^0-9]", "", global_revision)  print("           numpy:  YES")
756  except:  if env['usempi']:
757     global_revision="-1"      print("             MPI:  YES (flavour: %s)"%env['mpi'])
758     print "Warning: unable to recover global revision number."  else:
759  if global_revision == "": global_revision="0"      print("             MPI:  DISABLED")
760  print "Revision number is %s."%global_revision  if env['uselapack']:
761  env.Append(CPPDEFINES = "SVN_VERSION="+global_revision)      print("          LAPACK:  YES (flavour: %s)"%env['lapack'])
762    else:
763  # Python install - esys __init__.py      print("          LAPACK:  DISABLED")
764  init_target = env.Command(pyinstall+'/__init__.py', None, Touch('$TARGET'))  d_list=[]
765    e_list=[]
766  # FIXME: exinstall and friends related to examples are not working.  for i in 'debug','openmp','boomeramg','mkl','netcdf','papi','parmetis','silo','sympy','umfpack','visit','vsl_random':
767  build_target = env.Alias('build',[libinstall,incinstall,pyinstall,init_target])      if env[i]: e_list.append(i)
768        else: d_list.append(i)
769  env.Default(build_target)  for i in e_list:
770        print("%16s:  YES"%i)
771  # Zip targets  for i in d_list:
772  env.Alias('release_src',[ src_zipfile, src_tarfile ])      print("%16s:  DISABLED"%i)
773  env.Alias('release_tests',[ test_zipfile, test_tarfile])  if env['cppunit']:
774  env.Alias('release_examples',[ examples_zipfile, examples_tarfile])      print("         CppUnit:  FOUND")
775  env.Alias('examples_zipfile',examples_zipfile)  else:
776  env.Alias('examples_tarfile',examples_tarfile)      print("         CppUnit:  NOT FOUND")
777  env.Alias('api_epydoc',api_epydoc)  if env['gmsh']=='m':
778  env.Alias('api_doxygen',api_doxygen)      print("            gmsh:  FOUND, MPI-ENABLED")
779  env.Alias('guide_html_index',guide_html_index)  elif env['gmsh']=='s':
780  env.Alias('guide_pdf', guide_pdf)      print("            gmsh:  FOUND")
781  env.Alias('docs',[ 'release_examples', 'guide_pdf', api_epydoc, api_doxygen, guide_html_index])  else:
782  env.Alias('release', ['release_src', 'release_tests', 'docs'])      print("            gmsh:  NOT FOUND")
783    if env['numpy_h']:
784  env.Alias('build_tests',build_target)    # target to build all C++ tests      print("   numpy headers:  FOUND")
785  env.Alias('build_py_tests',build_target) # target to build all python tests  else:
786  env.Alias('build_all_tests', [ 'build_tests', 'build_py_tests' ] ) # target to build all C++ and python tests      print("   numpy headers:  NOT FOUND")
787  env.Alias('run_tests', 'build_tests')   # target to run all C++ tests  print("   vsl_random:  %s"%env['vsl_random'])
788  env.Alias('py_tests', 'build_py_tests') # target to run all released python tests
789  env.Alias('all_tests', ['run_tests', 'py_tests']) # target to run all C++ and released python tests  if ((fatalwarning != '') and (env['werror'])):
790        print("  Treating warnings as errors")
791    else:
792  # Allow sconscripts to see the env      print("  NOT treating warnings as errors")
793  Export(["IS_WINDOWS_PLATFORM", "env", "incinstall", "libinstall", "pyinstall", "dodebug", "mkl_libs", "scsl_libs", "umf_libs", "blas_libs", "netCDF_libs", "useNetCDF", "mpi_run",  print("")
794      "boost_lib", "python_lib", "doxygen_path", "epydoc_path", "papi_libs",  
795          "sys_libs", "test_zipfile", "src_zipfile", "test_tarfile", "src_tarfile", "examples_tarfile", "examples_zipfile", "trilinos_libs", "mpi_libs", "papi_instrument_solver",  ####################### Configure the subdirectories #########################
796          "guide_pdf", "guide_html_index", "api_epydoc", "api_doxygen", "useMPI" ])  
797    from grouptest import *
798  # End initialisation section  
799  # Begin configuration section  TestGroups=[]
800  # adds this file and the scons option directory to the source tar
801  release_srcfiles=[env.File('SConstruct'),env.Dir('lib'),env.Dir('include'),]+[ env.File(x) for x in glob.glob('scons/*.py') ]  # keep an environment without warnings-as-errors
802  release_testfiles=[env.File('README_TESTS'),]  dodgy_env=env.Clone()
803  env.Zip(src_zipfile, release_srcfiles)  
804  env.Zip(test_zipfile, release_testfiles)  # now add warnings-as-errors flags. This needs to be done after configuration
805  try:  # because the scons test files have warnings in them
806     env.Tar(src_tarfile, release_srcfiles)  if ((fatalwarning != '') and (env['werror'])):
807     env.Tar(test_tarfile, release_testfiles)      env.Append(CCFLAGS = fatalwarning)
808  except AttributeError:  
809     pass  Export(
810  # Insert new components to be build here    ['env',
811  # FIXME: might be nice to replace this verbosity with a list of targets and some     'dodgy_env',
812  # FIXME: nifty python to create the lengthy but very similar env.Sconscript lines     'IS_WINDOWS',
813  # Third Party libraries     'TestGroups'
814  env.SConscript(dirs = ['tools/CppUnitTest/src'], build_dir='build/$PLATFORM/tools/CppUnitTest', duplicate=0)    ]
815  # C/C++ Libraries  )
 env.SConscript(dirs = ['paso/src'], build_dir='build/$PLATFORM/paso', duplicate=0)  
 # bruce is removed for now as it doesn't really do anything  
 # env.SConscript(dirs = ['bruce/src'], build_dir='build/$PLATFORM/bruce', duplicate=0)  
 env.SConscript(dirs = ['escript/src'], build_dir='build/$PLATFORM/escript', duplicate=0)  
 env.SConscript(dirs = ['esysUtils/src'], build_dir='build/$PLATFORM/esysUtils', duplicate=0)  
 env.SConscript(dirs = ['finley/src'], build_dir='build/$PLATFORM/finley', duplicate=0)  
 env.SConscript(dirs = ['modellib/py_src'], build_dir='build/$PLATFORM/modellib', duplicate=0)  
 env.SConscript(dirs = ['doc'], build_dir='build/$PLATFORM/doc', duplicate=0)  
 env.SConscript(dirs = ['pyvisi/py_src'], build_dir='build/$PLATFORM/pyvisi', duplicate=0)  
 env.SConscript(dirs = ['pycad/py_src'], build_dir='build/$PLATFORM/pycad', duplicate=0)  
 env.SConscript(dirs = ['pythonMPI/src'], build_dir='build/$PLATFORM/pythonMPI', duplicate=0)  
 #env.SConscript(dirs = ['../test'], build_dir='../test/build', duplicate=0)  
816    
817    env.SConscript(dirs = ['tools/escriptconvert'], variant_dir='$BUILD_DIR/$PLATFORM/tools/escriptconvert', duplicate=0)
818    env.SConscript(dirs = ['paso/src'], variant_dir='$BUILD_DIR/$PLATFORM/paso', duplicate=0)
819    env.SConscript(dirs = ['weipa/src'], variant_dir='$BUILD_DIR/$PLATFORM/weipa', duplicate=0)
820    env.SConscript(dirs = ['escript/src'], variant_dir='$BUILD_DIR/$PLATFORM/escript', duplicate=0)
821    env.SConscript(dirs = ['esysUtils/src'], variant_dir='$BUILD_DIR/$PLATFORM/esysUtils', duplicate=0)
822    env.SConscript(dirs = ['pasowrap/src'], variant_dir='$BUILD_DIR/$PLATFORM/pasowrap', duplicate=0)
823    env.SConscript(dirs = ['dudley/src'], variant_dir='$BUILD_DIR/$PLATFORM/dudley', duplicate=0)
824    env.SConscript(dirs = ['finley/src'], variant_dir='$BUILD_DIR/$PLATFORM/finley', duplicate=0)
825    env.SConscript(dirs = ['ripley/src'], variant_dir='$BUILD_DIR/$PLATFORM/ripley', duplicate=0)
826    env.SConscript(dirs = ['downunder/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/downunder', duplicate=0)
827    env.SConscript(dirs = ['modellib/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/modellib', duplicate=0)
828    env.SConscript(dirs = ['pycad/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/pycad', duplicate=0)
829    env.SConscript(dirs = ['pythonMPI/src'], variant_dir='$BUILD_DIR/$PLATFORM/pythonMPI', duplicate=0)
830    env.SConscript(dirs = ['doc'], variant_dir='$BUILD_DIR/$PLATFORM/doc', duplicate=0)
831    env.SConscript(dirs = ['paso/profiling'], variant_dir='$BUILD_DIR/$PLATFORM/paso/profiling', duplicate=0)
832    
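
Every SConscript listed above is executed in its own variant_dir and picks up the shared state with Import(). A minimal sketch of what such a file might contain (illustrative only; the real per-module SConscript files are considerably more involved):

    # hypothetical <module>/src/SConscript
    Import('env', 'IS_WINDOWS', 'TestGroups')

    local_env = env.Clone()
    sources = Glob('*.cpp')
    lib = local_env.SharedLibrary('example', sources)
    local_env.Alias('build_example_lib', lib)
    local_env.Alias('install_example_lib',
                    local_env.Install(local_env['libinstall'], lib))
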
833    
834    ######################## Populate the buildvars file #########################
835    
836    # remove obsolete files
837    if not env['usempi']:
838        Execute(Delete(os.path.join(env['libinstall'], 'pythonMPI')))
839        Execute(Delete(os.path.join(env['libinstall'], 'pythonMPIredirect')))
840    
841    # Try to extract the boost version from version.hpp
842    boosthpp=open(os.path.join(boost_inc_path, 'boost', 'version.hpp'))
843    boostversion='unknown'
844    try:
845        for line in boosthpp:
846            ver=re.match(r'#define BOOST_VERSION (\d+)',line)
847            if ver:
848                boostversion=ver.group(1)
849    except StopIteration:
850        pass
851    boosthpp.close()
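
BOOST_VERSION encodes the release as major*100000 + minor*100 + patch, so for example 104800 corresponds to boost 1.48.0. If a dotted version string were ever wanted in buildvars instead of the raw number, a small helper would do (illustrative only):

    def dotted_boost_version(raw):
        # 104800 -> '1.48.0'
        raw = int(raw)
        return '%d.%d.%d' % (raw // 100000, (raw // 100) % 1000, raw % 100)
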
852    
853    
854    buildvars=open(os.path.join(env['libinstall'], 'buildvars'), 'w')
855    buildvars.write("svn_revision="+str(global_revision)+"\n")
856    buildvars.write("prefix="+prefix+"\n")
857    buildvars.write("cc="+env['CC']+"\n")
858    buildvars.write("cxx="+env['CXX']+"\n")
859    if env['pythoncmd']=='python':
860        buildvars.write("python="+sys.executable+"\n")
861        buildvars.write("python_version="+str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2])+"\n")
862    else:
863        buildvars.write("python="+env['pythoncmd']+"\n")
864        p=Popen([env['pythoncmd'], '-c', 'from __future__ import print_function;import sys;print(str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2]))'], stdout=PIPE)
865        verstring=p.stdout.readline().strip()
866        p.wait()
867        buildvars.write("python_version="+verstring+"\n")
868    buildvars.write("boost_inc_path="+boost_inc_path+"\n")
869    buildvars.write("boost_lib_path="+boost_lib_path+"\n")
870    buildvars.write("boost_version="+boostversion+"\n")
871    buildvars.write("debug=%d\n"%int(env['debug']))
872    buildvars.write("openmp=%d\n"%int(env['openmp']))
873    buildvars.write("mpi=%s\n"%env['mpi'])
874    buildvars.write("mpi_inc_path=%s\n"%mpi_inc_path)
875    buildvars.write("mpi_lib_path=%s\n"%mpi_lib_path)
876    buildvars.write("lapack=%s\n"%env['lapack'])
877    buildvars.write("vsl_random=%d\n"%int(env['vsl_random']))
878    for i in 'netcdf','parmetis','papi','mkl','umfpack','boomeramg','silo','visit':
879        buildvars.write("%s=%d\n"%(i, int(env[i])))
880        if env[i]:
881            buildvars.write("%s_inc_path=%s\n"%(i, eval(i+'_inc_path')))
882            buildvars.write("%s_lib_path=%s\n"%(i, eval(i+'_lib_path')))
883    buildvars.close()
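
The buildvars file written above is plain key=value text (the summary banner points users to lib/buildvars), so other tools can read it back with a few lines of Python; a sketch:

    def read_buildvars(path):
        info = {}
        with open(path) as f:
            for line in f:
                key, sep, value = line.strip().partition('=')
                if sep:
                    info[key] = value
        return info

    # e.g. read_buildvars(os.path.join(env['libinstall'], 'buildvars'))['mpi']
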
884    
885    ################### Targets to build and install libraries ###################
886    
887    target_init = env.Command(os.path.join(env['pyinstall'],'__init__.py'), None, Touch('$TARGET'))
888    env.Alias('target_init', [target_init])
889    # delete buildvars upon cleanup
890    env.Clean('target_init', os.path.join(env['libinstall'], 'buildvars'))
891    
892    # The headers have to be installed prior to build in order to satisfy
893    # #include <paso/Common.h>
894    env.Alias('build_esysUtils', ['install_esysUtils_headers', 'build_esysUtils_lib'])
895    env.Alias('install_esysUtils', ['build_esysUtils', 'install_esysUtils_lib'])
896    
897    env.Alias('build_paso', ['install_paso_headers', 'build_paso_lib'])
898    env.Alias('install_paso', ['build_paso', 'install_paso_lib'])
899    
900    env.Alias('build_escript', ['install_escript_headers', 'build_escript_lib', 'build_escriptcpp_lib'])
901    env.Alias('install_escript', ['build_escript', 'install_escript_lib', 'install_escriptcpp_lib', 'install_escript_py'])
902    
903    env.Alias('build_pasowrap', ['install_pasowrap_headers', 'build_pasowrap_lib', 'build_pasowrapcpp_lib'])
904    env.Alias('install_pasowrap', ['build_pasowrap', 'install_pasowrap_lib', 'install_pasowrapcpp_lib', 'install_pasowrap_py'])
905    
906    env.Alias('build_dudley', ['install_dudley_headers', 'build_dudley_lib', 'build_dudleycpp_lib'])
907    env.Alias('install_dudley', ['build_dudley', 'install_dudley_lib', 'install_dudleycpp_lib', 'install_dudley_py'])
908    
909    env.Alias('build_finley', ['install_finley_headers', 'build_finley_lib', 'build_finleycpp_lib'])
910    env.Alias('install_finley', ['build_finley', 'install_finley_lib', 'install_finleycpp_lib', 'install_finley_py'])
911    
912    env.Alias('build_ripley', ['install_ripley_headers', 'build_ripley_lib', 'build_ripleycpp_lib'])
913    env.Alias('install_ripley', ['build_ripley', 'install_ripley_lib', 'install_ripleycpp_lib', 'install_ripley_py'])
914    
915    env.Alias('build_weipa', ['install_weipa_headers', 'build_weipa_lib', 'build_weipacpp_lib'])
916    env.Alias('install_weipa', ['build_weipa', 'install_weipa_lib', 'install_weipacpp_lib', 'install_weipa_py'])
917    
918    env.Alias('build_escriptreader', ['install_weipa_headers', 'build_escriptreader_lib'])
919    env.Alias('install_escriptreader', ['build_escriptreader', 'install_escriptreader_lib'])
920    
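
The build_*/install_* aliases for pasowrap, dudley, finley, ripley and weipa follow exactly the same pattern (esysUtils, paso, escript and escriptreader deviate slightly and stay explicit). The repetition could therefore be generated; a sketch of that alternative, not what the build actually does:

    for mod in ('pasowrap', 'dudley', 'finley', 'ripley', 'weipa'):
        env.Alias('build_' + mod,
                  ['install_%s_headers' % mod,
                   'build_%s_lib' % mod,
                   'build_%scpp_lib' % mod])
        env.Alias('install_' + mod,
                  ['build_' + mod,
                   'install_%s_lib' % mod,
                   'install_%scpp_lib' % mod,
                   'install_%s_py' % mod])
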
921    # Now gather all the above into some easy targets: build_all and install_all
922    build_all_list = []
923    build_all_list += ['build_esysUtils']
924    build_all_list += ['build_paso']
925    build_all_list += ['build_escript']
926    build_all_list += ['build_pasowrap']
927    build_all_list += ['build_dudley']
928    build_all_list += ['build_finley']
929    build_all_list += ['build_ripley']
930    build_all_list += ['build_weipa']
931    if not IS_WINDOWS: build_all_list += ['build_escriptreader']
932    if env['usempi']:   build_all_list += ['build_pythonMPI']
933    build_all_list += ['build_escriptconvert']
934    env.Alias('build_all', build_all_list)
935    
936    install_all_list = []
937    install_all_list += ['target_init']
938    install_all_list += ['install_esysUtils']
939    install_all_list += ['install_paso']
940    install_all_list += ['install_escript']
941    install_all_list += ['install_pasowrap']
942    install_all_list += ['install_dudley']
943    install_all_list += ['install_finley']
944    install_all_list += ['install_ripley']
945    install_all_list += ['install_weipa']
946    if not IS_WINDOWS: install_all_list += ['install_escriptreader']
947    install_all_list += ['install_downunder_py']
948    install_all_list += ['install_modellib_py']
949    install_all_list += ['install_pycad_py']
950    if env['usempi']:   install_all_list += ['install_pythonMPI']
951    install_all_list += ['install_escriptconvert']
952    env.Alias('install_all', install_all_list)
953    
954    # Default target is install
955    env.Default('install_all')
956    
957    ################## Targets to build and run the test suite ###################
958    
959    if not env['cppunit']:
960        test_msg = env.Command('.dummy.', None, '@echo "Cannot run C/C++ unit tests, CppUnit not found!";exit 1')
961        env.Alias('run_tests', test_msg)
962    env.Alias('run_tests', ['install_all'])
963    env.Alias('all_tests', ['install_all', 'run_tests', 'py_tests'])
964    env.Alias('build_full',['install_all','build_tests','build_py_tests'])
965    env.Alias('build_PasoTests','$BUILD_DIR/$PLATFORM/paso/profiling/PasoTests')
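
The CppUnit check just above uses a small trick: when the prerequisite is missing, 'run_tests' is aliased to a Command whose action only prints a message and exits non-zero, so the failure is explained rather than silent. The same pattern can guard any target; a generic sketch (hypothetical helper, not part of this file):

    def fail_with_message(env, alias, message):
        # Build a dummy node whose action just reports the problem and fails.
        msg = env.Command('.%s.dummy' % alias, None,
                          '@echo "%s"; exit 1' % message)
        env.Alias(alias, msg)

    # e.g. fail_with_message(env, 'run_tests',
    #                        'Cannot run C/C++ unit tests, CppUnit not found!')
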
966    
967    ##################### Targets to build the documentation #####################
968    
969    env.Alias('basedocs', ['examples_tarfile', 'examples_zipfile', 'api_doxygen', 'user_pdf', 'install_pdf', 'cookbook_pdf', 'inversion_pdf'])
970    env.Alias('docs', ['basedocs', 'sphinxdoc'])
971    env.Alias('release_prep', ['docs', 'install_all'])
972    env.Alias('release_prep_old', ['basedocs', 'api_epydoc', 'install_all'])
973    
974    
975    # The test scripts are always generated; this target allows us to
976    # generate the test scripts without doing a full build
977    env.Alias('testscripts',[])
978    
979  syslib_install_target = env.installDirectory(sys_libinstall,libinstall)  if not IS_WINDOWS:
980  syspy_install_target = env.installDirectory(sys_pyinstall,pyinstall,recursive=True)      try:
981            utest=open('utest.sh','w')
982            utest.write(GroupTest.makeHeader(env['PLATFORM'], prefix, False))
983            for tests in TestGroups:
984                utest.write(tests.makeString())
985            utest.close()
986            Execute(Chmod('utest.sh', 0o755))
987            print("Generated utest.sh.")
988            # This version contains only python tests - I want this to be usable
989            # from a binary-only install if you have the test files
990            utest=open('itest.sh','w')
991            utest.write(GroupTest.makeHeader(env['PLATFORM'], prefix, True))
992            for tests in TestGroups:
993              if tests.exec_cmd=='$PYTHONRUNNER ':
994                utest.write(tests.makeString())
995            utest.close()
996            Execute(Chmod('itest.sh', 0o755))
997            print("Generated itest.sh.")        
998        except IOError:
999            print("Error attempting to write unittests file.")
1000            Exit(1)
1001    
1002        # delete utest.sh upon cleanup
1003        env.Clean('target_init', 'utest.sh')
1004        env.Clean('target_init', 'itest.sh')
1005    
1006        # Make sure that the escript wrapper is in place
1007        if not os.path.isfile(os.path.join(env['bininstall'], 'run-escript')):
1008            print("Copying escript wrapper.")
1009            Execute(Copy(os.path.join(env['bininstall'],'run-escript'), 'bin/run-escript'))
1010    
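
utest.sh and itest.sh are assembled from the TestGroups list that the sub-SConscripts fill in; each entry only needs a makeString() method returning a shell fragment and an exec_cmd attribute so that python-only groups can be filtered into itest.sh. A rough sketch of that shape (the real GroupTest class lives in grouptest.py and is certainly more elaborate):

    class SimpleGroupTest(object):
        # Hypothetical stand-in for grouptest.GroupTest, for illustration only.
        def __init__(self, exec_cmd, workdir, testcases):
            self.exec_cmd = exec_cmd          # e.g. '$PYTHONRUNNER '
            self.workdir = workdir
            self.testcases = testcases

        def makeString(self):
            lines = ['cd %s' % self.workdir]
            for t in self.testcases:
                lines.append('%s%s || FAILED="$FAILED %s"' % (self.exec_cmd, t, t))
            return '\n'.join(lines) + '\n'
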
 install_target = env.Alias("install", env.Flatten([syslib_install_target, syspy_install_target]) )  
