/[escript]/trunk/SConstruct

Diff of /trunk/SConstruct


Comparing revision 1390 (gross, Mon Jan 14 03:46:11 2008 UTC) with revision 4078 (jfenwick, Fri Nov 16 07:50:49 2012 UTC)
# ===== revision 1390 (old) =====

#         Copyright 2006 by ACcESS MNRF
#
#              http://www.access.edu.au
#       Primary Business: Queensland, Australia
#  Licensed under the Open Software License version 3.0
#     http://www.opensource.org/licenses/osl-3.0.php

# top-level Scons configuration file for all esys13 modules
# Begin initialisation Section
# all of this section just intialises default environments and helper
# scripts. You shouldn't need to modify this section.
EnsureSConsVersion(0,96,91)
EnsurePythonVersion(2,3)

#===============================================================
#   import tools:
import glob
import sys, os, re
# Add our extensions
if sys.path.count('scons')==0: sys.path.append('scons')
import scons_extensions

# We may also need to know where python's site-packages subdirectory lives
python_version = 'python%s.%s'%(sys.version_info[0],sys.version_info[1])

#===============================================================

tools_prefix="/usr"

#==============================================================================================
#
#    get the installation prefix
#
prefix = ARGUMENTS.get('prefix', sys.prefix )

# We may also need to know where python's site-packages subdirectory lives
python_version = 'python%s.%s'%(sys.version_info[0],sys.version_info[1])
# Install as a standard python package in /usr/lib64 if available, else in /usr/lib
if os.path.isdir(  prefix+"/lib64/"+python_version+"/site-packages"):
   sys_dir_packages =  prefix+"/lib64/"+python_version+"/site-packages/esys"
   sys_dir_libraries = prefix+"/lib64"
else:
   sys_dir_packages =  prefix+"/lib/"+python_version+"/site-packages/esys"
   sys_dir_libraries = prefix+"/lib"

sys_dir_examples = prefix+"/share/doc/esys"

source_root = Dir('#.').abspath

dir_packages = os.path.join(source_root,"esys")
dir_examples = os.path.join(source_root,"examples")
dir_libraries = os.path.join(source_root,"lib")

print "Source root is : ",source_root
print " Default packages local installation:    ", dir_packages
print " Default library local installation  ", dir_libraries
print " Default example local  installation:    ", dir_examples
print "Install prefix is: ", prefix
print " Default packages system installation:   ", sys_dir_packages
print " Default library system installation     ", sys_dir_libraries
print " Default example system installation:    ", sys_dir_examples

#==============================================================================================

# Default options and options help text
# These are defaults and can be overridden using command line arguments or an options file.
# if the options_file or ARGUMENTS do not exist then the ones listed as default here are used
# DO NOT CHANGE THEM HERE
# Where to install?
#==============================================================================================
#
#    get the options file if present:
#
options_file = ARGUMENTS.get('options_file','')

if not os.path.isfile(options_file) :
    options_file = False

# ===== revision 4078 (new) =====

##############################################################################
#
# Copyright (c) 2003-2012 by University of Queensland
# http://www.uq.edu.au
#
# Primary Business: Queensland, Australia
# Licensed under the Open Software License version 3.0
# http://www.opensource.org/licenses/osl-3.0.php
#
# Development until 2012 by Earth Systems Science Computational Center (ESSCC)
# Development since 2012 by School of Earth Sciences
#
##############################################################################

EnsureSConsVersion(0,98,1)
EnsurePythonVersion(2,5)

import sys, os, platform, re
from distutils import sysconfig
from site_init import *
from subprocess import PIPE, Popen

# Version number to check for in options file. Increment when new features are
# added or existing options changed.
REQUIRED_OPTS_VERSION=201

# MS Windows support, many thanks to PH
IS_WINDOWS = (os.name == 'nt')

########################## Determine options file ############################
# 1. command line
# 2. scons/<hostname>_options.py
# 3. name as part of a cluster
options_file=ARGUMENTS.get('options_file', None)
if not options_file:
    ext_dir = os.path.join(os.getcwd(), 'scons')
    hostname = platform.node().split('.')[0]
    for name in hostname, effectiveName(hostname):
        mangledhostname = re.sub('[^0-9a-zA-Z]', '_', hostname)
        options_file = os.path.join(ext_dir, mangledhostname+'_options.py')
        if os.path.isfile(options_file): break

if not os.path.isfile(options_file):
    print("\nWARNING:\nOptions file %s" % options_file)
    print("not found! Default options will be used which is most likely suboptimal.")
    print("It is recommended that you copy one of the TEMPLATE files in the scons/")
    print("subdirectory and customize it to your needs.\n")
    options_file = None
############################### Build options ################################

52     import socket  default_prefix='/usr'
53     hostname = re.sub("[^0-9a-zA-Z]", "_", socket.gethostname().split('.')[0])  mpi_flavours=('no', 'none', 'MPT', 'MPICH', 'MPICH2', 'OPENMPI', 'INTELMPI')
54     tmp = os.path.join("scons",hostname+"_options.py")  lapack_flavours=('none', 'clapack', 'mkl')
55    
56     if os.path.isfile(tmp) :  vars = Variables(options_file, ARGUMENTS)
57        options_file = tmp  vars.AddVariables(
58      PathVariable('options_file', 'Path to options file', options_file, PathVariable.PathIsFile),
59  IS_WINDOWS_PLATFORM = (os.name== "nt")    PathVariable('prefix', 'Installation prefix', Dir('#.').abspath, PathVariable.PathIsDirCreate),
60      PathVariable('build_dir', 'Top-level build directory', Dir('#/build').abspath, PathVariable.PathIsDirCreate),
61  # If you're not going to tell me then......    BoolVariable('verbose', 'Output full compile/link lines', False),
62  # FIXME: add one for the altix too.  # Compiler/Linker options
63  if not options_file :    ('cc', 'Path to C compiler', 'default'),
64     if IS_WINDOWS_PLATFORM :    ('cxx', 'Path to C++ compiler', 'default'),
65        options_file = "scons/windows_mscv71_options.py"    ('cc_flags', 'Base C/C++ compiler flags', 'default'),
66     else:    ('cc_optim', 'Additional C/C++ flags for a non-debug build', 'default'),
67        options_file = "scons/linux_gcc_eg_options.py"    ('cc_debug', 'Additional C/C++ flags for a debug build', 'default'),
68      ('cc_extra', 'Extra C compiler flags', ''),
69  # and load it    ('cxx_extra', 'Extra C++ compiler flags', ''),
70  opts = Options(options_file, ARGUMENTS)    ('ld_extra', 'Extra linker flags', ''),
71  #================================================================    BoolVariable('werror','Treat compiler warnings as errors', True),
72  #    BoolVariable('debug', 'Compile with debug flags', False),
73  #   check if UMFPACK is installed on the system:    BoolVariable('openmp', 'Compile parallel version using OpenMP', False),
74  #    ('omp_flags', 'OpenMP compiler flags', 'default'),
75  uf_root=None    ('omp_ldflags', 'OpenMP linker flags', 'default'),
76  for i in [ 'UMFPACK', 'umfpack', 'ufsparse', 'UFSPARSE']:  # Mandatory libraries
77     if os.path.isdir(os.path.join(tools_prefix,'include',i)):    ('boost_prefix', 'Prefix/Paths of boost installation', default_prefix),
78         uf_root=i    ('boost_libs', 'Boost libraries to link with', ['boost_python-mt']),
79         print i," is used form ",tools_prefix  # Mandatory for tests
80         break    ('cppunit_prefix', 'Prefix/Paths of CppUnit installation', default_prefix),
81  if not uf_root==None:    ('cppunit_libs', 'CppUnit libraries to link with', ['cppunit']),
82     umf_path_default=os.path.join(tools_prefix,'include',uf_root)  # Optional libraries and options
83     umf_lib_path_default=os.path.join(tools_prefix,'lib')    EnumVariable('mpi', 'Compile parallel version using MPI flavour', 'none', allowed_values=mpi_flavours),
84     umf_libs_default=['umfpack']    ('mpi_prefix', 'Prefix/Paths of MPI installation', default_prefix),
85     amd_path_default=os.path.join(tools_prefix,'include',uf_root)    ('mpi_libs', 'MPI shared libraries to link with', ['mpi']),
86     amd_lib_path_default=os.path.join(tools_prefix,'lib')    BoolVariable('netcdf', 'Enable netCDF file support', False),
87     amd_libs_default=['amd']    ('netcdf_prefix', 'Prefix/Paths of netCDF installation', default_prefix),
88     ufc_path_default=os.path.join(tools_prefix,'include',uf_root)    ('netcdf_libs', 'netCDF libraries to link with', ['netcdf_c++', 'netcdf']),
89  else:    BoolVariable('parmetis', 'Enable ParMETIS (requires MPI)', False),
90     umf_path_default=None    ('parmetis_prefix', 'Prefix/Paths of ParMETIS installation', default_prefix),
91     umf_lib_path_default=None    ('parmetis_libs', 'ParMETIS libraries to link with', ['parmetis', 'metis']),
92     umf_libs_default=None    BoolVariable('papi', 'Enable PAPI', False),
93     amd_path_default=None    ('papi_prefix', 'Prefix/Paths to PAPI installation', default_prefix),
94     amd_lib_path_default=None    ('papi_libs', 'PAPI libraries to link with', ['papi']),
95     amd_libs_default=None    BoolVariable('papi_instrument_solver', 'Use PAPI to instrument each iteration of the solver', False),
96     ufc_path_default=None    BoolVariable('mkl', 'Enable the Math Kernel Library', False),
97  #    ('mkl_prefix', 'Prefix/Paths to MKL installation', default_prefix),
98  #==========================================================================    ('mkl_libs', 'MKL libraries to link with', ['mkl_solver','mkl_em64t','guide','pthread']),
99  #    BoolVariable('umfpack', 'Enable UMFPACK', False),
100  #    python installation:    ('umfpack_prefix', 'Prefix/Paths to UMFPACK installation', default_prefix),
101  #    ('umfpack_libs', 'UMFPACK libraries to link with', ['umfpack']),
102  python_path_default=os.path.join(tools_prefix,'include','python%s.%s'%(sys.version_info[0],sys.version_info[1]))    BoolVariable('boomeramg', 'Enable BoomerAMG', False),
103  python_lib_path_default=os.path.join(tools_prefix,'lib')    ('boomeramg_prefix', 'Prefix/Paths to BoomerAMG installation', default_prefix),
104  python_lib_default="python%s.%s"%(sys.version_info[0],sys.version_info[1])    ('boomeramg_libs', 'BoomerAMG libraries to link with', ['boomeramg']),
105      EnumVariable('lapack', 'Set LAPACK flavour', 'none', allowed_values=lapack_flavours),
106  #==========================================================================    ('lapack_prefix', 'Prefix/Paths to LAPACK installation', default_prefix),
107  #    ('lapack_libs', 'LAPACK libraries to link with', []),
108  #    boost installation:    BoolVariable('silo', 'Enable the Silo file format in weipa', False),
109  #    ('silo_prefix', 'Prefix/Paths to Silo installation', default_prefix),
110  boost_path_default=os.path.join(tools_prefix,'include')    ('silo_libs', 'Silo libraries to link with', ['siloh5', 'hdf5']),
111  boost_lib_path_default=os.path.join(tools_prefix,'lib')    BoolVariable('visit', 'Enable the VisIt simulation interface', False),
112  boost_lib_default=['boost_python']    ('visit_prefix', 'Prefix/Paths to VisIt installation', default_prefix),
  ('visit_libs', 'VisIt libraries to link with', ['simV2']),
  BoolVariable('vsl_random', 'Use VSL from intel for random data', False),
# Advanced settings
  #dudley_assemble_flags = -funroll-loops      to actually do something
  ('dudley_assemble_flags', 'compiler flags for some dudley optimisations', ''),
  # To enable passing function pointers through python
  BoolVariable('iknowwhatimdoing', 'Allow non-standard C', False),
  # An option for specifying the compiler tools (see windows branch)
  ('tools_names', 'Compiler tools to use', ['default']),
  ('env_export', 'Environment variables to be passed to tools',[]),
  EnumVariable('forcelazy', 'For testing use only - set the default value for autolazy', 'leave_alone', allowed_values=('leave_alone', 'on', 'off')),
  EnumVariable('forcecollres', 'For testing use only - set the default value for force resolving collective ops', 'leave_alone', allowed_values=('leave_alone', 'on', 'off')),
  # finer control over library building, intel aggressive global optimisation
  # works with dynamic libraries on windows.
  ('build_shared', 'Build dynamic libraries only', False),
  ('sys_libs', 'Extra libraries to link with', []),
  ('escript_opts_version', 'Version of options file (do not specify on command line)'),
  ('SVN_VERSION', 'Do not use from options file', -2),
  ('pythoncmd', 'which python to compile with','python'),
  ('usepython3', 'Is this a python3 build? (experimental)', False),
  ('pythonlibname', 'Name of the python library to link. (This is found automatically for python2.X.)', ''),
  ('pythonlibpath', 'Path to the python library. (You should not need to set this unless your python has moved)',''),
  ('pythonincpath','Path to python include files. (You should not need to set this unless your python has moved',''),
  BoolVariable('BADPYTHONMACROS','Extra \#include to get around a python bug.', True),
)

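The variables above are normally collected in a per-host options file rather than typed on the command line; the lookup loop earlier builds the file name from the hostname with non-alphanumeric characters replaced by underscores (so host "my-host" reads scons/my_host_options.py). A minimal, purely illustrative options file might look like the sketch below; the file name and all values are placeholders chosen for the example, not recommendations, and only escript_opts_version is required to satisfy the version check that follows.

# scons/my_host_options.py -- illustrative example only
escript_opts_version = 201            # must be at least REQUIRED_OPTS_VERSION
prefix = '/opt/escript'               # installation prefix (example path)
cc_optim = '-O3'                      # example optimisation flags
openmp = True
boost_prefix = '/usr/local'           # example boost location
boost_libs = ['boost_python-mt']
netcdf = True

Any of these settings can also be overridden on the scons command line, e.g. "scons options_file=scons/my_host_options.py prefix=/opt/escript", since values passed in ARGUMENTS take precedence over the options file.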
139  #==========================================================================  ##################### Create environment and help text #######################
 #  
 #    check if netCDF is installed on the system:  
 #  
 netCDF_path_default=os.path.join(tools_prefix,'include','netcdf-3')  
 netCDF_lib_path_default=os.path.join(tools_prefix,'lib')  
140    
141  if os.path.isdir(netCDF_path_default) and os.path.isdir(netCDF_lib_path_default):  # Intel's compiler uses regular expressions improperly and emits a warning
142       useNetCDF_default='yes'  # about failing to find the compilers. This warning can be safely ignored.
      netCDF_libs_default=[ 'netcdf_c++', 'netcdf' ]  
 else:  
      useNetCDF_default='no'  
      netCDF_path_default=None  
      netCDF_lib_path_default=None  
      netCDF_libs_default=None  
143    
144  #==========================================================================  # PATH is needed so the compiler, linker and tools are found if they are not
145  #  # in default locations.
146  #  MPI:  env = Environment(tools = ['default'], options = vars,
147  #                    ENV = {'PATH': os.environ['PATH']})
148  if IS_WINDOWS_PLATFORM:                    
149     useMPI_default='no'  
150     mpi_path_default=None  #set the vars for clang
151     mpi_lib_path_default=None  def mkclang(env):
152     mpi_libs_default=[]    env['CC']='clang'
153     mpi_run_default=None    env['CXX']='clang++'
154  else:                    
155     useMPI_default='no'                    
156     mpi_root='/usr/local'  if env['tools_names'] != 'default':
157     mpi_path_default=os.path.join(mpi_root,'include')      zz=env['tools_names']
158     mpi_lib_path_default=os.path.join(mpi_root,'lib')      if 'clang' in zz:
159     mpi_libs_default=[ 'mpich' , 'pthread', 'rt' ]          zz.remove('clang')
160     mpi_run_default='mpiexec -np 1'          zz.insert(0, mkclang)
161  #      env = Environment(tools = ['default'] + env['tools_names'], options = vars,
162  #==========================================================================                        ENV = {'PATH' : os.environ['PATH']})
163  #  
164  #    compile:  if options_file:
165  #      opts_valid=False
166  cc_flags_default='-O3 -std=c99 -ffast-math -fpic -Wno-unknown-pragmas -ansi'      if 'escript_opts_version' in env.Dictionary() and \
167  cc_flags_debug_default='-g -O0 -ffast-math -std=c99 -fpic -Wno-unknown-pragmas -ansi'          int(env['escript_opts_version']) >= REQUIRED_OPTS_VERSION:
168  cxx_flags_default='--no-warn -ansi'              opts_valid=True
169  cxx_flags_debug_default='--no-warn -ansi -DDOASSERT'      if opts_valid:
170            print("Using options in %s." % options_file)
171  #==============================================================================================          else:
172  # Default options and options help text          print("\nOptions file %s" % options_file)
173  # These are defaults and can be overridden using command line arguments or an options file.          print("is outdated! Please update the file by examining one of the TEMPLATE")
174  # if the options_file or ARGUMENTS do not exist then the ones listed as default here are used          print("files in the scons/ subdirectory and setting escript_opts_version to %d.\n"%REQUIRED_OPTS_VERSION)
175  # DO NOT CHANGE THEM HERE          Exit(1)
176  opts.AddOptions(  
177  # Where to install esys stuff  # Generate help text (scons -h)
178    ('incinstall', 'where the esys headers will be installed',             Dir('#.').abspath+'/include'),  Help(vars.GenerateHelpText(env))
179    ('libinstall', 'where the esys libraries will be installed',           dir_libraries),  
180    ('pyinstall', 'where the esys python modules will be installed',       dir_packages),  # Check for superfluous options
181    ('exinstall', 'where the esys examples will be installed',             dir_examples),  if len(vars.UnknownVariables())>0:
182    ('sys_libinstall', 'where the system esys libraries will be installed',       sys_dir_libraries),      for k in vars.UnknownVariables():
183    ('sys_pyinstall', 'where the system esys python modules will be installed',   sys_dir_packages),          print("Unknown option '%s'" % k)
184    ('sys_exinstall', 'where the system esys examples will be installed',         sys_dir_examples),      Exit(1)
185    ('src_zipfile', 'the source zip file will be installed.',              Dir('#.').abspath+"/release/escript_src.zip"),  
186    ('test_zipfile', 'the test zip file will be installed.',               Dir('#.').abspath+"/release/escript_tests.zip"),  #################### Make sure install directories exist #####################
187    ('src_tarfile', 'the source tar file will be installed.',              Dir('#.').abspath+"/release/escript_src.tar.gz"),  
188    ('test_tarfile', 'the test tar file will be installed.',               Dir('#.').abspath+"/release/escript_tests.tar.gz"),  env['BUILD_DIR']=Dir(env['build_dir']).abspath
189    ('examples_tarfile', 'the examples tar file will be installed.',       Dir('#.').abspath+"/release/doc/escript_examples.tar.gz"),  prefix=Dir(env['prefix']).abspath
190    ('examples_zipfile', 'the examples zip file will be installed.',       Dir('#.').abspath+"/release/doc/escript_examples.zip"),  env['incinstall'] = os.path.join(prefix, 'include')
191    ('guide_pdf', 'name of the user guide in pdf format',                  Dir('#.').abspath+"/release/doc/user/guide.pdf"),  env['bininstall'] = os.path.join(prefix, 'bin')
192    ('api_epydoc', 'name of the epydoc api docs directory',                Dir('#.').abspath+"/release/doc/epydoc"),  env['libinstall'] = os.path.join(prefix, 'lib')
193    ('guide_html', 'name of the directory for user guide in html format',  Dir('#.').abspath+"/release/doc/user/html"),  env['pyinstall']  = os.path.join(prefix, 'esys')
194    ('api_doxygen', 'name of the doxygen api docs directory',prefix+"/release/doc/doxygen"),  if not os.path.isdir(env['bininstall']):
195  # Compilation options      os.makedirs(env['bininstall'])
196    BoolOption('dodebug', 'Do you want a debug build?', 'no'),  if not os.path.isdir(env['libinstall']):
197    BoolOption('bounds_check', 'Do you want extra array bounds checking?', 'no'),      os.makedirs(env['libinstall'])
198    ('options_file', "Optional file containing preferred options. Ignored if it doesn't exist (default: scons/<hostname>_options.py)", options_file),  if not os.path.isdir(env['pyinstall']):
199    ('cc_defines','C/C++ defines to use', None),      os.makedirs(env['pyinstall'])
200    ('cc_flags','C compiler flags to use (Release build)', cc_flags_default),  
201    ('cc_flags_debug', 'C compiler flags to use (Debug build)', cc_flags_debug_default),  env.Append(CPPPATH = [env['incinstall']])
202    ('cxx_flags', 'C++ compiler flags to use (Release build)', cxx_flags_default),  env.Append(LIBPATH = [env['libinstall']])
203    ('cxx_flags_debug', 'C++ compiler flags to use (Debug build)', cxx_flags_debug_default),  
204    ('omp_flags', 'OpenMP compiler flags to use (Release build)', ''),  ################# Fill in compiler options if not set above ##################
205    ('omp_flags_debug', 'OpenMP compiler flags to use (Debug build)', ''),  
206    ('ar_flags', 'Static library archiver flags to use', None),  if env['cc'] != 'default': env['CC']=env['cc']
207    ('sys_libs', 'System libraries to link with', None),  if env['cxx'] != 'default': env['CXX']=env['cxx']
208    ('tar_flags','flags for zip files','-c -z'),  
209  # MKL  # version >=9 of intel C++ compiler requires use of icpc to link in C++
210    PathOption('mkl_path', 'Path to MKL includes', None),  # runtimes (icc does not)
211    PathOption('mkl_lib_path', 'Path to MKL libs', None),  if not IS_WINDOWS and os.uname()[4]=='ia64' and env['CXX']=='icpc':
212    ('mkl_libs', 'MKL libraries to link with', None),      env['LINK'] = env['CXX']
213  # SCSL  
214    PathOption('scsl_path', 'Path to SCSL includes', None),  # default compiler/linker options
215    PathOption('scsl_lib_path', 'Path to SCSL libs', None),  cc_flags = ''
216    ('scsl_libs', 'SCSL libraries to link with', None),  cc_optim = ''
217    ('scsl_libs_MPI', 'SCSL libraries to link with for MPI build', None),  cc_debug = ''
218  # UMFPACK  omp_flags = ''
219    PathOption('ufc_path', 'Path to UFconfig includes', ufc_path_default),  omp_ldflags = ''
220    PathOption('umf_path', 'Path to UMFPACK includes', umf_path_default),  fatalwarning = '' # switch to turn warnings into errors
221    PathOption('umf_lib_path', 'Path to UMFPACK libs', umf_lib_path_default),  sysheaderopt = '' # how to indicate that a header is a system header
222    ('umf_libs', 'UMFPACK libraries to link with', umf_libs_default),  
223  # AMD (used by UMFPACK)  # env['CC'] might be a full path
224    PathOption('amd_path', 'Path to AMD includes', amd_path_default),  cc_name=os.path.basename(env['CC'])
225    PathOption('amd_lib_path', 'Path to AMD libs', amd_lib_path_default),  
226    ('amd_libs', 'AMD libraries to link with', amd_libs_default),  if cc_name == 'icc':
227  # TRILINOS      # Intel compiler
228    PathOption('trilinos_path', 'Path to TRILINOS includes', None),      cc_flags    = "-std=c99 -fPIC -wd161 -w1 -vec-report0 -DBLOCKTIMER -DCORE_ID1"
229    PathOption('trilinos_lib_path', 'Path to TRILINOS libs', None),      cc_optim    = "-O3 -ftz -IPF_ftlacc- -IPF_fma -fno-alias -ip"
230    ('trilinos_libs', 'TRILINOS libraries to link with', None),      cc_debug    = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
231  # BLAS      omp_flags   = "-openmp -openmp_report0"
232    PathOption('blas_path', 'Path to BLAS includes', None),      omp_ldflags = "-openmp -openmp_report0 -lpthread"
233    PathOption('blas_lib_path', 'Path to BLAS libs', None),      fatalwarning = "-Werror"
234    ('blas_libs', 'BLAS libraries to link with', None),  elif cc_name[:3] == 'gcc':
235  # netCDF      # GNU C on any system
236    ('useNetCDF', 'switch on/off the usage of netCDF', useNetCDF_default),      cc_flags     = "-pedantic -Wall -fPIC -ffast-math -Wno-unknown-pragmas -DBLOCKTIMER  -Wno-sign-compare -Wno-system-headers -Wno-long-long -Wno-strict-aliasing -finline-functions"
237    PathOption('netCDF_path', 'Path to netCDF includes', netCDF_path_default),      cc_optim     = "-O3"
238    PathOption('netCDF_lib_path', 'Path to netCDF libs', netCDF_lib_path_default),      cc_debug     = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
239    ('netCDF_libs', 'netCDF C++ libraries to link with', netCDF_libs_default),      omp_flags    = "-fopenmp"
240  # Python      omp_ldflags  = "-fopenmp"
241  # locations of include files for python      fatalwarning = "-Werror"
242  # FIXME: python_path should be python_inc_path and the same for boost etc.      sysheaderopt = "-isystem"
243    PathOption('python_path', 'Path to Python includes', python_path_default),  elif cc_name == 'cl':
244    PathOption('python_lib_path', 'Path to Python libs', python_lib_path_default),      # Microsoft Visual C on Windows
245    ('python_lib', 'Python libraries to link with', python_lib_default),      cc_flags     = "/EHsc /MD /GR /wd4068 /D_USE_MATH_DEFINES /DDLL_NETCDF"
246    ('python_cmd', 'Python command', 'python'),      cc_optim     = "/O2 /Op /W3"
247  # Boost      cc_debug     = "/Od /RTCcsu /ZI /DBOUNDS_CHECK"
248    PathOption('boost_path', 'Path to Boost includes', boost_path_default),      fatalwarning = "/WX"
249    PathOption('boost_lib_path', 'Path to Boost libs', boost_lib_path_default),  elif cc_name == 'icl':
250    ('boost_lib', 'Boost libraries to link with', boost_lib_default),      # Intel C on Windows
251  # Doc building      cc_flags     = '/EHsc /GR /MD'
252  #  PathOption('doxygen_path', 'Path to Doxygen executable', None),      cc_optim     = '/fast /Oi /W3 /Qssp /Qinline-factor- /Qinline-min-size=0 /Qunroll'
253  #  PathOption('epydoc_path', 'Path to Epydoc executable', None),      cc_debug     = '/Od /RTCcsu /Zi /Y- /debug:all /Qtrapuv'
254  # PAPI      omp_flags    = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel'
255    PathOption('papi_path', 'Path to PAPI includes', None),      omp_ldflags  = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel'
256    PathOption('papi_lib_path', 'Path to PAPI libs', None),  
257    ('papi_libs', 'PAPI libraries to link with', None),  # set defaults if not otherwise specified
258    ('papi_instrument_solver', 'use PAPI in Solver.c to instrument each iteration of the solver', None),  if env['cc_flags']    == 'default': env['cc_flags'] = cc_flags
259  # MPI  if env['cc_optim']    == 'default': env['cc_optim'] = cc_optim
260    BoolOption('useMPI', 'Compile parallel version using MPI', useMPI_default),  if env['cc_debug']    == 'default': env['cc_debug'] = cc_debug
261    ('MPICH_IGNORE_CXX_SEEK', 'name of macro to ignore MPI settings of C++ SEEK macro (for MPICH)' , 'MPICH_IGNORE_CXX_SEEK'),  if env['omp_flags']   == 'default': env['omp_flags'] = omp_flags
262    PathOption('mpi_path', 'Path to MPI includes', mpi_path_default),  if env['omp_ldflags'] == 'default': env['omp_ldflags'] = omp_ldflags
263    ('mpi_run', 'mpirun name' , mpi_run_default),  if env['cc_extra']  != '': env.Append(CFLAGS = env['cc_extra'])
264    PathOption('mpi_lib_path', 'Path to MPI libs (needs to be added to the LD_LIBRARY_PATH)',mpi_lib_path_default),  if env['cxx_extra'] != '': env.Append(CXXFLAGS = env['cxx_extra'])
265    ('mpi_libs', 'MPI libraries to link with (needs to be shared!)', mpi_libs_default)  if env['ld_extra']  != '': env.Append(LINKFLAGS = env['ld_extra'])
266  )  
267  #=================================================================================================  if env['BADPYTHONMACROS']: env.Append(CXXFLAGS = ' -DBADPYTHONMACROS')
268  #  
269  #   Note: On the Altix the intel compilers are not automatically  if env['usepython3']:
270  #   detected by scons intelc.py script. The Altix has a different directory      env.Append(CPPDEFINES=['ESPYTHON3'])
271  #   path and in some locations the "modules" facility is used to support  
272  #   multiple compiler versions. This forces the need to import the users PATH  # set up the autolazy values
273  #   environment which isn't the "scons way"  if env['forcelazy'] == 'on':
274  #   This doesn't impact linux and windows which will use the default compiler (g++ or msvc, or the intel compiler if it is installed on both platforms)      env.Append(CPPDEFINES=['FAUTOLAZYON'])
275  #   FIXME: Perhaps a modification to intelc.py will allow better support for ia64 on altix  elif env['forcelazy'] == 'off':
276  #      env.Append(CPPDEFINES=['FAUTOLAZYOFF'])
277    
278  if IS_WINDOWS_PLATFORM:  # set up the collective resolve values
279        env = Environment(tools = ['default', 'msvc'], options = opts)  if env['forcecollres'] == 'on':
280  else:      env.Append(CPPDEFINES=['FRESCOLLECTON'])
281     if os.uname()[4]=='ia64':  elif env['forcecollres'] == 'off':
282        env = Environment(tools = ['default', 'intelc'], options = opts)      env.Append(CPPDEFINES=['FRESCOLLECTOFF'])
283        if env['CXX'] == 'icpc':  
284           env['LINK'] = env['CXX'] # version >=9 of intel c++ compiler requires use of icpc to link in C++ runtimes (icc does not). FIXME: this behaviour could be directly incorporated into scons intelc.py  # allow non-standard C if requested
285     else:  if env['iknowwhatimdoing']:
286        env = Environment(tools = ['default'], options = opts)      env.Append(CPPDEFINES=['IKNOWWHATIMDOING'])
287  Help(opts.GenerateHelpText(env))  
288    # Disable OpenMP if no flags provided
289  if env['bounds_check']:  if env['openmp'] and env['omp_flags'] == '':
290     env.Append(CPPDEFINES = [ 'BOUNDS_CHECK' ])     print("OpenMP requested but no flags provided - disabling OpenMP!")
291     env.Append(CXXDEFINES = [ 'BOUNDS_CHECK' ])     env['openmp'] = False
292     bounds_check = env['bounds_check']  
if env['openmp']:
    env.Append(CCFLAGS = env['omp_flags'])
    if env['omp_ldflags'] != '': env.Append(LINKFLAGS = env['omp_ldflags'])
else:
    env['omp_flags']=''
    env['omp_ldflags']=''

# add debug/non-debug compiler flags
if env['debug']:
    env.Append(CCFLAGS = env['cc_debug'])
else:
    env.Append(CCFLAGS = env['cc_optim'])

# always add cc_flags
env.Append(CCFLAGS = env['cc_flags'])

# add system libraries
env.AppendUnique(LIBS = env['sys_libs'])


global_revision=ARGUMENTS.get('SVN_VERSION', None)
if global_revision:
    global_revision = re.sub(':.*', '', global_revision)
    global_revision = re.sub('[^0-9]', '', global_revision)
    if global_revision == '': global_revision='-2'
else:
  # Get the global Subversion revision number for the getVersion() method
  try:
    global_revision = os.popen('svnversion -n .').read()
    global_revision = re.sub(':.*', '', global_revision)
    global_revision = re.sub('[^0-9]', '', global_revision)
    if global_revision == '': global_revision='-2'
  except:
    global_revision = '-1'
env['svn_revision']=global_revision
env.Append(CPPDEFINES=['SVN_VERSION='+global_revision])
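As an illustration of the sanitisation above (not part of the build file itself): svnversion can report mixed-revision or modified working copies such as "4076:4078M", and the two substitutions reduce that to a plain revision number.

# Illustrative only: how the two re.sub() calls above normalise svnversion output.
import re
raw = '4076:4078M'               # example: mixed-revision, locally modified working copy
rev = re.sub(':.*', '', raw)     # '4076'  -- drop everything after the colon
rev = re.sub('[^0-9]', '', rev)  # '4076'  -- strip any remaining non-digits such as 'M'
print(rev)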
if IS_WINDOWS:
    if not env['build_shared']:
        env.Append(CPPDEFINES = ['ESYSUTILS_STATIC_LIB'])
        env.Append(CPPDEFINES = ['PASO_STATIC_LIB'])

###################### Copy required environment vars ########################

# Windows doesn't use LD_LIBRARY_PATH but PATH instead
if IS_WINDOWS:
    LD_LIBRARY_PATH_KEY='PATH'
    env['ENV']['LD_LIBRARY_PATH']=''
else:
    LD_LIBRARY_PATH_KEY='LD_LIBRARY_PATH'
343    
344  #=================================================================================================  # the following env variables are exported for the unit tests
 #  
 #     Initialise Scons Build Environment  
 #     check for user environment variables we are interested in  
 try:  
    tmp = os.environ['PYTHONPATH']  
    env['ENV']['PYTHONPATH'] = tmp  
 except KeyError:  
    pass  
345    
346  env.PrependENVPath('PYTHONPATH', source_root)  for key in 'OMP_NUM_THREADS', 'ESCRIPT_NUM_PROCS', 'ESCRIPT_NUM_NODES':
347        try:
348            env['ENV'][key] = os.environ[key]
349        except KeyError:
350            env['ENV'][key] = 1
351    
352  try:  env_export=env['env_export']
353     omp_num_threads = os.environ['OMP_NUM_THREADS']  env_export.extend(['ESCRIPT_NUM_THREADS','ESCRIPT_HOSTFILE','DISPLAY','XAUTHORITY','PATH','HOME','KMP_MONITOR_STACKSIZE','TMPDIR','TEMP','TMP'])
354  except KeyError:  
355     omp_num_threads = 1  for key in set(env_export):
356  env['ENV']['OMP_NUM_THREADS'] = omp_num_threads      try:
357            env['ENV'][key] = os.environ[key]
358        except KeyError:
359            pass
360    
361  try:  try:
362     path = os.environ['PATH']      env.PrependENVPath(LD_LIBRARY_PATH_KEY, os.environ[LD_LIBRARY_PATH_KEY])
    env['ENV']['PATH'] = path  
363  except KeyError:  except KeyError:
364     omp_num_threads = 1      pass
365    
366  env['ENV']['OMP_NUM_THREADS'] = omp_num_threads  # these shouldn't be needed
367    #for key in 'C_INCLUDE_PATH','CPLUS_INCLUDE_PATH','LIBRARY_PATH':
368    #    try:
369    #        env['ENV'][key] = os.environ[key]
370    #    except KeyError:
371    #        pass
372    
   
 # Copy some variables from the system environment to the build environment  
373  try:  try:
374     env['ENV']['DISPLAY'] = os.environ['DISPLAY']      env['ENV']['PYTHONPATH'] = os.environ['PYTHONPATH']
    env['ENV']['XAUTHORITY'] = os.environ['XAUTHORITY']  
    home_temp = os.environ['HOME']   # MPICH2's mpd needs $HOME to find $HOME/.mpd.conf  
    env['ENV']['HOME'] = home_temp  
375  except KeyError:  except KeyError:
376     pass      pass
377    
378  try:  ######################## Add some custom builders ############################
    tmp = os.environ['PATH']  
    env['ENV']['PATH'] = tmp  
 except KeyError:  
    pass  
379    
380  try:  if env['pythoncmd']=='python':
381     tmp = os.environ['LD_LIBRARY_PATH']      py_builder = Builder(action = build_py, suffix = '.pyc', src_suffix = '.py', single_source=True)
382     print tmp  else:
383     env['ENV']['LD_LIBRARY_PATH'] = tmp      py_builder = Builder(action = env['pythoncmd']+" scripts/py_comp.py $SOURCE $TARGET", suffix = '.pyc', src_suffix = '.py', single_source=True)
 except KeyError:  
    pass  
 #==========================================================================  
 #  
 #    Add some customer builders  
 #  
 py_builder = Builder(action = scons_extensions.build_py, suffix = '.pyc', src_suffix = '.py', single_source=True)  
384  env.Append(BUILDERS = {'PyCompile' : py_builder});  env.Append(BUILDERS = {'PyCompile' : py_builder});
385    
386  runUnitTest_builder = Builder(action = scons_extensions.runUnitTest, suffix = '.passed',  runUnitTest_builder = Builder(action = runUnitTest, suffix = '.passed', src_suffix=env['PROGSUFFIX'], single_source=True)
                               src_suffix=env['PROGSUFFIX'], single_source=True)  
   
387  env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder});  env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder});
388    
389  runPyUnitTest_builder = Builder(action = scons_extensions.runPyUnitTest, suffix = '.passed', src_suffic='.py', single_source=True)  runPyUnitTest_builder = Builder(action = runPyUnitTest, suffix = '.passed', src_suffic='.py', single_source=True)
390  env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder});  env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder});
391    
392  # Convert the options which are held in environment variable into python variables for ease of handling and configure compilation options  epstopdfbuilder = Builder(action = eps2pdf, suffix='.pdf', src_suffix='.eps', single_source=True)
393  try:  env.Append(BUILDERS = {'EpsToPDF' : epstopdfbuilder});
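The following sketch shows how SConscript files elsewhere in the tree might invoke the builders registered above. It is a hypothetical fragment for illustration only (the target and source names are made up and the real SConscripts are not part of this diff).

# Hypothetical SConscript fragment -- illustrative use of the custom builders.
# PyCompile byte-compiles a .py source to .pyc; RunUnitTest/RunPyUnitTest write
# a '.passed' marker when the test program or script succeeds; EpsToPDF converts
# an .eps figure to .pdf.
Import('env')
env.PyCompile('util.py')                  # -> util.pyc
env.RunUnitTest('paso_UnitTest')          # -> paso_UnitTest.passed
env.RunPyUnitTest('run_simplesolve.py')   # -> run_simplesolve.passed
env.EpsToPDF('figures/mesh.eps')          # -> figures/mesh.pdf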
    incinstall = env['incinstall']  
    env.Append(CPPPATH = [incinstall,])  
 except KeyError:  
    incinstall = None  
 try:  
    libinstall = env['libinstall']  
    env.Append(LIBPATH = [libinstall,]) # Adds -L for building of libescript.so libfinley.so escriptcpp.so finleycpp.so  
    env.PrependENVPath('LD_LIBRARY_PATH', libinstall)  
    if IS_WINDOWS_PLATFORM :  
       env.PrependENVPath('PATH', libinstall)  
       env.PrependENVPath('PATH', env['boost_lib_path'])  
 except KeyError:  
    libinstall = None  
 try:  
    pyinstall = env['pyinstall'] # all targets will install into pyinstall/esys but PYTHONPATH points at straight pyinstall so you go import esys.escript etc  
 except KeyError:  
    pyinstall = None  
   
 try:  
    cc_defines = env['cc_defines']  
    env.Append(CPPDEFINES = cc_defines)  
 except KeyError:  
    pass  
 try:  
    flags = env['ar_flags']  
    env.Append(ARFLAGS = flags)  
 except KeyError:  
    ar_flags = None  
 try:  
    sys_libs = env['sys_libs']  
 except KeyError:  
    sys_libs = []  
   
 try:  
    tar_flags = env['tar_flags']  
    env.Replace(TARFLAGS = tar_flags)  
 except KeyError:  
    pass  
   
 try:  
    exinstall = env['exinstall']  
 except KeyError:  
    exinstall = None  
 try:  
    sys_libinstall = env['sys_libinstall']  
 except KeyError:  
    sys_libinstall = None  
 try:  
    sys_pyinstall = env['sys_pyinstall']  
 except KeyError:  
    sys_pyinstall = None  
 try:  
    sys_exinstall = env['sys_exinstall']  
 except KeyError:  
    sys_exinstall = None  
   
 # ====================== debugging ===================================  
 try:  
    dodebug = env['dodebug']  
 except KeyError:  
    dodebug = None  
394    
395  # === switch on omp ===================================================  ############################ Dependency checks ###############################
 try:  
   omp_flags = env['omp_flags']  
 except KeyError:  
   omp_flags = ''  
396    
397  try:  # Create a Configure() environment to check for compilers and python
398    omp_flags_debug = env['omp_flags_debug']  conf = Configure(env.Clone())
 except KeyError:  
   omp_flags_debug = ''  
   
 # ========= use mpi? =====================================================  
 try:  
    useMPI = env['useMPI']  
 except KeyError:  
    useMPI = None  
 # ========= set compiler flags ===========================================  
399    
400  # Can't use MPI and OpenMP simultaneously at this time  ######## Test that the compilers work
401  if useMPI:  
402      omp_flags=''  if 'CheckCC' in dir(conf): # exists since scons 1.1.0
403      omp_flags_debug=''      if not conf.CheckCC():
        print("Cannot run C compiler '%s' (check config.log)" % (env['CC']))
        Exit(1)
    if not conf.CheckCXX():
        print("Cannot run C++ compiler '%s' (check config.log)" % (env['CXX']))
        Exit(1)
else:
    if not conf.CheckFunc('printf', language='c'):
        print("Cannot run C compiler '%s' (check config.log)" % (env['CC']))
        Exit(1)
    if not conf.CheckFunc('printf', language='c++'):
        print("Cannot run C++ compiler '%s' (check config.log)" % (env['CXX']))
        Exit(1)

if conf.CheckFunc('gethostname'):
    conf.env.Append(CPPDEFINES = ['HAVE_GETHOSTNAME'])

######## Python headers & library (required)

# First we check to see if the config file has specified where to find the
# file. Ideally, this should be automatic, but we need to deal with the case
# where python is not in its INSTALL directory.
# Use the python scons is running
if env['pythoncmd']=='python':
    python_inc_path=sysconfig.get_python_inc()
    if IS_WINDOWS:
        python_lib_path=os.path.join(sysconfig.get_config_var('prefix'), 'libs')
    elif env['PLATFORM']=='darwin':
        python_lib_path=sysconfig.get_config_var('LIBPL')
    else:
        python_lib_path=sysconfig.get_config_var('LIBDIR')

    #python_libs=[sysconfig.get_config_var('LDLIBRARY')] # only on linux
    if IS_WINDOWS:
        python_libs=['python%s%s'%(sys.version_info[0], sys.version_info[1])]
    else:
        python_libs=['python'+sysconfig.get_python_version()]

# if we want to use a python other than the one scons is running
else:
    initstring='from __future__ import print_function;from distutils import sysconfig;'
    if env['pythonlibname']!='':
        python_libs=env['pythonlibname']
    else:   # work it out by calling python
        if IS_WINDOWS:
            cmd='print("python%s%s"%(sys.version_info[0], sys.version_info[1]))'
        else:
            cmd='print("python"+sysconfig.get_python_version())'
        p=Popen([env['pythoncmd'], '-c', initstring+cmd], stdout=PIPE)
        python_libs=p.stdout.readline()
        if env['usepython3']:       # This is to convert unicode str into py2 string
            python_libs=python_libs.encode() # If scons runs on py3 then this must be rethought
        p.wait()
        python_libs=python_libs.strip()
458    
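For reference, the same distutils queries used above can be run directly to see what the build would pick up on a given machine. The printed values below are only examples of typical output on a Linux box with python 2.7; actual results vary by platform and installation.

# Illustrative only: querying the running python the same way the SConstruct does.
from distutils import sysconfig
print(sysconfig.get_python_inc())                  # e.g. /usr/include/python2.7
print(sysconfig.get_config_var('LIBDIR'))          # e.g. /usr/lib
print('python' + sysconfig.get_python_version())   # e.g. python2.7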
 if dodebug:  
     try:  
       flags = env['cc_flags_debug'] + ' ' + omp_flags_debug  
       env.Append(CCFLAGS = flags)  
     except KeyError:  
       pass  
 else:  
    try:  
       flags = env['cc_flags'] + ' ' + omp_flags  
       env.Append(CCFLAGS = flags)  
    except KeyError:  
       pass  
 if dodebug:  
      try:  
         flags = env['cxx_flags_debug']  
         env.Append(CXXFLAGS = flags)  
      except KeyError:  
         pass  
 else:  
      try:  
         flags = env['cxx_flags']  
         env.Append(CXXFLAGS = flags)  
      except KeyError:  
         pass  
 try:  
      if env['CC'] == 'gcc': env.Append(CCFLAGS = "-pedantic-errors -Wno-long-long")  
 except:  
      pass  
   
 # ============= Remember what options were used in the compile =====================================  
 if not IS_WINDOWS_PLATFORM:  
   env.Execute("/bin/rm -f " + libinstall + "/Compiled.with.*")  
   if dodebug:       env.Execute("touch " + libinstall + "/Compiled.with.debug")  
   if useMPI:        env.Execute("touch " + libinstall + "/Compiled.with.mpi")  
   if omp_flags != '':   env.Execute("touch " + libinstall + "/Compiled.with.OpenMP")  
   if bounds_check:  env.Execute("touch " + libinstall + "/Compiled.with.bounds_check")  
   
 # ============= set mkl (but only of no MPI) =====================================  
 if not useMPI:  
    try:  
       includes = env['mkl_path']  
       env.Append(CPPPATH = [includes,])  
    except KeyError:  
       pass  
   
    try:  
       lib_path = env['mkl_lib_path']  
       env.Append(LIBPATH = [lib_path,])  
       env['ENV']['LD_LIBRARY_PATH']+=":"+lib_path  
    except KeyError:  
       pass  
   
    try:  
       mkl_libs = env['mkl_libs']  
    except KeyError:  
       mkl_libs = []  
 else:  
      mkl_libs = []  
   
 # ============= set scsl (but only of no MPI) =====================================  
 if not useMPI:  
    try:  
       includes = env['scsl_path']  
       env.Append(CPPPATH = [includes,])  
    except KeyError:  
       pass  
   
    try:  
       lib_path = env['scsl_lib_path']  
       env.Append(LIBPATH = [lib_path,])  
       env['ENV']['LD_LIBRARY_PATH']+=":"+lib_path  
    except KeyError:  
       pass  
459        
460     try:      # Now we know whether we are using python3 or not
461        scsl_libs = env['scsl_libs']      p=Popen([env['pythoncmd'], '-c',  initstring+'print(sysconfig.get_python_inc())'], stdout=PIPE)
462     except KeyError:      python_inc_path=p.stdout.readline()
463        scsl_libs = [ ]      if env['usepython3']:
464             python_inc_path=python_inc_path.encode()
465  else:      p.wait()  
466      scsl_libs =  []      python_inc_path=python_inc_path.strip()
467        if IS_WINDOWS:
468  # ============= set TRILINOS (but only with MPI) =====================================          cmd="os.path.join(sysconfig.get_config_var('prefix'), 'libs')"
469  if useMPI:      elif env['PLATFORM']=='darwin':
470     try:          cmd="sysconfig.get_config_var(\"LIBPL\")"
471        includes = env['trilinos_path']      else:
472        env.Append(CPPPATH = [includes,])          cmd="sysconfig.get_config_var(\"LIBDIR\")"
473     except KeyError:  
474        pass      p=Popen([env['pythoncmd'], '-c', initstring+'print('+cmd+')'], stdout=PIPE)
475        python_lib_path=p.stdout.readline()
476     try:      if env['usepython3']:
477        lib_path = env['trilinos_lib_path']          python_lib_path=python_lib_path.decode()
478        env.Append(LIBPATH = [lib_path,])      p.wait()
479     except KeyError:      python_lib_path=python_lib_path.strip()
480        pass  
481    #Check for an override from the config file.
482     try:  #Ideally, this should be automatic
483        trilinos_libs = env['trilinos_libs']  #But we need to deal with the case where python is not in its INSTALL
484     except KeyError:  #Directory
485        trilinos_libs = []  if env['pythonlibpath']!='':
486  else:      python_lib_path=env['pythonlibpath']
487       trilinos_libs = []  
488    if env['pythonincpath']!='':
489        python_inc_path=env['pythonincpath']
490  # ============= set umfpack (but only without MPI) =====================================  
491  umf_libs=[ ]  
492  if not useMPI:  if sysheaderopt == '':
493     try:      conf.env.AppendUnique(CPPPATH = [python_inc_path])
494        includes = env['umf_path']  else:
495        env.Append(CPPPATH = [includes,])      conf.env.Append(CCFLAGS = [sysheaderopt, python_inc_path])
496     except KeyError:  
497        pass  conf.env.AppendUnique(LIBPATH = [python_lib_path])
498    conf.env.AppendUnique(LIBS = python_libs)
499     try:  # The wrapper script needs to find the libs
500        lib_path = env['umf_lib_path']  conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, python_lib_path)
501        env.Append(LIBPATH = [lib_path,])  
502     except KeyError:  if not conf.CheckCHeader('Python.h'):
503        pass      print("Cannot find python include files (tried 'Python.h' in directory %s)" % (python_inc_path))
504        Exit(1)
505     try:  if not conf.CheckFunc('Py_Exit'):
506        umf_libs = env['umf_libs']      print("Cannot find python library method Py_Main (tried %s in directory %s)" % (python_libs, python_lib_path))
507        umf_libs+=umf_libs      Exit(1)
508     except KeyError:  
509        pass  ## reuse conf to check for numpy header (optional)
510    if env['usepython3']:
511     try:      # FIXME: This is until we can work out how to make the checks in python 3
512        includes = env['ufc_path']      conf.env['numpy_h']=False
513        env.Append(CPPPATH = [includes,])  else:
514     except KeyError:      if conf.CheckCXXHeader(['Python.h','numpy/ndarrayobject.h']):
515        pass          conf.env.Append(CPPDEFINES = ['HAVE_NUMPY_H'])
516            conf.env['numpy_h']=True
517     try:      else:
518        includes = env['amd_path']          conf.env['numpy_h']=False
519        env.Append(CPPPATH = [includes,])  
520     except KeyError:  # Commit changes to environment
521        pass  env = conf.Finish()
522    
523     try:  ######## boost (required)
524        lib_path = env['amd_lib_path']  
525        env.Append(LIBPATH = [lib_path,])  boost_inc_path,boost_lib_path=findLibWithHeader(env, env['boost_libs'], 'boost/python.hpp', env['boost_prefix'], lang='c++')
526     except KeyError:  if sysheaderopt == '':
527        pass      env.AppendUnique(CPPPATH = [boost_inc_path])
528    else:
529     try:      # This is required because we can't -isystem /usr/include since it breaks
530        amd_libs = env['amd_libs']      # std includes
531        umf_libs+=amd_libs      if os.path.normpath(boost_inc_path) == '/usr/include':
532     except KeyError:          conf.env.Append(CCFLAGS=[sysheaderopt, os.path.join(boost_inc_path,'boost')])
533        pass      else:
534            env.Append(CCFLAGS=[sysheaderopt, boost_inc_path])
535  # ============= set TRILINOS (but only with MPI) =====================================  
536  if useMPI:  env.AppendUnique(LIBPATH = [boost_lib_path])
537     try:  env.AppendUnique(LIBS = env['boost_libs'])
538        includes = env['trilinos_path']  env.PrependENVPath(LD_LIBRARY_PATH_KEY, boost_lib_path)
539        env.Append(CPPPATH = [includes,])  
540     except KeyError:  ######## numpy (required)
541        pass  
542    if not detectModule(env, 'numpy'):
543     try:      print("Cannot import numpy. If it is installed try setting your PYTHONPATH and probably %s"%LD_LIBRARY_PATH_KEY)
544        lib_path = env['trilinos_lib_path']      Exit(1)
545        env.Append(LIBPATH = [lib_path,])  
546     except KeyError:  ######## CppUnit (required for tests)
547        pass  
548    try:
549     try:      cppunit_inc_path,cppunit_lib_path=findLibWithHeader(env, env['cppunit_libs'], 'cppunit/TestFixture.h', env['cppunit_prefix'], lang='c++')
550        trilinos_libs = env['trilinos_libs']      env.AppendUnique(CPPPATH = [cppunit_inc_path])
551     except KeyError:      env.AppendUnique(LIBPATH = [cppunit_lib_path])
552        trilinos_libs = []      env.PrependENVPath(LD_LIBRARY_PATH_KEY, cppunit_lib_path)
553  else:      env['cppunit']=True
554       trilinos_libs = []  except:
555        env['cppunit']=False
 # ============= set blas =====================================  
 try:  
    includes = env['blas_path']  
    env.Append(CPPPATH = [includes,])  
 except KeyError:  
    pass  
   
 try:  
    lib_path = env['blas_lib_path']  
    env.Append(LIBPATH = [lib_path,])  
 except KeyError:  
    pass  
556    
557  try:  ######## sympy (optional)
    blas_libs = env['blas_libs']  
 except KeyError:  
    blas_libs = [ ]  
558    
559  # ========== netcdf ====================================  if detectModule(env, 'sympy'):
560  try:      env['sympy'] = True
    useNetCDF = env['useNetCDF']  
 except KeyError:  
    useNetCDF = 'yes'  
    pass  
       
 if useNetCDF == 'yes':  
    try:  
       netCDF_libs = env['netCDF_libs']  
    except KeyError:  
       pass  
   
    env.Append(LIBS = netCDF_libs)  
    env.Append(CPPDEFINES = [ 'USE_NETCDF' ])  
    try:  
       includes = env['netCDF_path']  
       env.Append(CPPPATH = [includes,])  
    except KeyError:  
       pass  
   
    try:  
       lib_path = env['netCDF_lib_path']  
       env.Append(LIBPATH = [ lib_path, ])  
       env['ENV']['LD_LIBRARY_PATH']+=":"+lib_path  
       if IS_WINDOWS_PLATFORM :  
          env.PrependENVPath('PATH', lib_path)  
    except KeyError:  
       pass  
561  else:  else:
562     print "Warning: Installation is not configured with netCDF. Some I/O function may not be available."      print("Cannot import sympy. Symbolic toolbox and nonlinear PDEs will not be available.")
563     netCDF_libs=[ ]      env['sympy'] = False
564    
565  # ====================== boost ======================================  ######## netCDF (optional)
566  try:  
567     includes = env['boost_path']  netcdf_inc_path=''
568     env.Append(CPPPATH = [includes,])  netcdf_lib_path=''
569  except KeyError:  if env['netcdf']:
570     pass      netcdf_inc_path,netcdf_lib_path=findLibWithHeader(env, env['netcdf_libs'], 'netcdf.h', env['netcdf_prefix'], lang='c++')
571  try:      env.AppendUnique(CPPPATH = [netcdf_inc_path])
572     lib_path = env['boost_lib_path']      env.AppendUnique(LIBPATH = [netcdf_lib_path])
573     env.Append(LIBPATH = [lib_path,])      env.AppendUnique(LIBS = env['netcdf_libs'])
574     env['ENV']['LD_LIBRARY_PATH']+=":"+lib_path      env.PrependENVPath(LD_LIBRARY_PATH_KEY, netcdf_lib_path)
575     if IS_WINDOWS_PLATFORM :      env.Append(CPPDEFINES = ['USE_NETCDF'])
576        env.PrependENVPath('PATH', lib_path)  
577  except KeyError:  ######## PAPI (optional)
578     pass  
579  try:  papi_inc_path=''
580     boost_lib = env['boost_lib']  papi_lib_path=''
581  except KeyError:  if env['papi']:
582     boost_lib = None      papi_inc_path,papi_lib_path=findLibWithHeader(env, env['papi_libs'], 'papi.h', env['papi_prefix'], lang='c')
583  # ====================== python ======================================      env.AppendUnique(CPPPATH = [papi_inc_path])
584  try:      env.AppendUnique(LIBPATH = [papi_lib_path])
585     includes = env['python_path']      env.AppendUnique(LIBS = env['papi_libs'])
586     env.Append(CPPPATH = [includes,])      env.PrependENVPath(LD_LIBRARY_PATH_KEY, papi_lib_path)
587  except KeyError:      env.Append(CPPDEFINES = ['BLOCKPAPI'])
588     pass  
589  try:  ######## MKL (optional)
590     lib_path = env['python_lib_path']  
591     env['ENV']['LD_LIBRARY_PATH']+=":"+lib_path  mkl_inc_path=''
592     env.Append(LIBPATH = [lib_path,])  mkl_lib_path=''
593  except KeyError:  if env['mkl']:
594     pass      mkl_inc_path,mkl_lib_path=findLibWithHeader(env, env['mkl_libs'], 'mkl_solver.h', env['mkl_prefix'], lang='c')
595  try:      env.AppendUnique(CPPPATH = [mkl_inc_path])
596     python_lib = env['python_lib']      env.AppendUnique(LIBPATH = [mkl_lib_path])
597  except KeyError:      env.AppendUnique(LIBS = env['mkl_libs'])
598     python_lib = None      env.PrependENVPath(LD_LIBRARY_PATH_KEY, mkl_lib_path)
599  # =============== documentation =======================================      env.Append(CPPDEFINES = ['MKL'])
600  try:  
601     doxygen_path = env['doxygen_path']  ######## UMFPACK (optional)
602  except KeyError:  
603     doxygen_path = None  umfpack_inc_path=''
604  try:  umfpack_lib_path=''
605     epydoc_path = env['epydoc_path']  if env['umfpack']:
606  except KeyError:      umfpack_inc_path,umfpack_lib_path=findLibWithHeader(env, env['umfpack_libs'], 'umfpack.h', env['umfpack_prefix'], lang='c')
607     epydoc_path = None      env.AppendUnique(CPPPATH = [umfpack_inc_path])
608  # =============== PAPI =======================================      env.AppendUnique(LIBPATH = [umfpack_lib_path])
609  try:      env.AppendUnique(LIBS = env['umfpack_libs'])
610     includes = env['papi_path']      env.PrependENVPath(LD_LIBRARY_PATH_KEY, umfpack_lib_path)
611     env.Append(CPPPATH = [includes,])      env.Append(CPPDEFINES = ['UMFPACK'])
612  except KeyError:  
613     pass  ######## LAPACK (optional)
614  try:  
615     lib_path = env['papi_lib_path']  if env['lapack']=='mkl' and not env['mkl']:
616     env.Append(LIBPATH = [lib_path,])      print("mkl_lapack requires MKL!")
617  except KeyError:      Exit(1)
618     pass  
619  try:  env['uselapack'] = env['lapack']!='none'
620     papi_libs = env['papi_libs']  lapack_inc_path=''
621  except KeyError:  lapack_lib_path=''
622     papi_libs = None  if env['uselapack']:
623  # ============= set mpi =====================================      header='clapack.h'
624  if useMPI:      if env['lapack']=='mkl':
625     env.Append(CPPDEFINES=['PASO_MPI',])          env.AppendUnique(CPPDEFINES = ['MKL_LAPACK'])
626     try:          header='mkl_lapack.h'
627        includes = env['mpi_path']      lapack_inc_path,lapack_lib_path=findLibWithHeader(env, env['lapack_libs'], header, env['lapack_prefix'], lang='c')
628        env.Append(CPPPATH = [includes,])      env.AppendUnique(CPPPATH = [lapack_inc_path])
629     except KeyError:      env.AppendUnique(LIBPATH = [lapack_lib_path])
630        pass      env.AppendUnique(LIBS = env['lapack_libs'])
631     try:      env.Append(CPPDEFINES = ['USE_LAPACK'])
632        lib_path = env['mpi_lib_path']  
633        env.Append(LIBPATH = [lib_path,])  ######## Silo (optional)
634        env['ENV']['LD_LIBRARY_PATH']+=":"+lib_path  
635     except KeyError:  silo_inc_path=''
636        pass  silo_lib_path=''
637     try:  if env['silo']:
638        mpi_libs = env['mpi_libs']      silo_inc_path,silo_lib_path=findLibWithHeader(env, env['silo_libs'], 'silo.h', env['silo_prefix'], lang='c')
639     except KeyError:      env.AppendUnique(CPPPATH = [silo_inc_path])
640        mpi_libs = []      env.AppendUnique(LIBPATH = [silo_lib_path])
641        # Note that we do not add the libs since they are only needed for the
642     try:      # weipa library and tools.
643        mpi_run = env['mpi_run']      #env.AppendUnique(LIBS = [env['silo_libs']])
644     except KeyError:  
645        mpi_run = ''  ######## VSL random numbers (optional)
646    if env['vsl_random']:
647     try:      env.Append(CPPDEFINES = ['MKLRANDOM'])
648         mpich_ignore_cxx_seek=env['MPICH_IGNORE_CXX_SEEK']  
649         env.Append(CPPDEFINES = [ mpich_ignore_cxx_seek ] )  ######## VisIt (optional)
650     except KeyError:  
651        pass  visit_inc_path=''
652  else:  visit_lib_path=''
653    mpi_libs=[]  if env['visit']:
654    mpi_run = mpi_run_default      visit_inc_path,visit_lib_path=findLibWithHeader(env, env['visit_libs'], 'VisItControlInterface_V2.h', env['visit_prefix'], lang='c')
655  # =========== zip files ===========================================      env.AppendUnique(CPPPATH = [visit_inc_path])
656  try:      env.AppendUnique(LIBPATH = [visit_lib_path])
657     includes = env['papi_path']  
658     env.Append(CPPPATH = [includes,])  ######## MPI (optional)
659  except KeyError:  
660     pass  if env['mpi']=='no':
661  try:      env['mpi']='none'
662     lib_path = env['papi_lib_path']  
663     env.Append(LIBPATH = [lib_path,])  env['usempi'] = env['mpi']!='none'
664  except KeyError:  mpi_inc_path=''
665     pass  mpi_lib_path=''
666  try:  if env['usempi']:
667     papi_libs = env['papi_libs']      mpi_inc_path,mpi_lib_path=findLibWithHeader(env, env['mpi_libs'], 'mpi.h', env['mpi_prefix'], lang='c')
668  except KeyError:      env.AppendUnique(CPPPATH = [mpi_inc_path])
669     papi_libs = None      env.AppendUnique(LIBPATH = [mpi_lib_path])
670  try:      env.AppendUnique(LIBS = env['mpi_libs'])
671     papi_instrument_solver = env['papi_instrument_solver']      env.PrependENVPath(LD_LIBRARY_PATH_KEY, mpi_lib_path)
672  except KeyError:      env.Append(CPPDEFINES = ['ESYS_MPI', 'MPI_NO_CPPBIND', 'MPICH_IGNORE_CXX_SEEK'])
673     papi_instrument_solver = None      # NetCDF 4.1 defines MPI_Comm et al. if MPI_INCLUDED is not defined!
674        # On the other hand MPT and OpenMPI don't define the latter so we have to
675        # do that here
676  # ============= and some helpers =====================================      if env['netcdf'] and env['mpi'] in ['MPT','OPENMPI']:
677  try:          env.Append(CPPDEFINES = ['MPI_INCLUDED'])
678     doxygen_path = env['doxygen_path']  
679  except KeyError:  ######## BOOMERAMG (optional)
680     doxygen_path = None  
681  try:  if env['mpi'] == 'none': env['boomeramg'] = False
682     epydoc_path = env['epydoc_path']  
683  except KeyError:  boomeramg_inc_path=''
684     epydoc_path = None  boomeramg_lib_path=''
685  try:  if env['boomeramg']:
686     src_zipfile = env.File(env['src_zipfile'])      boomeramg_inc_path,boomeramg_lib_path=findLibWithHeader(env, env['boomeramg_libs'], 'HYPRE.h', env['boomeramg_prefix'], lang='c')
687  except KeyError:      env.AppendUnique(CPPPATH = [boomeramg_inc_path])
688     src_zipfile = None      env.AppendUnique(LIBPATH = [boomeramg_lib_path])
689  try:      env.AppendUnique(LIBS = env['boomeramg_libs'])
690     test_zipfile = env.File(env['test_zipfile'])      env.PrependENVPath(LD_LIBRARY_PATH_KEY, boomeramg_lib_path)
691  except KeyError:      env.Append(CPPDEFINES = ['BOOMERAMG'])
692     test_zipfile = None  
693  try:  ######## ParMETIS (optional)
694     examples_zipfile = env.File(env['examples_zipfile'])  
695  except KeyError:  if not env['usempi']: env['parmetis'] = False
696     examples_zipfile = None  
697    parmetis_inc_path=''
698  try:  parmetis_lib_path=''
699     src_tarfile = env.File(env['src_tarfile'])  if env['parmetis']:
700  except KeyError:      parmetis_inc_path,parmetis_lib_path=findLibWithHeader(env, env['parmetis_libs'], 'parmetis.h', env['parmetis_prefix'], lang='c')
701     src_tarfile = None      env.AppendUnique(CPPPATH = [parmetis_inc_path])
702  try:      env.AppendUnique(LIBPATH = [parmetis_lib_path])
703     test_tarfile = env.File(env['test_tarfile'])      env.AppendUnique(LIBS = env['parmetis_libs'])
704  except KeyError:      env.PrependENVPath(LD_LIBRARY_PATH_KEY, parmetis_lib_path)
705     test_tarfile = None      env.Append(CPPDEFINES = ['USE_PARMETIS'])
706  try:  
707     examples_tarfile = env.File(env['examples_tarfile'])  ######## gmsh (optional, for tests)
708  except KeyError:  
709     examples_tarfile = None  try:
710        p=Popen(['gmsh', '-info'], stderr=PIPE)
711  try:      _,e=p.communicate()
712     guide_pdf = env.File(env['guide_pdf'])      if e.split().count("MPI"):
713  except KeyError:          env['gmsh']='m'
714     guide_pdf = None      else:
715            env['gmsh']='s'
716  try:  except OSError:
717     guide_html_index = env.File('index.htm',env['guide_html'])      env['gmsh']=False
718  except KeyError:  
719     guide_html_index = None  ######## PDFLaTeX (for documentation)
720    if 'PDF' in dir(env) and '.tex' in env.PDF.builder.src_suffixes(env):
721  try:      env['pdflatex']=True
722     api_epydoc = env.Dir(env['api_epydoc'])  else:
723  except KeyError:      env['pdflatex']=False
724     api_epydoc = None  
725    ######################## Summarize our environment ###########################
726  try:  
727     api_doxygen = env.Dir(env['api_doxygen'])  # keep some of our install paths first in the list for the unit tests
728  except KeyError:  env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])
729     api_doxygen = None  env.PrependENVPath('PYTHONPATH', prefix)
730    env['ENV']['ESCRIPT_ROOT'] = prefix
731  global_revision="$Rev$"  
732  try:  if not env['verbose']:
733     svn_pipe = os.popen("svnversion -n .")      env['CCCOMSTR'] = "Compiling $TARGET"
734     global_revision = svn_pipe.readlines()      env['CXXCOMSTR'] = "Compiling $TARGET"
735     svn_pipe.close()      env['SHCCCOMSTR'] = "Compiling $TARGET"
736  except:      env['SHCXXCOMSTR'] = "Compiling $TARGET"
737     print "Extracting revision number failed. Using %s instead."%global_revision      env['ARCOMSTR'] = "Linking $TARGET"
738  global_revision = re.sub(":.*", "", global_revision[0])      env['LINKCOMSTR'] = "Linking $TARGET"
739  global_revision = re.sub("[^0-9]", "", global_revision)      env['SHLINKCOMSTR'] = "Linking $TARGET"
740  env.Append(CPPDEFINES = "SVN_VERSION="+global_revision)      env['PDFLATEXCOMSTR'] = "Building $TARGET from LaTeX input $SOURCES"
741        env['BIBTEXCOMSTR'] = "Generating bibliography $TARGET"
742  # Python install - esys __init__.py      env['MAKEINDEXCOMSTR'] = "Generating index $TARGET"
743  init_target = env.Command(pyinstall+'/__init__.py', None, Touch('$TARGET'))      env['PDFLATEXCOMSTR'] = "Building $TARGET from LaTeX input $SOURCES"
744        #Progress(['Checking -\r', 'Checking \\\r', 'Checking |\r', 'Checking /\r'], interval=17)
745  # FIXME: exinstall and friends related to examples are not working.  
746  build_target = env.Alias('build',[libinstall,incinstall,pyinstall,init_target])  print("")
747    print("*** Config Summary (see config.log and lib/buildvars for details) ***")
748  env.Default(build_target)  print("Escript/Finley revision %s"%global_revision)
749    print("  Install prefix:  %s"%env['prefix'])
750  # Zip targets  print("          Python:  %s"%sysconfig.PREFIX)
751  env.Alias('release_src',[ src_zipfile, src_tarfile ])  print("           boost:  %s"%env['boost_prefix'])
752  env.Alias('release_tests',[ test_zipfile, test_tarfile])  print("           numpy:  YES")
753  env.Alias('release_examples',[ examples_zipfile, examples_tarfile])  if env['usempi']:
754  env.Alias('examples_zipfile',examples_zipfile)      print("             MPI:  YES (flavour: %s)"%env['mpi'])
755  env.Alias('examples_tarfile',examples_tarfile)  else:
756  env.Alias('api_epydoc',api_epydoc)      print("             MPI:  DISABLED")
757  env.Alias('api_doxygen',api_doxygen)  if env['uselapack']:
758  env.Alias('guide_html_index',guide_html_index)      print("          LAPACK:  YES (flavour: %s)"%env['lapack'])
759  env.Alias('guide_pdf', guide_pdf)  else:
760  env.Alias('docs',[ 'release_examples', 'guide_pdf', api_epydoc, api_doxygen, guide_html_index])      print("          LAPACK:  DISABLED")
761  env.Alias('release', ['release_src', 'release_tests', 'docs'])  d_list=[]
762    e_list=[]
763  env.Alias('build_tests',build_target)    # target to build all C++ tests  for i in 'debug','openmp','boomeramg','mkl','netcdf','papi','parmetis','silo','sympy','umfpack','visit','vsl_random':
764  env.Alias('build_py_tests',build_target) # target to build all python tests      if env[i]: e_list.append(i)
765  env.Alias('build_all_tests', [ 'build_tests', 'build_py_tests' ] ) # target to build all tests      else: d_list.append(i)
766  env.Alias('run_tests', 'build_tests')   # target to run all C++ tests  for i in e_list:
767  env.Alias('py_tests', 'build_py_tests') # target to run all released python tests      print("%16s:  YES"%i)
768  env.Alias('all_tests', ['run_tests', 'py_tests']) # target to run all C++ and released python tests  for i in d_list:
769        print("%16s:  DISABLED"%i)
770    if env['cppunit']:
771  # Allow sconscripts to see the env      print("         CppUnit:  FOUND")
772  Export(["IS_WINDOWS_PLATFORM", "env", "incinstall", "libinstall", "pyinstall", "dodebug", "mkl_libs", "scsl_libs", "umf_libs", "blas_libs", "netCDF_libs", "useNetCDF", "mpi_run",  else:
773      "boost_lib", "python_lib", "doxygen_path", "epydoc_path", "papi_libs",      print("         CppUnit:  NOT FOUND")
774          "sys_libs", "test_zipfile", "src_zipfile", "test_tarfile", "src_tarfile", "examples_tarfile", "examples_zipfile", "trilinos_libs", "mpi_libs", "papi_instrument_solver",  if env['gmsh']=='m':
775          "guide_pdf", "guide_html_index", "api_epydoc", "api_doxygen", "useMPI" ])      print("            gmsh:  FOUND, MPI-ENABLED")
776    elif env['gmsh']=='s':
777  # End initialisation section      print("            gmsh:  FOUND")
778  # Begin configuration section  else:
779  # adds this file and the scons options directory to the source tar      print("            gmsh:  NOT FOUND")
780  release_srcfiles=[env.File('SConstruct'),env.Dir('lib'),env.Dir('include'),]+[ env.File(x) for x in glob.glob('scons/*.py') ]  if env['numpy_h']:
781  release_testfiles=[env.File('README_TESTS'),]      print("   numpy headers:  FOUND")
782  env.Zip(src_zipfile, release_srcfiles)  else:
783  env.Zip(test_zipfile, release_testfiles)      print("   numpy headers:  NOT FOUND")
784  try:  print("   vsl_random:  %s"%env['vsl_random'])
785     env.Tar(src_tarfile, release_srcfiles)      
786     env.Tar(test_tarfile, release_testfiles)  if ((fatalwarning != '') and (env['werror'])):
787  except AttributeError:      print("  Treating warnings as errors")
788     pass  else:
789  # Insert new components to be built here      print("  NOT treating warnings as errors")
790  # FIXME: might be nice to replace this verbosity with a list of targets and some  print("")
791  # FIXME: nifty python to create the lengthy but very similar env.Sconscript lines  
792  # Third Party libraries  ####################### Configure the subdirectories #########################
793  env.SConscript(dirs = ['tools/CppUnitTest/src'], build_dir='build/$PLATFORM/tools/CppUnitTest', duplicate=0)  
794  # C/C++ Libraries  from grouptest import *
795  env.SConscript(dirs = ['paso/src'], build_dir='build/$PLATFORM/paso', duplicate=0)  
796  # bruce is removed for now as it doesn't really do anything  TestGroups=[]
797  # env.SConscript(dirs = ['bruce/src'], build_dir='build/$PLATFORM/bruce', duplicate=0)  
798  env.SConscript(dirs = ['escript/src'], build_dir='build/$PLATFORM/escript', duplicate=0)  # keep an environment without warnings-as-errors
799  env.SConscript(dirs = ['esysUtils/src'], build_dir='build/$PLATFORM/esysUtils', duplicate=0)  dodgy_env=env.Clone()
800  env.SConscript(dirs = ['finley/src'], build_dir='build/$PLATFORM/finley', duplicate=0)  
801  env.SConscript(dirs = ['modellib/py_src'], build_dir='build/$PLATFORM/modellib', duplicate=0)  # now add warnings-as-errors flags. This needs to be done after configuration
802  env.SConscript(dirs = ['doc'], build_dir='build/$PLATFORM/doc', duplicate=0)  # because the scons test files have warnings in them
803  env.SConscript(dirs = ['pyvisi/py_src'], build_dir='build/$PLATFORM/pyvisi', duplicate=0)  if ((fatalwarning != '') and (env['werror'])):
804  env.SConscript(dirs = ['pycad/py_src'], build_dir='build/$PLATFORM/pycad', duplicate=0)      env.Append(CCFLAGS = fatalwarning)
805  env.SConscript(dirs = ['pythonMPI/src'], build_dir='build/$PLATFORM/pythonMPI', duplicate=0)  
806  #env.SConscript(dirs = ['../test'], build_dir='../test/build', duplicate=0)  Export(
807      ['env',
808       'dodgy_env',
809       'IS_WINDOWS',
810       'TestGroups'
811      ]
812    )
813    
814    env.SConscript(dirs = ['tools/escriptconvert'], variant_dir='$BUILD_DIR/$PLATFORM/tools/escriptconvert', duplicate=0)
815    env.SConscript(dirs = ['paso/src'], variant_dir='$BUILD_DIR/$PLATFORM/paso', duplicate=0)
816    env.SConscript(dirs = ['weipa/src'], variant_dir='$BUILD_DIR/$PLATFORM/weipa', duplicate=0)
817    env.SConscript(dirs = ['escript/src'], variant_dir='$BUILD_DIR/$PLATFORM/escript', duplicate=0)
818    env.SConscript(dirs = ['esysUtils/src'], variant_dir='$BUILD_DIR/$PLATFORM/esysUtils', duplicate=0)
819    env.SConscript(dirs = ['pasowrap/src'], variant_dir='$BUILD_DIR/$PLATFORM/pasowrap', duplicate=0)
820    env.SConscript(dirs = ['dudley/src'], variant_dir='$BUILD_DIR/$PLATFORM/dudley', duplicate=0)
821    env.SConscript(dirs = ['finley/src'], variant_dir='$BUILD_DIR/$PLATFORM/finley', duplicate=0)
822    env.SConscript(dirs = ['ripley/src'], variant_dir='$BUILD_DIR/$PLATFORM/ripley', duplicate=0)
823    env.SConscript(dirs = ['downunder/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/downunder', duplicate=0)
824    env.SConscript(dirs = ['modellib/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/modellib', duplicate=0)
825    env.SConscript(dirs = ['pycad/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/pycad', duplicate=0)
826    env.SConscript(dirs = ['pythonMPI/src'], variant_dir='$BUILD_DIR/$PLATFORM/pythonMPI', duplicate=0)
827    env.SConscript(dirs = ['doc'], variant_dir='$BUILD_DIR/$PLATFORM/doc', duplicate=0)
828    env.SConscript(dirs = ['paso/profiling'], variant_dir='$BUILD_DIR/$PLATFORM/paso/profiling', duplicate=0)
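# Each SConscript above is read with a variant_dir so that object files and
# other build products are written under $BUILD_DIR/$PLATFORM/... instead of
# the source tree; duplicate=0 tells SCons to compile the sources in place
# rather than copying them into the variant directory first.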
829    
830    
831    ######################## Populate the buildvars file #########################
832    
833    # remove obsolete files
834    if not env['usempi']:
835        Execute(Delete(os.path.join(env['libinstall'], 'pythonMPI')))
836        Execute(Delete(os.path.join(env['libinstall'], 'pythonMPIredirect')))
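# (Execute() runs the Delete action immediately, while the SConstruct is being
#  read, rather than deferring it to the build phase like a normal target.)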
837    
838    # Try to extract the boost version from version.hpp
839    boosthpp=open(os.path.join(boost_inc_path, 'boost', 'version.hpp'))
840    boostversion='unknown'
841    try:
842        for line in boosthpp:
843            ver=re.match(r'#define BOOST_VERSION (\d+)',line)
844            if ver:
845                boostversion=ver.group(1)
846    except StopIteration:
847        pass
848    boosthpp.close()
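# For reference, BOOST_VERSION encodes the release as a single integer,
# major*100000 + minor*100 + patch, so 104900 corresponds to Boost 1.49.0.
# A minimal sketch of converting it to a dotted string (illustrative only,
# not used by the build):
#
#   v = int(boostversion)
#   dotted = "%d.%d.%d" % (v // 100000, (v // 100) % 1000, v % 100)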
849    
850    
851    buildvars=open(os.path.join(env['libinstall'], 'buildvars'), 'w')
852    buildvars.write("svn_revision="+str(global_revision)+"\n")
853    buildvars.write("prefix="+prefix+"\n")
854    buildvars.write("cc="+env['CC']+"\n")
855    buildvars.write("cxx="+env['CXX']+"\n")
856    if env['pythoncmd']=='python':
857        buildvars.write("python="+sys.executable+"\n")
858        buildvars.write("python_version="+str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2])+"\n")
859    else:
860        buildvars.write("python="+env['pythoncmd']+"\n")
861        p=Popen([env['pythoncmd'], '-c', 'from __future__ import print_function;import sys;print(str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2]))'], stdout=PIPE)
862        verstring=p.stdout.readline().strip()
863        p.wait()
864        buildvars.write("python_version="+verstring+"\n")
865    buildvars.write("boost_inc_path="+boost_inc_path+"\n")
866    buildvars.write("boost_lib_path="+boost_lib_path+"\n")
867    buildvars.write("boost_version="+boostversion+"\n")
868    buildvars.write("debug=%d\n"%int(env['debug']))
869    buildvars.write("openmp=%d\n"%int(env['openmp']))
870    buildvars.write("mpi=%s\n"%env['mpi'])
871    buildvars.write("mpi_inc_path=%s\n"%mpi_inc_path)
872    buildvars.write("mpi_lib_path=%s\n"%mpi_lib_path)
873    buildvars.write("lapack=%s\n"%env['lapack'])
874    buildvars.write("vsl_random=%d\n"%int(env['vsl_random']))
875    for i in 'netcdf','parmetis','papi','mkl','umfpack','boomeramg','silo','visit':
876        buildvars.write("%s=%d\n"%(i, int(env[i])))
877        if env[i]:
878            buildvars.write("%s_inc_path=%s\n"%(i, eval(i+'_inc_path')))
879            buildvars.write("%s_lib_path=%s\n"%(i, eval(i+'_lib_path')))
880    buildvars.close()
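# The resulting lib/buildvars file is a plain key=value listing; an excerpt
# might look like the following (values are illustrative only):
#
#   svn_revision=4078
#   prefix=/usr/local/escript
#   cc=gcc
#   cxx=g++
#   python_version=2.7.3
#   boost_version=104900
#   mpi=OPENMPI
#   lapack=none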
881    
882    ################### Targets to build and install libraries ###################
883    
884    target_init = env.Command(os.path.join(env['pyinstall'],'__init__.py'), None, Touch('$TARGET'))
885    env.Alias('target_init', [target_init])
886    # delete buildvars upon cleanup
887    env.Clean('target_init', os.path.join(env['libinstall'], 'buildvars'))
888    
889    # The headers have to be installed prior to build in order to satisfy
890    # #include <paso/Common.h>
891    env.Alias('build_esysUtils', ['install_esysUtils_headers', 'build_esysUtils_lib'])
892    env.Alias('install_esysUtils', ['build_esysUtils', 'install_esysUtils_lib'])
893    
894    env.Alias('build_paso', ['install_paso_headers', 'build_paso_lib'])
895    env.Alias('install_paso', ['build_paso', 'install_paso_lib'])
896    
897    env.Alias('build_escript', ['install_escript_headers', 'build_escript_lib', 'build_escriptcpp_lib'])
898    env.Alias('install_escript', ['build_escript', 'install_escript_lib', 'install_escriptcpp_lib', 'install_escript_py'])
899    
900    env.Alias('build_pasowrap', ['install_pasowrap_headers', 'build_pasowrap_lib', 'build_pasowrapcpp_lib'])
901    env.Alias('install_pasowrap', ['build_pasowrap', 'install_pasowrap_lib', 'install_pasowrapcpp_lib', 'install_pasowrap_py'])
902    
903    env.Alias('build_dudley', ['install_dudley_headers', 'build_dudley_lib', 'build_dudleycpp_lib'])
904    env.Alias('install_dudley', ['build_dudley', 'install_dudley_lib', 'install_dudleycpp_lib', 'install_dudley_py'])
905    
906    env.Alias('build_finley', ['install_finley_headers', 'build_finley_lib', 'build_finleycpp_lib'])
907    env.Alias('install_finley', ['build_finley', 'install_finley_lib', 'install_finleycpp_lib', 'install_finley_py'])
908    
909    env.Alias('build_ripley', ['install_ripley_headers', 'build_ripley_lib', 'build_ripleycpp_lib'])
910    env.Alias('install_ripley', ['build_ripley', 'install_ripley_lib', 'install_ripleycpp_lib', 'install_ripley_py'])
911    
912    env.Alias('build_weipa', ['install_weipa_headers', 'build_weipa_lib', 'build_weipacpp_lib'])
913    env.Alias('install_weipa', ['build_weipa', 'install_weipa_lib', 'install_weipacpp_lib', 'install_weipa_py'])
914    
915    env.Alias('build_escriptreader', ['install_weipa_headers', 'build_escriptreader_lib'])
916    env.Alias('install_escriptreader', ['build_escriptreader', 'install_escriptreader_lib'])
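# The pattern above is the same for every module: a build_<module> alias that
# installs the headers and builds the libraries, and an install_<module> alias
# that additionally installs the libraries and, where present, the python
# parts. The underlying *_headers/*_lib/*_py targets are defined in each
# module's SConscript.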
917    
918    # Now gather all the above into some easy targets: build_all and install_all
919    build_all_list = []
920    build_all_list += ['build_esysUtils']
921    build_all_list += ['build_paso']
922    build_all_list += ['build_escript']
923    build_all_list += ['build_pasowrap']
924    build_all_list += ['build_dudley']
925    build_all_list += ['build_finley']
926    build_all_list += ['build_ripley']
927    build_all_list += ['build_weipa']
928    if not IS_WINDOWS: build_all_list += ['build_escriptreader']
929    if env['usempi']:   build_all_list += ['build_pythonMPI']
930    build_all_list += ['build_escriptconvert']
931    env.Alias('build_all', build_all_list)
932    
933    install_all_list = []
934    install_all_list += ['target_init']
935    install_all_list += ['install_esysUtils']
936    install_all_list += ['install_paso']
937    install_all_list += ['install_escript']
938    install_all_list += ['install_pasowrap']
939    install_all_list += ['install_dudley']
940    install_all_list += ['install_finley']
941    install_all_list += ['install_ripley']
942    install_all_list += ['install_weipa']
943    if not IS_WINDOWS: install_all_list += ['install_escriptreader']
944    install_all_list += ['install_downunder_py']
945    install_all_list += ['install_modellib_py']
946    install_all_list += ['install_pycad_py']
947    if env['usempi']:   install_all_list += ['install_pythonMPI']
948    install_all_list += ['install_escriptconvert']
949    env.Alias('install_all', install_all_list)
950    
951    # Default target is install_all
952    env.Default('install_all')
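# With these aliases, components can be built or installed selectively from
# the source root, e.g. (illustrative invocations):
#
#   scons -j4                # equivalent to "scons install_all" (the default)
#   scons build_finley       # finley headers and libraries only
#   scons install_escript    # escript libraries plus its python package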
953    
954    ################## Targets to build and run the test suite ###################
955    
956    if not env['cppunit']:
957        test_msg = env.Command('.dummy.', None, '@echo "Cannot run C/C++ unit tests, CppUnit not found!";exit 1')
958        env.Alias('run_tests', test_msg)
959    env.Alias('run_tests', ['install_all'])
960    env.Alias('all_tests', ['install_all', 'run_tests', 'py_tests'])
961    env.Alias('build_full',['install_all','build_tests','build_py_tests'])
962    env.Alias('build_PasoTests','$BUILD_DIR/$PLATFORM/paso/profiling/PasoTests')
963    
964    ##################### Targets to build the documentation #####################
965    
966    env.Alias('api_epydoc','install_all')
967    env.Alias('docs', ['examples_tarfile', 'examples_zipfile', 'api_epydoc', 'api_doxygen', 'user_pdf', 'install_pdf', 'cookbook_pdf', 'inversion_pdf'])
968    env.Alias('release_prep', ['docs', 'install_all'])
969    
970    
971    # The test scripts are always generated; this target allows us to
972    # generate them without doing a full build
973    env.Alias('testscripts',[])
974    
975  syslib_install_target = env.installDirectory(sys_libinstall,libinstall)  if not IS_WINDOWS:
976  syspy_install_target = env.installDirectory(sys_pyinstall,pyinstall,recursive=True)      try:
977            utest=open('utest.sh','w')
978            utest.write(GroupTest.makeHeader(env['PLATFORM'], prefix, False))
979            for tests in TestGroups:
980                utest.write(tests.makeString())
981            utest.close()
982            Execute(Chmod('utest.sh', 0o755))
983            print("Generated utest.sh.")
984            # This version contains only python tests - I want this to be usable
985            # from a binary-only install if you have the test files
986            utest=open('itest.sh','w')
987            utest.write(GroupTest.makeHeader(env['PLATFORM'], prefix, True))
988            for tests in TestGroups:
989              if tests.exec_cmd=='$PYTHONRUNNER ':
990                utest.write(tests.makeString())
991            utest.close()
992            Execute(Chmod('itest.sh', 0o755))
993            print("Generated itest.sh.")        
994        except IOError:
995            print("Error attempting to write unittests file.")
996            Exit(1)
997    
998        # delete utest.sh upon cleanup
999        env.Clean('target_init', 'utest.sh')
1000        env.Clean('target_init', 'itest.sh')
1001    
1002        # Make sure that the escript wrapper is in place
1003        if not os.path.isfile(os.path.join(env['bininstall'], 'run-escript')):
1004            print("Copying escript wrapper.")
1005            Execute(Copy(os.path.join(env['bininstall'],'run-escript'), 'bin/run-escript'))
1006    
 install_target = env.Alias("install", env.Flatten([syslib_install_target, syspy_install_target]) )  
