
Diff of /branches/diaplayground/SConstruct


revision 1390 by gross, Mon Jan 14 03:46:11 2008 UTC → revision 4173 by caltinay, Wed Jan 30 03:05:01 2013 UTC

##############################################################################
#
# Copyright (c) 2003-2013 by University of Queensland
# http://www.uq.edu.au
#
# Primary Business: Queensland, Australia
# Licensed under the Open Software License version 3.0
# http://www.opensource.org/licenses/osl-3.0.php
#
# Development until 2012 by Earth Systems Science Computational Center (ESSCC)
# Development since 2012 by School of Earth Sciences
#
##############################################################################

EnsureSConsVersion(0,98,1)
EnsurePythonVersion(2,5)

import sys, os, platform, re
from distutils import sysconfig
from site_init import *
from subprocess import PIPE, Popen

# Version number to check for in options file. Increment when new features are
# added or existing options changed.
REQUIRED_OPTS_VERSION=201

# MS Windows support, many thanks to PH
IS_WINDOWS = (os.name == 'nt')

########################## Determine options file ############################
# 1. command line
# 2. scons/<hostname>_options.py
# 3. name as part of a cluster
options_file=ARGUMENTS.get('options_file', None)
if not options_file:
    ext_dir = os.path.join(os.getcwd(), 'scons')
    hostname = platform.node().split('.')[0]
    for name in hostname, effectiveName(hostname):
        mangledhostname = re.sub('[^0-9a-zA-Z]', '_', hostname)
        options_file = os.path.join(ext_dir, mangledhostname+'_options.py')
        if os.path.isfile(options_file): break

if not os.path.isfile(options_file):
    print("\nWARNING:\nOptions file %s" % options_file)
    print("not found! Default options will be used which is most likely suboptimal.")
    print("It is recommended that you copy one of the TEMPLATE files in the scons/")
    print("subdirectory and customize it to your needs.\n")
    options_file = None

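# Illustrative sketch only (not part of the build): how the lookup above turns
# a host name into an options file name. The hostname is hypothetical and
# effectiveName() comes from site_init.
#
#   >>> import os, re
#   >>> hostname = 'savanna-01.example.edu'.split('.')[0]   # 'savanna-01'
#   >>> mangled = re.sub('[^0-9a-zA-Z]', '_', hostname)     # 'savanna_01'
#   >>> os.path.join('scons', mangled + '_options.py')
#   'scons/savanna_01_options.py'
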
############################### Build options ################################

default_prefix='/usr'
mpi_flavours=('no', 'none', 'MPT', 'MPICH', 'MPICH2', 'OPENMPI', 'INTELMPI')
lapack_flavours=('none', 'clapack', 'mkl')

vars = Variables(options_file, ARGUMENTS)
vars.AddVariables(
  PathVariable('options_file', 'Path to options file', options_file, PathVariable.PathIsFile),
  PathVariable('prefix', 'Installation prefix', Dir('#.').abspath, PathVariable.PathIsDirCreate),
  PathVariable('build_dir', 'Top-level build directory', Dir('#/build').abspath, PathVariable.PathIsDirCreate),
  BoolVariable('verbose', 'Output full compile/link lines', False),
# Compiler/Linker options
  ('cc', 'Path to C compiler', 'default'),
  ('cxx', 'Path to C++ compiler', 'default'),
  ('cc_flags', 'Base C/C++ compiler flags', 'default'),
  ('cc_optim', 'Additional C/C++ flags for a non-debug build', 'default'),
  ('cc_debug', 'Additional C/C++ flags for a debug build', 'default'),
  ('cc_extra', 'Extra C compiler flags', ''),
  ('cxx_extra', 'Extra C++ compiler flags', ''),
  ('ld_extra', 'Extra linker flags', ''),
  BoolVariable('werror','Treat compiler warnings as errors', True),
  BoolVariable('debug', 'Compile with debug flags', False),
  BoolVariable('openmp', 'Compile parallel version using OpenMP', False),
  ('omp_flags', 'OpenMP compiler flags', 'default'),
  ('omp_ldflags', 'OpenMP linker flags', 'default'),
# Mandatory libraries
  ('boost_prefix', 'Prefix/Paths of boost installation', default_prefix),
  ('boost_libs', 'Boost libraries to link with', ['boost_python-mt']),
# Mandatory for tests
  ('cppunit_prefix', 'Prefix/Paths of CppUnit installation', default_prefix),
  ('cppunit_libs', 'CppUnit libraries to link with', ['cppunit']),
# Optional libraries and options
  EnumVariable('mpi', 'Compile parallel version using MPI flavour', 'none', allowed_values=mpi_flavours),
  ('mpi_prefix', 'Prefix/Paths of MPI installation', default_prefix),
  ('mpi_libs', 'MPI shared libraries to link with', ['mpi']),
  BoolVariable('netcdf', 'Enable netCDF file support', False),
  ('netcdf_prefix', 'Prefix/Paths of netCDF installation', default_prefix),
  ('netcdf_libs', 'netCDF libraries to link with', ['netcdf_c++', 'netcdf']),
  BoolVariable('parmetis', 'Enable ParMETIS (requires MPI)', False),
  ('parmetis_prefix', 'Prefix/Paths of ParMETIS installation', default_prefix),
  ('parmetis_libs', 'ParMETIS libraries to link with', ['parmetis', 'metis']),
  BoolVariable('papi', 'Enable PAPI', False),
  ('papi_prefix', 'Prefix/Paths to PAPI installation', default_prefix),
  ('papi_libs', 'PAPI libraries to link with', ['papi']),
  BoolVariable('papi_instrument_solver', 'Use PAPI to instrument each iteration of the solver', False),
  BoolVariable('mkl', 'Enable the Math Kernel Library', False),
  ('mkl_prefix', 'Prefix/Paths to MKL installation', default_prefix),
  ('mkl_libs', 'MKL libraries to link with', ['mkl_solver','mkl_em64t','guide','pthread']),
  BoolVariable('umfpack', 'Enable UMFPACK', False),
  ('umfpack_prefix', 'Prefix/Paths to UMFPACK installation', default_prefix),
  ('umfpack_libs', 'UMFPACK libraries to link with', ['umfpack']),
  BoolVariable('boomeramg', 'Enable BoomerAMG', False),
  ('boomeramg_prefix', 'Prefix/Paths to BoomerAMG installation', default_prefix),
  ('boomeramg_libs', 'BoomerAMG libraries to link with', ['boomeramg']),
  EnumVariable('lapack', 'Set LAPACK flavour', 'none', allowed_values=lapack_flavours),
  ('lapack_prefix', 'Prefix/Paths to LAPACK installation', default_prefix),
  ('lapack_libs', 'LAPACK libraries to link with', []),
  BoolVariable('silo', 'Enable the Silo file format in weipa', False),
  ('silo_prefix', 'Prefix/Paths to Silo installation', default_prefix),
  ('silo_libs', 'Silo libraries to link with', ['siloh5', 'hdf5']),
  BoolVariable('visit', 'Enable the VisIt simulation interface', False),
  ('visit_prefix', 'Prefix/Paths to VisIt installation', default_prefix),
  ('visit_libs', 'VisIt libraries to link with', ['simV2']),
  BoolVariable('vsl_random', 'Use VSL from intel for random data', False),
# Advanced settings
  #dudley_assemble_flags = -funroll-loops      to actually do something
  ('dudley_assemble_flags', 'compiler flags for some dudley optimisations', ''),
  # To enable passing function pointers through python
  BoolVariable('iknowwhatimdoing', 'Allow non-standard C', False),
  # An option for specifying the compiler tools (see windows branch)
  ('tools_names', 'Compiler tools to use', ['default']),
  ('env_export', 'Environment variables to be passed to tools',[]),
  EnumVariable('forcelazy', 'For testing use only - set the default value for autolazy', 'leave_alone', allowed_values=('leave_alone', 'on', 'off')),
  EnumVariable('forcecollres', 'For testing use only - set the default value for force resolving collective ops', 'leave_alone', allowed_values=('leave_alone', 'on', 'off')),
  # finer control over library building, intel aggressive global optimisation
  # works with dynamic libraries on windows.
  ('build_shared', 'Build dynamic libraries only', False),
  ('sys_libs', 'Extra libraries to link with', []),
  ('escript_opts_version', 'Version of options file (do not specify on command line)'),
  ('SVN_VERSION', 'Do not use from options file', -2),
  ('pythoncmd', 'which python to compile with','python'),
  ('usepython3', 'Is this a python3 build? (experimental)', False),
  ('pythonlibname', 'Name of the python library to link. (This is found automatically for python2.X.)', ''),
  ('pythonlibpath', 'Path to the python library. (You should not need to set this unless your python has moved)',''),
  ('pythonincpath','Path to python include files. (You should not need to set this unless your python has moved',''),
  BoolVariable('BADPYTHONMACROS','Extra \#include to get around a python bug.', True),
)

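# A minimal, hypothetical scons/<hostname>_options.py built from the variables
# declared above (values are illustrative; escript_opts_version must be at
# least REQUIRED_OPTS_VERSION):
#
#   escript_opts_version = 201
#   openmp = True
#   boost_prefix = '/usr/local'
#   boost_libs = ['boost_python-mt']
#   netcdf = True
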
##################### Create environment and help text #######################

# Intel's compiler uses regular expressions improperly and emits a warning
# about failing to find the compilers. This warning can be safely ignored.

# PATH is needed so the compiler, linker and tools are found if they are not
# in default locations.
env = Environment(tools = ['default'], options = vars,
                  ENV = {'PATH': os.environ['PATH']})

#set the vars for clang
def mkclang(env):
    env['CC']='clang'
    env['CXX']='clang++'

if env['tools_names'] != 'default':
    zz=env['tools_names']
    if 'clang' in zz:
        zz.remove('clang')
        zz.insert(0, mkclang)
    env = Environment(tools = ['default'] + env['tools_names'], options = vars,
                      ENV = {'PATH' : os.environ['PATH']})

if options_file:
    opts_valid=False
    if 'escript_opts_version' in env.Dictionary() and \
        int(env['escript_opts_version']) >= REQUIRED_OPTS_VERSION:
            opts_valid=True
    if opts_valid:
        print("Using options in %s." % options_file)
    else:
        print("\nOptions file %s" % options_file)
        print("is outdated! Please update the file by examining one of the TEMPLATE")
        print("files in the scons/ subdirectory and setting escript_opts_version to %d.\n"%REQUIRED_OPTS_VERSION)
        Exit(1)

# Generate help text (scons -h)
Help(vars.GenerateHelpText(env))

# Check for superfluous options
if len(vars.UnknownVariables())>0:
    for k in vars.UnknownVariables():
        print("Unknown option '%s'" % k)
    Exit(1)

#################### Make sure install directories exist #####################

env['BUILD_DIR']=Dir(env['build_dir']).abspath
prefix=Dir(env['prefix']).abspath
env['incinstall'] = os.path.join(prefix, 'include')
env['bininstall'] = os.path.join(prefix, 'bin')
env['libinstall'] = os.path.join(prefix, 'lib')
env['pyinstall']  = os.path.join(prefix, 'esys')
if not os.path.isdir(env['bininstall']):
    os.makedirs(env['bininstall'])
if not os.path.isdir(env['libinstall']):
    os.makedirs(env['libinstall'])
if not os.path.isdir(env['pyinstall']):
    os.makedirs(env['pyinstall'])

env.Append(CPPPATH = [env['incinstall']])
env.Append(LIBPATH = [env['libinstall']])

################# Fill in compiler options if not set above ##################

if env['cc'] != 'default': env['CC']=env['cc']
if env['cxx'] != 'default': env['CXX']=env['cxx']

# version >=9 of intel C++ compiler requires use of icpc to link in C++
# runtimes (icc does not)
if not IS_WINDOWS and os.uname()[4]=='ia64' and env['CXX']=='icpc':
    env['LINK'] = env['CXX']

# default compiler/linker options
cc_flags = ''
cc_optim = ''
cc_debug = ''
omp_flags = ''
omp_ldflags = ''
fatalwarning = '' # switch to turn warnings into errors
sysheaderopt = '' # how to indicate that a header is a system header

# env['CC'] might be a full path
cc_name=os.path.basename(env['CC'])

if cc_name == 'icc':
    # Intel compiler
    # #1875: offsetof applied to non-POD types is nonstandard (in boost)
    cc_flags    = "-std=c99 -fPIC -w2 -wd1875 -Wno-unknown-pragmas -DBLOCKTIMER -DCORE_ID1"
    cc_optim    = "-O3 -ftz -fno-alias -ipo -xHost"
    cc_debug    = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
    omp_flags   = "-openmp"
    omp_ldflags = "-openmp -openmp_report=1"
    fatalwarning = "-Werror"
elif cc_name[:3] == 'gcc':
    # GNU C on any system
    # note that -ffast-math is not used because it breaks isnan(),
    # see mantis #691
    cc_flags     = "-pedantic -Wall -fPIC -Wno-unknown-pragmas -DBLOCKTIMER  -Wno-sign-compare -Wno-system-headers -Wno-long-long -Wno-strict-aliasing -finline-functions"
    cc_optim     = "-O3"
    cc_debug     = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
    omp_flags    = "-fopenmp"
    omp_ldflags  = "-fopenmp"
    fatalwarning = "-Werror"
    sysheaderopt = "-isystem"
elif cc_name == 'cl':
    # Microsoft Visual C on Windows
    cc_flags     = "/EHsc /MD /GR /wd4068 /D_USE_MATH_DEFINES /DDLL_NETCDF"
    cc_optim     = "/O2 /Op /W3"
    cc_debug     = "/Od /RTCcsu /ZI /DBOUNDS_CHECK"
    fatalwarning = "/WX"
elif cc_name == 'icl':
    # Intel C on Windows
    cc_flags     = '/EHsc /GR /MD'
    cc_optim     = '/fast /Oi /W3 /Qssp /Qinline-factor- /Qinline-min-size=0 /Qunroll'
    cc_debug     = '/Od /RTCcsu /Zi /Y- /debug:all /Qtrapuv'
    omp_flags    = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel'
    omp_ldflags  = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel'

# set defaults if not otherwise specified
if env['cc_flags']    == 'default': env['cc_flags'] = cc_flags
if env['cc_optim']    == 'default': env['cc_optim'] = cc_optim
if env['cc_debug']    == 'default': env['cc_debug'] = cc_debug
if env['omp_flags']   == 'default': env['omp_flags'] = omp_flags
if env['omp_ldflags'] == 'default': env['omp_ldflags'] = omp_ldflags
if env['cc_extra']  != '': env.Append(CFLAGS = env['cc_extra'])
if env['cxx_extra'] != '': env.Append(CXXFLAGS = env['cxx_extra'])
if env['ld_extra']  != '': env.Append(LINKFLAGS = env['ld_extra'])

if env['BADPYTHONMACROS']: env.Append(CXXFLAGS = ' -DBADPYTHONMACROS')

if env['usepython3']:
    env.Append(CPPDEFINES=['ESPYTHON3'])

# set up the autolazy values
if env['forcelazy'] == 'on':
    env.Append(CPPDEFINES=['FAUTOLAZYON'])
elif env['forcelazy'] == 'off':
    env.Append(CPPDEFINES=['FAUTOLAZYOFF'])

# set up the collective resolve values
if env['forcecollres'] == 'on':
    env.Append(CPPDEFINES=['FRESCOLLECTON'])
elif env['forcecollres'] == 'off':
    env.Append(CPPDEFINES=['FRESCOLLECTOFF'])

# allow non-standard C if requested
if env['iknowwhatimdoing']:
    env.Append(CPPDEFINES=['IKNOWWHATIMDOING'])

# Disable OpenMP if no flags provided
if env['openmp'] and env['omp_flags'] == '':
    print("OpenMP requested but no flags provided - disabling OpenMP!")
    env['openmp'] = False

if env['openmp']:
    env.Append(CCFLAGS = env['omp_flags'])
    if env['omp_ldflags'] != '': env.Append(LINKFLAGS = env['omp_ldflags'])
else:
    env['omp_flags']=''
    env['omp_ldflags']=''

# add debug/non-debug compiler flags
if env['debug']:
    env.Append(CCFLAGS = env['cc_debug'])
else:
    env.Append(CCFLAGS = env['cc_optim'])

# always add cc_flags
env.Append(CCFLAGS = env['cc_flags'])

# add system libraries
env.AppendUnique(LIBS = env['sys_libs'])

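# The same variables can also be set on the scons command line instead of an
# options file; an illustrative invocation (flags are examples only):
#
#   scons -j4 options_file=scons/myhost_options.py debug=1 openmp=1 cc_extra='-march=native'
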
global_revision=ARGUMENTS.get('SVN_VERSION', None)
if global_revision:
    global_revision = re.sub(':.*', '', global_revision)
    global_revision = re.sub('[^0-9]', '', global_revision)
    if global_revision == '': global_revision='-2'
else:
  # Get the global Subversion revision number for the getVersion() method
  try:
    global_revision = os.popen('svnversion -n .').read()
    global_revision = re.sub(':.*', '', global_revision)
    global_revision = re.sub('[^0-9]', '', global_revision)
    if global_revision == '': global_revision='-2'
  except:
    global_revision = '-1'
env['svn_revision']=global_revision
env.Append(CPPDEFINES=['SVN_VERSION='+global_revision])

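# When building from an exported tree without Subversion metadata, the
# revision can be supplied on the command line so svnversion is not consulted,
# e.g.:
#
#   scons SVN_VERSION=4173
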
if IS_WINDOWS:
    if not env['build_shared']:
        env.Append(CPPDEFINES = ['ESYSUTILS_STATIC_LIB'])
        env.Append(CPPDEFINES = ['PASO_STATIC_LIB'])

###################### Copy required environment vars ########################

# Windows doesn't use LD_LIBRARY_PATH but PATH instead
if IS_WINDOWS:
    LD_LIBRARY_PATH_KEY='PATH'
    env['ENV']['LD_LIBRARY_PATH']=''
else:
    LD_LIBRARY_PATH_KEY='LD_LIBRARY_PATH'

# the following env variables are exported for the unit tests

for key in 'OMP_NUM_THREADS', 'ESCRIPT_NUM_PROCS', 'ESCRIPT_NUM_NODES':
    try:
        env['ENV'][key] = os.environ[key]
    except KeyError:
        env['ENV'][key] = 1

env_export=env['env_export']
env_export.extend(['ESCRIPT_NUM_THREADS','ESCRIPT_HOSTFILE','DISPLAY','XAUTHORITY','PATH','HOME','KMP_MONITOR_STACKSIZE','TMPDIR','TEMP','TMP'])

for key in set(env_export):
    try:
        env['ENV'][key] = os.environ[key]
    except KeyError:
        pass

try:
    env.PrependENVPath(LD_LIBRARY_PATH_KEY, os.environ[LD_LIBRARY_PATH_KEY])
except KeyError:
    pass

# these shouldn't be needed
#for key in 'C_INCLUDE_PATH','CPLUS_INCLUDE_PATH','LIBRARY_PATH':
#    try:
#        env['ENV'][key] = os.environ[key]
#    except KeyError:
#        pass

try:
    env['ENV']['PYTHONPATH'] = os.environ['PYTHONPATH']
except KeyError:
    pass

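# Further environment variables can be forwarded to the build and test
# environment through the 'env_export' option declared above, e.g. in an
# options file (illustrative):
#
#   env_export = ['LD_PRELOAD', 'MODULEPATH']
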
######################## Add some custom builders ############################

if env['pythoncmd']=='python':
    py_builder = Builder(action = build_py, suffix = '.pyc', src_suffix = '.py', single_source=True)
else:
    py_builder = Builder(action = env['pythoncmd']+" scripts/py_comp.py $SOURCE $TARGET", suffix = '.pyc', src_suffix = '.py', single_source=True)
env.Append(BUILDERS = {'PyCompile' : py_builder});

runUnitTest_builder = Builder(action = runUnitTest, suffix = '.passed', src_suffix=env['PROGSUFFIX'], single_source=True)
env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder});

runPyUnitTest_builder = Builder(action = runPyUnitTest, suffix = '.passed', src_suffic='.py', single_source=True)
env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder});

epstopdfbuilder = Builder(action = eps2pdf, suffix='.pdf', src_suffix='.eps', single_source=True)
env.Append(BUILDERS = {'EpsToPDF' : epstopdfbuilder});
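
# The builders registered above are used by the SConscript files elsewhere in
# the tree, roughly as follows (illustrative targets only):
#
#   env.PyCompile('esys/escript/util.py')    # byte-compile a python module
#   env.RunPyUnitTest('run_tests.py')        # creates run_tests.passed on success
#   env.EpsToPDF('doc/figures/mesh.eps')     # convert an EPS figure to PDF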

############################ Dependency checks ###############################

# Create a Configure() environment to check for compilers and python
conf = Configure(env.Clone())

######## Test that the compilers work

if 'CheckCC' in dir(conf): # exists since scons 1.1.0
    if not conf.CheckCC():
        print("Cannot run C compiler '%s' (check config.log)" % (env['CC']))
        Exit(1)
    if not conf.CheckCXX():
        print("Cannot run C++ compiler '%s' (check config.log)" % (env['CXX']))
        Exit(1)
else:
    if not conf.CheckFunc('printf', language='c'):
        print("Cannot run C compiler '%s' (check config.log)" % (env['CC']))
        Exit(1)
    if not conf.CheckFunc('printf', language='c++'):
        print("Cannot run C++ compiler '%s' (check config.log)" % (env['CXX']))
        Exit(1)

if conf.CheckFunc('gethostname'):
    conf.env.Append(CPPDEFINES = ['HAVE_GETHOSTNAME'])

######## Python headers & library (required)

#First we check to see if the config file has specified
##Where to find the filae. Ideally, this should be automatic
#But we need to deal with the case where python is not in its INSTALL
#Directory
# Use the python scons is running
if env['pythoncmd']=='python':
    python_inc_path=sysconfig.get_python_inc()
    if IS_WINDOWS:
        python_lib_path=os.path.join(sysconfig.get_config_var('prefix'), 'libs')
    elif env['PLATFORM']=='darwin':
        python_lib_path=sysconfig.get_config_var('LIBPL')
    else:
        python_lib_path=sysconfig.get_config_var('LIBDIR')

    #python_libs=[sysconfig.get_config_var('LDLIBRARY')] # only on linux
    if IS_WINDOWS:
        python_libs=['python%s%s'%(sys.version_info[0], sys.version_info[1])]
    else:
        python_libs=['python'+sysconfig.get_python_version()]

#if we want to use a python other than the one scons is running
else:
    initstring='from __future__ import print_function;from distutils import sysconfig;'
    if env['pythonlibname']!='':
        python_libs=env['pythonlibname']
    else:   # work it out by calling python
        if IS_WINDOWS:
            cmd='print("python%s%s"%(sys.version_info[0], sys.version_info[1]))'
        else:
            cmd='print("python"+sysconfig.get_python_version())'
        p=Popen([env['pythoncmd'], '-c', initstring+cmd], stdout=PIPE)
        python_libs=p.stdout.readline()
        if env['usepython3']:       # This is to convert unicode str into py2 string
            python_libs=python_libs.encode() # If scons runs on py3 then this must be rethought
        p.wait()
        python_libs=python_libs.strip()

    # Now we know whether we are using python3 or not
    p=Popen([env['pythoncmd'], '-c',  initstring+'print(sysconfig.get_python_inc())'], stdout=PIPE)
    python_inc_path=p.stdout.readline()
    if env['usepython3']:
        python_inc_path=python_inc_path.encode()
    p.wait()
    python_inc_path=python_inc_path.strip()
    if IS_WINDOWS:
        cmd="os.path.join(sysconfig.get_config_var('prefix'), 'libs')"
    elif env['PLATFORM']=='darwin':
        cmd="sysconfig.get_config_var(\"LIBPL\")"
    else:
        cmd="sysconfig.get_config_var(\"LIBDIR\")"

    p=Popen([env['pythoncmd'], '-c', initstring+'print('+cmd+')'], stdout=PIPE)
    python_lib_path=p.stdout.readline()
    if env['usepython3']:
        python_lib_path=python_lib_path.decode()
    p.wait()
    python_lib_path=python_lib_path.strip()

#Check for an override from the config file.
#Ideally, this should be automatic
#But we need to deal with the case where python is not in its INSTALL
#Directory
if env['pythonlibpath']!='':
    python_lib_path=env['pythonlibpath']

if env['pythonincpath']!='':
    python_inc_path=env['pythonincpath']


if sysheaderopt == '':
    conf.env.AppendUnique(CPPPATH = [python_inc_path])
else:
    conf.env.Append(CCFLAGS = [sysheaderopt, python_inc_path])

conf.env.AppendUnique(LIBPATH = [python_lib_path])
conf.env.AppendUnique(LIBS = python_libs)
# The wrapper script needs to find the libs
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, python_lib_path)

if not conf.CheckCHeader('Python.h'):
    print("Cannot find python include files (tried 'Python.h' in directory %s)" % (python_inc_path))
    Exit(1)
if not conf.CheckFunc('Py_Exit'):
    print("Cannot find python library method Py_Main (tried %s in directory %s)" % (python_libs, python_lib_path))
    Exit(1)

## reuse conf to check for numpy header (optional)
if env['usepython3']:
    # FIXME: This is until we can work out how to make the checks in python 3
    conf.env['numpy_h']=False
else:
    if conf.CheckCXXHeader(['Python.h','numpy/ndarrayobject.h']):
        conf.env.Append(CPPDEFINES = ['HAVE_NUMPY_H'])
        conf.env['numpy_h']=True
    else:
        conf.env['numpy_h']=False

# Commit changes to environment
env = conf.Finish()

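# For a non-default interpreter these paths are obtained by querying that
# interpreter, roughly equivalent to (illustrative):
#
#   python3 -c "from distutils import sysconfig; print(sysconfig.get_python_inc())"
#
# and such a build would typically be configured with, e.g.:
#
#   scons pythoncmd=python3 usepython3=1
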
######## boost (required)

boost_inc_path,boost_lib_path=findLibWithHeader(env, env['boost_libs'], 'boost/python.hpp', env['boost_prefix'], lang='c++')
if sysheaderopt == '':
    env.AppendUnique(CPPPATH = [boost_inc_path])
else:
    # This is required because we can't -isystem /usr/include since it breaks
    # std includes
    if os.path.normpath(boost_inc_path) == '/usr/include':
        conf.env.Append(CCFLAGS=[sysheaderopt, os.path.join(boost_inc_path,'boost')])
    else:
        env.Append(CCFLAGS=[sysheaderopt, boost_inc_path])

env.AppendUnique(LIBPATH = [boost_lib_path])
env.AppendUnique(LIBS = env['boost_libs'])
env.PrependENVPath(LD_LIBRARY_PATH_KEY, boost_lib_path)

######## numpy (required)

if not detectModule(env, 'numpy'):
    print("Cannot import numpy. If it is installed try setting your PYTHONPATH and probably %s"%LD_LIBRARY_PATH_KEY)
    Exit(1)

######## CppUnit (required for tests)

try:
    cppunit_inc_path,cppunit_lib_path=findLibWithHeader(env, env['cppunit_libs'], 'cppunit/TestFixture.h', env['cppunit_prefix'], lang='c++')
    env.AppendUnique(CPPPATH = [cppunit_inc_path])
    env.AppendUnique(LIBPATH = [cppunit_lib_path])
    env.PrependENVPath(LD_LIBRARY_PATH_KEY, cppunit_lib_path)
    env['cppunit']=True
except:
    env['cppunit']=False

######## sympy (optional)

if detectModule(env, 'sympy'):
    env['sympy'] = True
else:
    print("Cannot import sympy. Symbolic toolbox and nonlinear PDEs will not be available.")
    env['sympy'] = False

######## netCDF (optional)

netcdf_inc_path=''
netcdf_lib_path=''
if env['netcdf']:
    netcdf_inc_path,netcdf_lib_path=findLibWithHeader(env, env['netcdf_libs'], 'netcdf.h', env['netcdf_prefix'], lang='c++')
    env.AppendUnique(CPPPATH = [netcdf_inc_path])
    env.AppendUnique(LIBPATH = [netcdf_lib_path])
    env.AppendUnique(LIBS = env['netcdf_libs'])
    env.PrependENVPath(LD_LIBRARY_PATH_KEY, netcdf_lib_path)
    env.Append(CPPDEFINES = ['USE_NETCDF'])
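
# Each optional dependency below follows the same pattern: switch the feature
# on, point its prefix at the installation and, if needed, adjust the library
# list, e.g. in an options file (illustrative values):
#
#   netcdf = True
#   netcdf_prefix = '/opt/netcdf'
#   netcdf_libs = ['netcdf_c++', 'netcdf']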
579        env.PrependENVPath('PATH', lib_path)  
580  except KeyError:  ######## PAPI (optional)
581     pass  
582  try:  papi_inc_path=''
583     boost_lib = env['boost_lib']  papi_lib_path=''
584  except KeyError:  if env['papi']:
585     boost_lib = None      papi_inc_path,papi_lib_path=findLibWithHeader(env, env['papi_libs'], 'papi.h', env['papi_prefix'], lang='c')
586  # ====================== python ======================================      env.AppendUnique(CPPPATH = [papi_inc_path])
587  try:      env.AppendUnique(LIBPATH = [papi_lib_path])
588     includes = env['python_path']      env.AppendUnique(LIBS = env['papi_libs'])
589     env.Append(CPPPATH = [includes,])      env.PrependENVPath(LD_LIBRARY_PATH_KEY, papi_lib_path)
590  except KeyError:      env.Append(CPPDEFINES = ['BLOCKPAPI'])
591     pass  
592  try:  ######## MKL (optional)
593     lib_path = env['python_lib_path']  
594     env['ENV']['LD_LIBRARY_PATH']+=":"+lib_path  mkl_inc_path=''
595     env.Append(LIBPATH = [lib_path,])  mkl_lib_path=''
596  except KeyError:  if env['mkl']:
597     pass      mkl_inc_path,mkl_lib_path=findLibWithHeader(env, env['mkl_libs'], 'mkl_solver.h', env['mkl_prefix'], lang='c')
598  try:      env.AppendUnique(CPPPATH = [mkl_inc_path])
599     python_lib = env['python_lib']      env.AppendUnique(LIBPATH = [mkl_lib_path])
600  except KeyError:      env.AppendUnique(LIBS = env['mkl_libs'])
601     python_lib = None      env.PrependENVPath(LD_LIBRARY_PATH_KEY, mkl_lib_path)
602  # =============== documentation =======================================      env.Append(CPPDEFINES = ['MKL'])
603  try:  
604     doxygen_path = env['doxygen_path']  ######## UMFPACK (optional)
605  except KeyError:  
606     doxygen_path = None  umfpack_inc_path=''
607  try:  umfpack_lib_path=''
608     epydoc_path = env['epydoc_path']  if env['umfpack']:
609  except KeyError:      umfpack_inc_path,umfpack_lib_path=findLibWithHeader(env, env['umfpack_libs'], 'umfpack.h', env['umfpack_prefix'], lang='c')
610     epydoc_path = None      env.AppendUnique(CPPPATH = [umfpack_inc_path])
611  # =============== PAPI =======================================      env.AppendUnique(LIBPATH = [umfpack_lib_path])
612  try:      env.AppendUnique(LIBS = env['umfpack_libs'])
613     includes = env['papi_path']      env.PrependENVPath(LD_LIBRARY_PATH_KEY, umfpack_lib_path)
614     env.Append(CPPPATH = [includes,])      env.Append(CPPDEFINES = ['UMFPACK'])
615  except KeyError:  
616     pass  ######## LAPACK (optional)
617  try:  
618     lib_path = env['papi_lib_path']  if env['lapack']=='mkl' and not env['mkl']:
619     env.Append(LIBPATH = [lib_path,])      print("mkl_lapack requires MKL!")
620  except KeyError:      Exit(1)
621     pass  
622  try:  env['uselapack'] = env['lapack']!='none'
623     papi_libs = env['papi_libs']  lapack_inc_path=''
624  except KeyError:  lapack_lib_path=''
625     papi_libs = None  if env['uselapack']:
626  # ============= set mpi =====================================      header='clapack.h'
627  if useMPI:      if env['lapack']=='mkl':
628     env.Append(CPPDEFINES=['PASO_MPI',])          env.AppendUnique(CPPDEFINES = ['MKL_LAPACK'])
629     try:          header='mkl_lapack.h'
630        includes = env['mpi_path']      lapack_inc_path,lapack_lib_path=findLibWithHeader(env, env['lapack_libs'], header, env['lapack_prefix'], lang='c')
631        env.Append(CPPPATH = [includes,])      env.AppendUnique(CPPPATH = [lapack_inc_path])
632     except KeyError:      env.AppendUnique(LIBPATH = [lapack_lib_path])
633        pass      env.AppendUnique(LIBS = env['lapack_libs'])
634     try:      env.Append(CPPDEFINES = ['USE_LAPACK'])
635        lib_path = env['mpi_lib_path']  
636        env.Append(LIBPATH = [lib_path,])  ######## Silo (optional)
637        env['ENV']['LD_LIBRARY_PATH']+=":"+lib_path  
638     except KeyError:  silo_inc_path=''
639        pass  silo_lib_path=''
640     try:  if env['silo']:
641        mpi_libs = env['mpi_libs']      silo_inc_path,silo_lib_path=findLibWithHeader(env, env['silo_libs'], 'silo.h', env['silo_prefix'], lang='c')
642     except KeyError:      env.AppendUnique(CPPPATH = [silo_inc_path])
643        mpi_libs = []      env.AppendUnique(LIBPATH = [silo_lib_path])
644        # Note that we do not add the libs since they are only needed for the
645     try:      # weipa library and tools.
646        mpi_run = env['mpi_run']      #env.AppendUnique(LIBS = [env['silo_libs']])
647     except KeyError:  
648        mpi_run = ''  ######## VSL random numbers (optional)
649    if env['vsl_random']:
650     try:      env.Append(CPPDEFINES = ['MKLRANDOM'])
651         mpich_ignore_cxx_seek=env['MPICH_IGNORE_CXX_SEEK']  
652         env.Append(CPPDEFINES = [ mpich_ignore_cxx_seek ] )  ######## VisIt (optional)
653     except KeyError:  
654        pass  visit_inc_path=''
655  else:  visit_lib_path=''
656    mpi_libs=[]  if env['visit']:
657    mpi_run = mpi_run_default      visit_inc_path,visit_lib_path=findLibWithHeader(env, env['visit_libs'], 'VisItControlInterface_V2.h', env['visit_prefix'], lang='c')
658  # =========== zip files ===========================================      env.AppendUnique(CPPPATH = [visit_inc_path])
659  try:      env.AppendUnique(LIBPATH = [visit_lib_path])
660     includes = env['papi_path']  
661     env.Append(CPPPATH = [includes,])  ######## MPI (optional)
662  except KeyError:  
663     pass  if env['mpi']=='no':
664  try:      env['mpi']='none'
665     lib_path = env['papi_lib_path']  
666     env.Append(LIBPATH = [lib_path,])  env['usempi'] = env['mpi']!='none'
667  except KeyError:  mpi_inc_path=''
668     pass  mpi_lib_path=''
669  try:  if env['usempi']:
670     papi_libs = env['papi_libs']      mpi_inc_path,mpi_lib_path=findLibWithHeader(env, env['mpi_libs'], 'mpi.h', env['mpi_prefix'], lang='c')
671  except KeyError:      env.AppendUnique(CPPPATH = [mpi_inc_path])
672     papi_libs = None      env.AppendUnique(LIBPATH = [mpi_lib_path])
673  try:      env.AppendUnique(LIBS = env['mpi_libs'])
674     papi_instrument_solver = env['papi_instrument_solver']      env.PrependENVPath(LD_LIBRARY_PATH_KEY, mpi_lib_path)
675  except KeyError:      env.Append(CPPDEFINES = ['ESYS_MPI', 'MPI_NO_CPPBIND', 'MPICH_IGNORE_CXX_SEEK'])
676     papi_instrument_solver = None      # NetCDF 4.1 defines MPI_Comm et al. if MPI_INCLUDED is not defined!
677        # On the other hand MPT and OpenMPI don't define MPI_INCLUDED so we have to
678        # do that here
679  # ============= and some helpers =====================================      if env['netcdf'] and env['mpi'] in ['MPT','OPENMPI']:
680  try:          env.Append(CPPDEFINES = ['MPI_INCLUDED'])
681     doxygen_path = env['doxygen_path']  
682  except KeyError:  ######## BOOMERAMG (optional)
683     doxygen_path = None  
684  try:  if env['mpi'] == 'none': env['boomeramg'] = False
685     epydoc_path = env['epydoc_path']  
686  except KeyError:  boomeramg_inc_path=''
687     epydoc_path = None  boomeramg_lib_path=''
688  try:  if env['boomeramg']:
689     src_zipfile = env.File(env['src_zipfile'])      boomeramg_inc_path,boomeramg_lib_path=findLibWithHeader(env, env['boomeramg_libs'], 'HYPRE.h', env['boomeramg_prefix'], lang='c')
690  except KeyError:      env.AppendUnique(CPPPATH = [boomeramg_inc_path])
691     src_zipfile = None      env.AppendUnique(LIBPATH = [boomeramg_lib_path])
692  try:      env.AppendUnique(LIBS = env['boomeramg_libs'])
693     test_zipfile = env.File(env['test_zipfile'])      env.PrependENVPath(LD_LIBRARY_PATH_KEY, boomeramg_lib_path)
694  except KeyError:      env.Append(CPPDEFINES = ['BOOMERAMG'])
695     test_zipfile = None  
696  try:  ######## ParMETIS (optional)
697     examples_zipfile = env.File(env['examples_zipfile'])  
698  except KeyError:  if not env['usempi']: env['parmetis'] = False
699     examples_zipfile = None  
700    parmetis_inc_path=''
701  try:  parmetis_lib_path=''
702     src_tarfile = env.File(env['src_tarfile'])  if env['parmetis']:
703  except KeyError:      parmetis_inc_path,parmetis_lib_path=findLibWithHeader(env, env['parmetis_libs'], 'parmetis.h', env['parmetis_prefix'], lang='c')
704     src_tarfile = None      env.AppendUnique(CPPPATH = [parmetis_inc_path])
705  try:      env.AppendUnique(LIBPATH = [parmetis_lib_path])
706     test_tarfile = env.File(env['test_tarfile'])      env.AppendUnique(LIBS = env['parmetis_libs'])
707  except KeyError:      env.PrependENVPath(LD_LIBRARY_PATH_KEY, parmetis_lib_path)
708     test_tarfile = None      env.Append(CPPDEFINES = ['USE_PARMETIS'])
709  try:  
710     examples_tarfile = env.File(env['examples_tarfile'])  ######## gmsh (optional, for tests)
711  except KeyError:  
712     examples_tarfile = None  try:
713        p=Popen(['gmsh', '-info'], stderr=PIPE)
714  try:      _,e=p.communicate()
715     guide_pdf = env.File(env['guide_pdf'])      if e.split().count("MPI"):
716  except KeyError:          env['gmsh']='m'
717     guide_pdf = None      else:
718            env['gmsh']='s'
719  try:  except OSError:
720     guide_html_index = env.File('index.htm',env['guide_html'])      env['gmsh']=False
721  except KeyError:  
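
The gmsh probe above runs 'gmsh -info' and scans the output for the word MPI to tell an MPI-enabled gmsh ('m') from a serial one ('s'); if the binary is missing, the OSError disables gmsh-dependent tests. Here is the same check as a standalone function; the name detect_gmsh is mine, and stderr is captured because that is where the code above reads the -info report from.

# Standalone sketch of the gmsh probe: 'm' = MPI-enabled, 's' = serial,
# False = gmsh not found on the PATH.
from subprocess import Popen, PIPE

def detect_gmsh():
    try:
        p = Popen(['gmsh', '-info'], stdout=PIPE, stderr=PIPE)
        out, err = p.communicate()
    except OSError:                 # gmsh binary not installed
        return False
    text = (out + err).decode('utf-8', 'replace')
    return 'm' if 'MPI' in text.split() else 's'
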
722     guide_html_index = None  ######## PDFLaTeX (for documentation)
723    if 'PDF' in dir(env) and '.tex' in env.PDF.builder.src_suffixes(env):
724  try:      env['pdflatex']=True
725     api_epydoc = env.Dir(env['api_epydoc'])  else:
726  except KeyError:      env['pdflatex']=False
727     api_epydoc = None  
728    ######################## Summarize our environment ###########################
729  try:  
730     api_doxygen = env.Dir(env['api_doxygen'])  # keep some of our install paths first in the list for the unit tests
731  except KeyError:  env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])
732     api_doxygen = None  env.PrependENVPath('PYTHONPATH', prefix)
733    env['ENV']['ESCRIPT_ROOT'] = prefix
734  global_revision="$Rev$"  
735  try:  if not env['verbose']:
736     svn_pipe = os.popen("svnversion -n .")      env['CCCOMSTR'] = "Compiling $TARGET"
737     global_revision = svn_pipe.readlines()      env['CXXCOMSTR'] = "Compiling $TARGET"
738     svn_pipe.close()      env['SHCCCOMSTR'] = "Compiling $TARGET"
739  except:      env['SHCXXCOMSTR'] = "Compiling $TARGET"
740     print "Extracting revision number failed. Using %s instead."%global_revision      env['ARCOMSTR'] = "Linking $TARGET"
741  global_revision = re.sub(":.*", "", global_revision[0])      env['LINKCOMSTR'] = "Linking $TARGET"
742  global_revision = re.sub("[^0-9]", "", global_revision)      env['SHLINKCOMSTR'] = "Linking $TARGET"
743  env.Append(CPPDEFINES = "SVN_VERSION="+global_revision)      env['PDFLATEXCOMSTR'] = "Building $TARGET from LaTeX input $SOURCES"
744        env['BIBTEXCOMSTR'] = "Generating bibliography $TARGET"
745  # Python install - esys __init__.py      env['MAKEINDEXCOMSTR'] = "Generating index $TARGET"
746  init_target = env.Command(pyinstall+'/__init__.py', None, Touch('$TARGET'))
747        #Progress(['Checking -\r', 'Checking \\\r', 'Checking |\r', 'Checking /\r'], interval=17)
748  # FIXME: exinstall and friends related to examples are not working.  
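
The *COMSTR variables set above replace SCons's default echo of each full compiler or linker command line with a one-line message per target, so a non-verbose build prints "Compiling <target>" instead of the complete invocation. Since most of the messages differ only in their verb, the block could also be written as a short loop; the following is a possible compaction for illustration, not what the file does, and the LaTeX-related messages would still be set individually.

# Possible compaction of the quiet-output settings above (illustration only).
if not env['verbose']:
    for var in ('CCCOMSTR', 'CXXCOMSTR', 'SHCCCOMSTR', 'SHCXXCOMSTR'):
        env[var] = "Compiling $TARGET"
    for var in ('ARCOMSTR', 'LINKCOMSTR', 'SHLINKCOMSTR'):
        env[var] = "Linking $TARGET"
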
749  build_target = env.Alias('build',[libinstall,incinstall,pyinstall,init_target])  print("")
750    print("*** Config Summary (see config.log and lib/buildvars for details) ***")
751  env.Default(build_target)  print("Escript/Finley revision %s"%global_revision)
752    print("  Install prefix:  %s"%env['prefix'])
753  # Zipgets  print("          Python:  %s"%sysconfig.PREFIX)
754  env.Alias('release_src',[ src_zipfile, src_tarfile ])  print("           boost:  %s"%env['boost_prefix'])
755  env.Alias('release_tests',[ test_zipfile, test_tarfile])  print("           numpy:  YES")
756  env.Alias('release_examples',[ examples_zipfile, examples_tarfile])  if env['usempi']:
757  env.Alias('examples_zipfile',examples_zipfile)      print("             MPI:  YES (flavour: %s)"%env['mpi'])
758  env.Alias('examples_tarfile',examples_tarfile)  else:
759  env.Alias('api_epydoc',api_epydoc)      print("             MPI:  DISABLED")
760  env.Alias('api_doxygen',api_doxygen)  if env['uselapack']:
761  env.Alias('guide_html_index',guide_html_index)      print("          LAPACK:  YES (flavour: %s)"%env['lapack'])
762  env.Alias('guide_pdf', guide_pdf)  else:
763  env.Alias('docs',[ 'release_examples', 'guide_pdf', api_epydoc, api_doxygen, guide_html_index])      print("          LAPACK:  DISABLED")
764  env.Alias('release', ['release_src', 'release_tests', 'docs'])  d_list=[]
765    e_list=[]
766  env.Alias('build_tests',build_target)    # target to build all C++ tests  for i in 'debug','openmp','boomeramg','mkl','netcdf','papi','parmetis','silo','sympy','umfpack','visit','vsl_random':
767  env.Alias('build_py_tests',build_target) # target to build all python tests      if env[i]: e_list.append(i)
768  env.Alias('build_all_tests', [ 'build_tests', 'build_py_tests' ] ) # target to build all C++ and python tests      else: d_list.append(i)
769  env.Alias('run_tests', 'build_tests')   # target to run all C++ tests  for i in e_list:
770  env.Alias('py_tests', 'build_py_tests') # target to run all released python tests      print("%16s:  YES"%i)
771  env.Alias('all_tests', ['run_tests', 'py_tests']) # target to run all C++ and released python tests  for i in d_list:
772        print("%16s:  DISABLED"%i)
773    if env['cppunit']:
774  # Allow sconscripts to see the env      print("         CppUnit:  FOUND")
775  Export(["IS_WINDOWS_PLATFORM", "env", "incinstall", "libinstall", "pyinstall", "dodebug", "mkl_libs", "scsl_libs", "umf_libs", "blas_libs", "netCDF_libs", "useNetCDF", "mpi_run",  else:
776      "boost_lib", "python_lib", "doxygen_path", "epydoc_path", "papi_libs",      print("         CppUnit:  NOT FOUND")
777          "sys_libs", "test_zipfile", "src_zipfile", "test_tarfile", "src_tarfile", "examples_tarfile", "examples_zipfile", "trilinos_libs", "mpi_libs", "papi_instrument_solver",  if env['gmsh']=='m':
778          "guide_pdf", "guide_html_index", "api_epydoc", "api_doxygen", "useMPI" ])      print("            gmsh:  FOUND, MPI-ENABLED")
779    elif env['gmsh']=='s':
780  # End initialisation section      print("            gmsh:  FOUND")
781  # Begin configuration section  else:
782  # adds this file and the scons options directory to the source tar      print("            gmsh:  NOT FOUND")
783  release_srcfiles=[env.File('SConstruct'),env.Dir('lib'),env.Dir('include'),]+[ env.File(x) for x in glob.glob('scons/*.py') ]  if env['numpy_h']:
784  release_testfiles=[env.File('README_TESTS'),]      print("   numpy headers:  FOUND")
785  env.Zip(src_zipfile, release_srcfiles)  else:
786  env.Zip(test_zipfile, release_testfiles)      print("   numpy headers:  NOT FOUND")
787  try:  print("   vsl_random:  %s"%env['vsl_random'])
788     env.Tar(src_tarfile, release_srcfiles)      
789     env.Tar(test_tarfile, release_testfiles)  if ((fatalwarning != '') and (env['werror'])):
790  except AttributeError:      print("  Treating warnings as errors")
791     pass  else:
792  # Insert new components to be built here      print("  NOT treating warnings as errors")
793  # FIXME: might be nice to replace this verbosity with a list of targets and some  print("")
794  # FIXME: nifty python to create the lengthy but very similar env.Sconscript lines  
795  # Third Party libraries  ####################### Configure the subdirectories #########################
796  env.SConscript(dirs = ['tools/CppUnitTest/src'], build_dir='build/$PLATFORM/tools/CppUnitTest', duplicate=0)  
797  # C/C++ Libraries  from grouptest import *
798  env.SConscript(dirs = ['paso/src'], build_dir='build/$PLATFORM/paso', duplicate=0)  
799  # bruce is removed for now as it doesn't really do anything  TestGroups=[]
800  # env.SConscript(dirs = ['bruce/src'], build_dir='build/$PLATFORM/bruce', duplicate=0)  
801  env.SConscript(dirs = ['escript/src'], build_dir='build/$PLATFORM/escript', duplicate=0)  # keep an environment without warnings-as-errors
802  env.SConscript(dirs = ['esysUtils/src'], build_dir='build/$PLATFORM/esysUtils', duplicate=0)  dodgy_env=env.Clone()
803  env.SConscript(dirs = ['finley/src'], build_dir='build/$PLATFORM/finley', duplicate=0)  
804  env.SConscript(dirs = ['modellib/py_src'], build_dir='build/$PLATFORM/modellib', duplicate=0)  # now add warnings-as-errors flags. This needs to be done after configuration
805  env.SConscript(dirs = ['doc'], build_dir='build/$PLATFORM/doc', duplicate=0)  # because the scons test files have warnings in them
806  env.SConscript(dirs = ['pyvisi/py_src'], build_dir='build/$PLATFORM/pyvisi', duplicate=0)  if ((fatalwarning != '') and (env['werror'])):
807  env.SConscript(dirs = ['pycad/py_src'], build_dir='build/$PLATFORM/pycad', duplicate=0)      env.Append(CCFLAGS = fatalwarning)
808  env.SConscript(dirs = ['pythonMPI/src'], build_dir='build/$PLATFORM/pythonMPI', duplicate=0)  
809  #env.SConscript(dirs = ['../test'], build_dir='../test/build', duplicate=0)  Export(
810      ['env',
811       'dodgy_env',
812       'IS_WINDOWS',
813       'TestGroups'
814      ]
815    )
816    
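
Export() above publishes the listed names so that each SConscript invoked below can pull them in with Import(); that is how the per-module build scripts share the configured environment, the warnings-permissive clone and the TestGroups list. A minimal sketch of the consuming side follows; the file name, sources and target names are placeholders, and only the Import() mechanism is taken from the build above.

# Hypothetical <module>/src/SConscript showing the other end of Export().
Import('env', 'dodgy_env', 'IS_WINDOWS', 'TestGroups')

local_env = env.Clone()                                      # do not pollute the shared env
lib = local_env.SharedLibrary('example', ['example.cpp'])    # placeholder sources
local_env.Alias('build_example_lib', lib)
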
817    env.SConscript(dirs = ['tools/escriptconvert'], variant_dir='$BUILD_DIR/$PLATFORM/tools/escriptconvert', duplicate=0)
818    env.SConscript(dirs = ['paso/src'], variant_dir='$BUILD_DIR/$PLATFORM/paso', duplicate=0)
819    env.SConscript(dirs = ['weipa/src'], variant_dir='$BUILD_DIR/$PLATFORM/weipa', duplicate=0)
820    env.SConscript(dirs = ['escript/src'], variant_dir='$BUILD_DIR/$PLATFORM/escript', duplicate=0)
821    env.SConscript(dirs = ['esysUtils/src'], variant_dir='$BUILD_DIR/$PLATFORM/esysUtils', duplicate=0)
822    env.SConscript(dirs = ['pasowrap/src'], variant_dir='$BUILD_DIR/$PLATFORM/pasowrap', duplicate=0)
823    env.SConscript(dirs = ['dudley/src'], variant_dir='$BUILD_DIR/$PLATFORM/dudley', duplicate=0)
824    env.SConscript(dirs = ['finley/src'], variant_dir='$BUILD_DIR/$PLATFORM/finley', duplicate=0)
825    env.SConscript(dirs = ['ripley/src'], variant_dir='$BUILD_DIR/$PLATFORM/ripley', duplicate=0)
826    env.SConscript(dirs = ['downunder/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/downunder', duplicate=0)
827    env.SConscript(dirs = ['modellib/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/modellib', duplicate=0)
828    env.SConscript(dirs = ['pycad/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/pycad', duplicate=0)
829    env.SConscript(dirs = ['pythonMPI/src'], variant_dir='$BUILD_DIR/$PLATFORM/pythonMPI', duplicate=0)
830    env.SConscript(dirs = ['doc'], variant_dir='$BUILD_DIR/$PLATFORM/doc', duplicate=0)
831    env.SConscript(dirs = ['paso/profiling'], variant_dir='$BUILD_DIR/$PLATFORM/paso/profiling', duplicate=0)
832    
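
Each env.SConscript() call above pairs a source directory with a variant_dir so that object files land under $BUILD_DIR/$PLATFORM rather than in the source tree (duplicate=0 avoids copying the sources there). The calls differ only in their directory names, so the list could be generated, much as the old revision's FIXME already suggests; the sketch below is a possible compaction with an abbreviated module list, not what the file does.

# Possible compaction of the SConscript calls above (illustration only;
# the list is abbreviated and the file spells the calls out one by one).
modules = [('paso/src', 'paso'), ('weipa/src', 'weipa'), ('escript/src', 'escript')]
for srcdir, builddir in modules:
    env.SConscript(dirs=[srcdir], variant_dir='$BUILD_DIR/$PLATFORM/'+builddir,
                   duplicate=0)
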
833    
834    ######################## Populate the buildvars file #########################
835    
836    # remove obsolete files
837    if not env['usempi']:
838        Execute(Delete(os.path.join(env['libinstall'], 'pythonMPI')))
839        Execute(Delete(os.path.join(env['libinstall'], 'pythonMPIredirect')))
840    
841    # Try to extract the boost version from version.hpp
842    boosthpp=open(os.path.join(boost_inc_path, 'boost', 'version.hpp'))
843    boostversion='unknown'
844    try:
845        for line in boosthpp:
846            ver=re.match(r'#define BOOST_VERSION (\d+)',line)
847            if ver:
848                boostversion=ver.group(1)
849    except StopIteration:
850        pass
851    boosthpp.close()
852    
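
BOOST_VERSION in boost/version.hpp is a single integer encoding major*100000 + minor*100 + patch, so the value captured above can be turned into a dotted version string if wanted; a small helper sketch follows (the function name is mine).

# Decode the integer BOOST_VERSION captured above, e.g. 104800 -> "1.48.0".
def boost_version_string(boostversion):
    v = int(boostversion)
    return "%d.%d.%d" % (v // 100000, (v // 100) % 1000, v % 100)
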
853    
854    buildvars=open(os.path.join(env['libinstall'], 'buildvars'), 'w')
855    buildvars.write("svn_revision="+str(global_revision)+"\n")
856    buildvars.write("prefix="+prefix+"\n")
857    buildvars.write("cc="+env['CC']+"\n")
858    buildvars.write("cxx="+env['CXX']+"\n")
859    if env['pythoncmd']=='python':
860        buildvars.write("python="+sys.executable+"\n")
861        buildvars.write("python_version="+str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2])+"\n")
862    else:
863        buildvars.write("python="+env['pythoncmd']+"\n")
864        p=Popen([env['pythoncmd'], '-c', 'from __future__ import print_function;import sys;print(str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2]))'], stdout=PIPE)
865        verstring=p.stdout.readline().strip()
866        p.wait()
867        buildvars.write("python_version="+verstring+"\n")
868    buildvars.write("boost_inc_path="+boost_inc_path+"\n")
869    buildvars.write("boost_lib_path="+boost_lib_path+"\n")
870    buildvars.write("boost_version="+boostversion+"\n")
871    buildvars.write("debug=%d\n"%int(env['debug']))
872    buildvars.write("openmp=%d\n"%int(env['openmp']))
873    buildvars.write("mpi=%s\n"%env['mpi'])
874    buildvars.write("mpi_inc_path=%s\n"%mpi_inc_path)
875    buildvars.write("mpi_lib_path=%s\n"%mpi_lib_path)
876    buildvars.write("lapack=%s\n"%env['lapack'])
877    buildvars.write("vsl_random=%d\n"%int(env['vsl_random']))
878    for i in 'netcdf','parmetis','papi','mkl','umfpack','boomeramg','silo','visit':
879        buildvars.write("%s=%d\n"%(i, int(env[i])))
880        if env[i]:
881            buildvars.write("%s_inc_path=%s\n"%(i, eval(i+'_inc_path')))
882            buildvars.write("%s_lib_path=%s\n"%(i, eval(i+'_lib_path')))
883    buildvars.close()
884    
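
The buildvars file written above is a flat key=value record of the configuration (the summary banner points users at lib/buildvars for details), so reading it back is straightforward; the reader below is a sketch and its function name is mine.

# Sketch: parse lib/buildvars back into a dict of strings.
# The key=value lines match what the writes above produce.
def read_buildvars(path):
    result = {}
    f = open(path)
    for line in f:
        line = line.strip()
        if line and '=' in line:
            key, value = line.split('=', 1)
            result[key] = value
    f.close()
    return result

# e.g. read_buildvars(os.path.join(env['libinstall'], 'buildvars'))['mpi']
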
885    ################### Targets to build and install libraries ###################
886    
887    target_init = env.Command(os.path.join(env['pyinstall'],'__init__.py'), None, Touch('$TARGET'))
888    env.Alias('target_init', [target_init])
889    # delete buildvars upon cleanup
890    env.Clean('target_init', os.path.join(env['libinstall'], 'buildvars'))
891    
892    # The headers have to be installed prior to build in order to satisfy
893    # #include <paso/Common.h>
894    env.Alias('build_esysUtils', ['install_esysUtils_headers', 'build_esysUtils_lib'])
895    env.Alias('install_esysUtils', ['build_esysUtils', 'install_esysUtils_lib'])
896    
897    env.Alias('build_paso', ['install_paso_headers', 'build_paso_lib'])
898    env.Alias('install_paso', ['build_paso', 'install_paso_lib'])
899    
900    env.Alias('build_escript', ['install_escript_headers', 'build_escript_lib', 'build_escriptcpp_lib'])
901    env.Alias('install_escript', ['build_escript', 'install_escript_lib', 'install_escriptcpp_lib', 'install_escript_py'])
902    
903    env.Alias('build_pasowrap', ['install_pasowrap_headers', 'build_pasowrap_lib', 'build_pasowrapcpp_lib'])
904    env.Alias('install_pasowrap', ['build_pasowrap', 'install_pasowrap_lib', 'install_pasowrapcpp_lib', 'install_pasowrap_py'])
905    
906    env.Alias('build_dudley', ['install_dudley_headers', 'build_dudley_lib', 'build_dudleycpp_lib'])
907    env.Alias('install_dudley', ['build_dudley', 'install_dudley_lib', 'install_dudleycpp_lib', 'install_dudley_py'])
908    
909    env.Alias('build_finley', ['install_finley_headers', 'build_finley_lib', 'build_finleycpp_lib'])
910    env.Alias('install_finley', ['build_finley', 'install_finley_lib', 'install_finleycpp_lib', 'install_finley_py'])
911    
912    env.Alias('build_ripley', ['install_ripley_headers', 'build_ripley_lib', 'build_ripleycpp_lib'])
913    env.Alias('install_ripley', ['build_ripley', 'install_ripley_lib', 'install_ripleycpp_lib', 'install_ripley_py'])
914    
915    env.Alias('build_weipa', ['install_weipa_headers', 'build_weipa_lib', 'build_weipacpp_lib'])
916    env.Alias('install_weipa', ['build_weipa', 'install_weipa_lib', 'install_weipacpp_lib', 'install_weipa_py'])
917    
918    env.Alias('build_escriptreader', ['install_weipa_headers', 'build_escriptreader_lib'])
919    env.Alias('install_escriptreader', ['build_escriptreader', 'install_escriptreader_lib'])
920    
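
The build_X/install_X pairs above all follow the same shape: building a module installs its headers first (so cross-module includes such as paso/Common.h resolve) and compiles its libraries, while installing it depends on the build plus the installed library and, where one exists, the Python wrapper. That repetition could be generated; the helper below is hypothetical and only sketches the idea (has_cpp marks the modules that also ship a *cpp_lib and a *_py component).

# Hypothetical generator for the alias pairs above (illustration only).
def add_module_aliases(env, name, has_cpp=True):
    build = ['install_%s_headers' % name, 'build_%s_lib' % name]
    install = ['build_%s' % name, 'install_%s_lib' % name]
    if has_cpp:
        build.append('build_%scpp_lib' % name)
        install += ['install_%scpp_lib' % name, 'install_%s_py' % name]
    env.Alias('build_%s' % name, build)
    env.Alias('install_%s' % name, install)

# e.g. add_module_aliases(env, 'finley'); add_module_aliases(env, 'paso', has_cpp=False)
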
921    # Now gather all the above into some easy targets: build_all and install_all
922    build_all_list = []
923    build_all_list += ['build_esysUtils']
924    build_all_list += ['build_paso']
925    build_all_list += ['build_escript']
926    build_all_list += ['build_pasowrap']
927    build_all_list += ['build_dudley']
928    build_all_list += ['build_finley']
929    build_all_list += ['build_ripley']
930    build_all_list += ['build_weipa']
931    if not IS_WINDOWS: build_all_list += ['build_escriptreader']
932    if env['usempi']:   build_all_list += ['build_pythonMPI']
933    build_all_list += ['build_escriptconvert']
934    env.Alias('build_all', build_all_list)
935    
936    install_all_list = []
937    install_all_list += ['target_init']
938    install_all_list += ['install_esysUtils']
939    install_all_list += ['install_paso']
940    install_all_list += ['install_escript']
941    install_all_list += ['install_pasowrap']
942    install_all_list += ['install_dudley']
943    install_all_list += ['install_finley']
944    install_all_list += ['install_ripley']
945    install_all_list += ['install_weipa']
946    if not IS_WINDOWS: install_all_list += ['install_escriptreader']
947    install_all_list += ['install_downunder_py']
948    install_all_list += ['install_modellib_py']
949    install_all_list += ['install_pycad_py']
950    if env['usempi']:   install_all_list += ['install_pythonMPI']
951    install_all_list += ['install_escriptconvert']
952    env.Alias('install_all', install_all_list)
953    
954    # Default target is install
955    env.Default('install_all')
956    
957    ################## Targets to build and run the test suite ###################
958    
959    if not env['cppunit']:
960        test_msg = env.Command('.dummy.', None, '@echo "Cannot run C/C++ unit tests, CppUnit not found!";exit 1')
961        env.Alias('run_tests', test_msg)
962    env.Alias('run_tests', ['install_all'])
963    env.Alias('all_tests', ['install_all', 'run_tests', 'py_tests'])
964    env.Alias('build_full',['install_all','build_tests','build_py_tests'])
965    env.Alias('build_PasoTests','$BUILD_DIR/$PLATFORM/paso/profiling/PasoTests')
966    
967    ##################### Targets to build the documentation #####################
968    
969    env.Alias('basedocs', ['examples_tarfile', 'examples_zipfile', 'api_doxygen', 'user_pdf', 'install_pdf', 'cookbook_pdf', 'inversion_pdf'])
970    env.Alias('docs', ['basedocs', 'sphinxdoc'])
971    env.Alias('release_prep', ['docs', 'install_all'])
972    env.Alias('release_prep_old', ['basedocs', 'api_epydoc', 'install_all'])
973    
974    
975    # The test scripts are always generated; this target allows us to
976    # generate the test scripts without doing a full build
977    env.Alias('testscripts',[])
978    
979  syslib_install_target = env.installDirectory(sys_libinstall,libinstall)  if not IS_WINDOWS:
980  syspy_install_target = env.installDirectory(sys_pyinstall,pyinstall,recursive=True)      try:
981            utest=open('utest.sh','w')
982            utest.write(GroupTest.makeHeader(env['PLATFORM'], prefix, False))
983            for tests in TestGroups:
984                utest.write(tests.makeString())
985            utest.close()
986            Execute(Chmod('utest.sh', 0o755))
987            print("Generated utest.sh.")
988            # This version contains only python tests - I want this to be usable
989            # from a binary-only install if you have the test files
990            utest=open('itest.sh','w')
991            utest.write(GroupTest.makeHeader(env['PLATFORM'], prefix, True))
992            for tests in TestGroups:
993              if tests.exec_cmd=='$PYTHONRUNNER ':
994                utest.write(tests.makeString())
995            utest.close()
996            Execute(Chmod('itest.sh', 0o755))
997            print("Generated itest.sh.")        
998        except IOError:
999            print("Error attempting to write unittests file.")
1000            Exit(1)
1001    
1002        # delete utest.sh upon cleanup
1003        env.Clean('target_init', 'utest.sh')
1004        env.Clean('target_init', 'itest.sh')
1005    
1006        # Make sure that the escript wrapper is in place
1007        if not os.path.isfile(os.path.join(env['bininstall'], 'run-escript')):
1008            print("Copying escript wrapper.")
1009            Execute(Copy(os.path.join(env['bininstall'],'run-escript'), 'bin/run-escript'))
1010    
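
utest.sh and itest.sh above are assembled from the TestGroups list that the per-module SConscripts fill in: each script starts with a header from GroupTest.makeHeader() and then gets one block per group from makeString(), and itest.sh keeps only the groups whose exec_cmd is the Python runner so it remains usable against a binary-only install. The sketch below is a bare-bones illustration of that assembly; the real GroupTest lives in grouptest.py, so the miniature class and write_runner() are stand-ins, not project code.

# Stand-in illustration of the runner-script assembly above; the real
# GroupTest.makeHeader() also emits environment setup, omitted here.
import os

class MiniGroupTest:
    def __init__(self, exec_cmd, workdir, cases):
        self.exec_cmd = exec_cmd          # e.g. '$PYTHONRUNNER ' for python tests
        self.workdir = workdir
        self.cases = cases
    def makeString(self):
        lines = ['cd %s' % self.workdir]
        lines += ['%s%s' % (self.exec_cmd, c) for c in self.cases]
        return '\n'.join(lines) + '\n'

def write_runner(filename, groups, python_only=False):
    body = '#!/bin/sh\n'
    for g in groups:
        if python_only and g.exec_cmd != '$PYTHONRUNNER ':
            continue
        body += g.makeString()
    open(filename, 'w').write(body)
    os.chmod(filename, 0o755)             # mirrors Execute(Chmod(..., 0o755)) above
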
 install_target = env.Alias("install", env.Flatten([syslib_install_target, syspy_install_target]) )  

Legend: removed from v.1390 | changed lines | added in v.4173
