/[escript]/trunk/SConstruct

Diff of /trunk/SConstruct


Diff between revision 2300 by gross, Wed Mar 11 08:17:57 2009 UTC and revision 3604 by caltinay, Mon Sep 19 04:47:53 2011 UTC
# Line 1 (old) / Line 1 (new)
   
1  ########################################################  ########################################################
2  #  #
3  # Copyright (c) 2003-2008 by University of Queensland  # Copyright (c) 2003-2010 by University of Queensland
4  # Earth Systems Science Computational Center (ESSCC)  # Earth Systems Science Computational Center (ESSCC)
5  # http://www.uq.edu.au/esscc  # http://www.uq.edu.au/esscc
6  #  #
# Line 11 (old) / Line 10 (new)
10  #  #
11  ########################################################  ########################################################
12    
13    EnsureSConsVersion(0,98,1)
14    EnsurePythonVersion(2,5)
15    
16  EnsureSConsVersion(0,96,91)  import sys, os, platform, re
17  EnsurePythonVersion(2,3)  from distutils import sysconfig
18    from site_init import *
19  import sys, os, re, socket, platform, stat  
20    # Version number to check for in options file. Increment when new features are
21  # Add our extensions  # added or existing options changed.
22  if os.path.isdir('scons'): sys.path.append('scons')  REQUIRED_OPTS_VERSION=201
 import scons_extensions  
   
 # Use /usr/lib64 if available, else /usr/lib  
 usr_lib = '/usr/lib'  
 if os.path.isfile('/usr/lib64/libc.so'): usr_lib = '/usr/lib64'  
   
 # The string python2.4 or python2.5  
 python_version = 'python%s.%s' % (sys.version_info[0], sys.version_info[1])  
23    
24  # MS Windows support, many thanks to PH  # MS Windows support, many thanks to PH
25  IS_WINDOWS_PLATFORM = (os.name== "nt")  IS_WINDOWS = (os.name == 'nt')
26    
27  prefix = ARGUMENTS.get('prefix', Dir('#.').abspath)  ########################## Determine options file ############################
28    # 1. command line
29    # 2. scons/<hostname>_options.py
30    # 3. name as part of a cluster
31    options_file=ARGUMENTS.get('options_file', None)
32    if not options_file:
33        ext_dir = os.path.join(os.getcwd(), 'scons')
34        hostname = platform.node().split('.')[0]
35        for name in hostname, effectiveName(hostname):
36            mangledhostname = re.sub('[^0-9a-zA-Z]', '_', hostname)
37            options_file = os.path.join(ext_dir, mangledhostname+'_options.py')
38            if os.path.isfile(options_file): break
39    
 # Read configuration options from file scons/<hostname>_options.py  
 hostname = re.sub("[^0-9a-zA-Z]", "_", socket.gethostname().split('.')[0])  
 tmp = os.path.join("scons",hostname+"_options.py")  
 options_file = ARGUMENTS.get('options_file', tmp)  
40  if not os.path.isfile(options_file):  if not os.path.isfile(options_file):
41    options_file = False      print("\nWARNING:\nOptions file %s" % options_file)
42    print "Options file not found (expected '%s')" % tmp      print("not found! Default options will be used which is most likely suboptimal.")
43  else:      print("It is recommended that you copy one of the TEMPLATE files in the scons/")
44    print "Options file is", options_file      print("subdirectory and customize it to your needs.\n")
45        options_file = None
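The options file loaded above is a plain Python file that assigns values to the build variables declared in the next block. A minimal sketch of a scons/<hostname>_options.py, with purely illustrative values (not taken from the real TEMPLATE files):

    # scons/myhost_options.py -- illustrative sketch only
    escript_opts_version = 201      # must match REQUIRED_OPTS_VERSION above
    cc_optim = '-O3'
    openmp = True
    boost_prefix = '/usr/local'
    boost_libs = ['boost_python']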
46  # Load options file and command-line arguments  
47  opts = Options(options_file, ARGUMENTS)  ############################### Build options ################################
48    
49  ############ Load build options ################################  default_prefix='/usr'
50    mpi_flavours=('none', 'MPT', 'MPICH', 'MPICH2', 'OPENMPI', 'INTELMPI')
51  opts.AddOptions(  lapack_flavours=('none', 'clapack', 'mkl')
52  # Where to install esys stuff  
53    ('prefix', 'where everything will be installed',                       Dir('#.').abspath),  vars = Variables(options_file, ARGUMENTS)
54    ('incinstall', 'where the esys headers will be installed',             os.path.join(Dir('#.').abspath,'include')),  vars.AddVariables(
55    ('bininstall', 'where the esys binaries will be installed',            os.path.join(prefix,'bin')),    PathVariable('options_file', 'Path to options file', options_file, PathVariable.PathIsFile),
56    ('libinstall', 'where the esys libraries will be installed',           os.path.join(prefix,'lib')),    PathVariable('prefix', 'Installation prefix', Dir('#.').abspath, PathVariable.PathIsDirCreate),
57    ('pyinstall', 'where the esys python modules will be installed',       os.path.join(prefix,'esys')),    PathVariable('build_dir', 'Top-level build directory', Dir('#/build').abspath, PathVariable.PathIsDirCreate),
58  # Compilation options    BoolVariable('verbose', 'Output full compile/link lines', False),
59    BoolOption('dodebug', 'For backwards compatibility', 'no'),  # Compiler/Linker options
60    BoolOption('usedebug', 'Do you want a debug build?', 'no'),    ('cc', 'Path to C compiler', 'default'),
61    BoolOption('usevtk', 'Do you want to use VTK?', 'yes'),    ('cxx', 'Path to C++ compiler', 'default'),
62    ('options_file', 'File of paths/options. Default: scons/<hostname>_options.py', options_file),    ('cc_flags', 'Base C/C++ compiler flags', 'default'),
63    ('win_cc_name', 'windows C compiler name if needed', 'msvc'),    ('cc_optim', 'Additional C/C++ flags for a non-debug build', 'default'),
64    # The strings -DDEFAULT_ get replaced by scons/<hostname>_options.py or by defaults below    ('cc_debug', 'Additional C/C++ flags for a debug build', 'default'),
65    ('cc_flags', 'C compiler flags to use', '-DEFAULT_1'),    ('cc_extra', 'Extra C compiler flags', ''),
66    ('cc_optim', 'C compiler optimization flags to use', '-DEFAULT_2'),    ('cxx_extra', 'Extra C++ compiler flags', ''),
   ('cc_debug', 'C compiler debug flags to use', '-DEFAULT_3'),  
   ('omp_optim', 'OpenMP compiler flags to use (Release build)', '-DEFAULT_4'),  
   ('omp_debug', 'OpenMP compiler flags to use (Debug build)', '-DEFAULT_5'),  
   ('omp_libs', 'OpenMP compiler libraries to link with', '-DEFAULT_6'),  
   ('cc_extra', 'Extra C/C++ flags', ''),  
67    ('ld_extra', 'Extra linker flags', ''),    ('ld_extra', 'Extra linker flags', ''),
68    ('sys_libs', 'System libraries to link with', []),    BoolVariable('werror','Treat compiler warnings as errors', True),
69    ('ar_flags', 'Static library archiver flags to use', ''),    BoolVariable('debug', 'Compile with debug flags', False),
70    BoolOption('useopenmp', 'Compile parallel version using OpenMP', 'no'),    BoolVariable('openmp', 'Compile parallel version using OpenMP', False),
71    BoolOption('usepedantic', 'Compile with -pedantic if using gcc', 'no'),    ('omp_flags', 'OpenMP compiler flags', 'default'),
72    BoolOption('usewarnings','Compile with warnings as errors if using gcc','yes'),    ('omp_ldflags', 'OpenMP linker flags', 'default'),
73    ('forcelazy','for testing use only - set the default value for autolazy','leave_alone'),  # Mandatory libraries
74  # Python    ('boost_prefix', 'Prefix/Paths of boost installation', default_prefix),
75    ('python_path', 'Path to Python includes', '/usr/include/'+python_version),    ('boost_libs', 'Boost libraries to link with', ['boost_python-mt']),
76    ('python_lib_path', 'Path to Python libs', usr_lib),  # Mandatory for tests
77    ('python_libs', 'Python libraries to link with', [python_version]),    ('cppunit_prefix', 'Prefix/Paths of CppUnit installation', default_prefix),
78    ('python_cmd', 'Python command', 'python'),    ('cppunit_libs', 'CppUnit libraries to link with', ['cppunit']),
79  # Boost  # Optional libraries and options
80    ('boost_path', 'Path to Boost includes', '/usr/include'),    EnumVariable('mpi', 'Compile parallel version using MPI flavour', 'none', allowed_values=mpi_flavours),
81    ('boost_lib_path', 'Path to Boost libs', usr_lib),    ('mpi_prefix', 'Prefix/Paths of MPI installation', default_prefix),
82    ('boost_libs', 'Boost libraries to link with', ['boost_python']),    ('mpi_libs', 'MPI shared libraries to link with', ['mpi']),
83  # NetCDF    BoolVariable('netcdf', 'Enable netCDF file support', False),
84    BoolOption('usenetcdf', 'switch on/off the usage of netCDF', 'yes'),    ('netcdf_prefix', 'Prefix/Paths of netCDF installation', default_prefix),
85    ('netCDF_path', 'Path to netCDF includes', '/usr/include'),    ('netcdf_libs', 'netCDF libraries to link with', ['netcdf_c++', 'netcdf']),
86    ('netCDF_lib_path', 'Path to netCDF libs', usr_lib),    BoolVariable('parmetis', 'Enable ParMETIS (requires MPI)', False),
87    ('netCDF_libs', 'netCDF C++ libraries to link with', ['netcdf_c++', 'netcdf']),    ('parmetis_prefix', 'Prefix/Paths of ParMETIS installation', default_prefix),
88  # MPI    ('parmetis_libs', 'ParMETIS libraries to link with', ['parmetis', 'metis']),
89    BoolOption('useMPI', 'For backwards compatibility', 'no'),    BoolVariable('papi', 'Enable PAPI', False),
90    BoolOption('usempi', 'Compile parallel version using MPI', 'no'),    ('papi_prefix', 'Prefix/Paths to PAPI installation', default_prefix),
   ('MPICH_IGNORE_CXX_SEEK', 'name of macro to ignore MPI settings of C++ SEEK macro (for MPICH)' , 'MPICH_IGNORE_CXX_SEEK'),  
   ('mpi_path', 'Path to MPI includes', '/usr/include'),  
   ('mpi_run', 'mpirun name' , 'mpiexec -np 1'),  
   ('mpi_lib_path', 'Path to MPI libs (needs to be added to the LD_LIBRARY_PATH)', usr_lib),  
   ('mpi_libs', 'MPI libraries to link with (needs to be shared!)', ['mpich' , 'pthread', 'rt']),  
 # ParMETIS  
   BoolOption('useparmetis', 'Compile parallel version using ParMETIS', 'yes'),  
   ('parmetis_path', 'Path to ParMETIS includes', '/usr/include'),  
   ('parmetis_lib_path', 'Path to ParMETIS library', usr_lib),  
   ('parmetis_libs', 'ParMETIS library to link with', ['parmetis', 'metis']),  
 # PAPI  
   BoolOption('usepapi', 'switch on/off the usage of PAPI', 'no'),  
   ('papi_path', 'Path to PAPI includes', '/usr/include'),  
   ('papi_lib_path', 'Path to PAPI libs', usr_lib),  
91    ('papi_libs', 'PAPI libraries to link with', ['papi']),    ('papi_libs', 'PAPI libraries to link with', ['papi']),
92    BoolOption('papi_instrument_solver', 'use PAPI in Solver.c to instrument each iteration of the solver', False),    BoolVariable('papi_instrument_solver', 'Use PAPI to instrument each iteration of the solver', False),
93  # MKL    BoolVariable('mkl', 'Enable the Math Kernel Library', False),
94    BoolOption('usemkl', 'switch on/off the usage of MKL', 'no'),    ('mkl_prefix', 'Prefix/Paths to MKL installation', default_prefix),
95    ('mkl_path', 'Path to MKL includes', '/sw/sdev/cmkl/10.0.2.18/include'),    ('mkl_libs', 'MKL libraries to link with', ['mkl_solver','mkl_em64t','guide','pthread']),
96    ('mkl_lib_path', 'Path to MKL libs', '/sw/sdev/cmkl/10.0.2.18/lib/em64t'),    BoolVariable('umfpack', 'Enable UMFPACK', False),
97    ('mkl_libs', 'MKL libraries to link with', ['mkl_solver', 'mkl_em64t', 'guide', 'pthread']),    ('umfpack_prefix', 'Prefix/Paths to UMFPACK installation', default_prefix),
98  # UMFPACK    ('umfpack_libs', 'UMFPACK libraries to link with', ['umfpack']),
99    BoolOption('useumfpack', 'switch on/off the usage of UMFPACK', 'no'),    BoolVariable('boomeramg', 'Enable BoomerAMG', False),
100    ('ufc_path', 'Path to UFconfig includes', '/usr/include/suitesparse'),    ('boomeramg_prefix', 'Prefix/Paths to BoomerAMG installation', default_prefix),
101    ('umf_path', 'Path to UMFPACK includes', '/usr/include/suitesparse'),    ('boomeramg_libs', 'BoomerAMG libraries to link with', ['boomeramg']),
102    ('umf_lib_path', 'Path to UMFPACK libs', usr_lib),    EnumVariable('lapack', 'Set LAPACK flavour', 'none', allowed_values=lapack_flavours),
103    ('umf_libs', 'UMFPACK libraries to link with', ['umfpack']),    ('lapack_prefix', 'Prefix/Paths to LAPACK installation', default_prefix),
104  # Silo    ('lapack_libs', 'LAPACK libraries to link with', []),
105    BoolOption('usesilo', 'switch on/off the usage of Silo', 'yes'),    BoolVariable('silo', 'Enable the Silo file format in weipa', False),
106    ('silo_path', 'Path to Silo includes', '/usr/include'),    ('silo_prefix', 'Prefix/Paths to Silo installation', default_prefix),
   ('silo_lib_path', 'Path to Silo libs', usr_lib),  
107    ('silo_libs', 'Silo libraries to link with', ['siloh5', 'hdf5']),    ('silo_libs', 'Silo libraries to link with', ['siloh5', 'hdf5']),
108  # AMD (used by UMFPACK)    BoolVariable('visit', 'Enable the VisIt simulation interface', False),
109    ('amd_path', 'Path to AMD includes', '/usr/include/suitesparse'),    ('visit_prefix', 'Prefix/Paths to VisIt installation', default_prefix),
110    ('amd_lib_path', 'Path to AMD libs', usr_lib),    ('visit_libs', 'VisIt libraries to link with', ['simV2']),
111    ('amd_libs', 'AMD libraries to link with', ['amd']),    BoolVariable('pyvisi', 'Enable pyvisi (deprecated, requires VTK module)', False),
112  # BLAS (used by UMFPACK)    BoolVariable('vsl_random', 'Use VSL from intel for random data', False),
113    ('blas_path', 'Path to BLAS includes', '/usr/include/suitesparse'),  # Advanced settings
114    ('blas_lib_path', 'Path to BLAS libs', usr_lib),    #dudley_assemble_flags = -funroll-loops      to actually do something
115    ('blas_libs', 'BLAS libraries to link with', ['blas']),    ('dudley_assemble_flags', 'compiler flags for some dudley optimisations', ''),
116  # An option for specifying the compiler tools set (see windows branch).    # To enable passing function pointers through python
117    ('tools_names', 'allow control over the tools in the env setup', ['intelc']),    BoolVariable('iknowwhatimdoing', 'Allow non-standard C', False),
118  # finer control over library building, intel aggressive global optimisation    # An option for specifying the compiler tools (see windows branch)
119  # works with dynamic libraries on windows.    ('tools_names', 'Compiler tools to use', ['default']),
120    ('share_esysUtils', 'control static or dynamic esysUtils lib', False),    ('env_export', 'Environment variables to be passed to tools',[]),
121    ('share_paso', 'control static or dynamic paso lib', False)    EnumVariable('forcelazy', 'For testing use only - set the default value for autolazy', 'leave_alone', allowed_values=('leave_alone', 'on', 'off')),
122      EnumVariable('forcecollres', 'For testing use only - set the default value for force resolving collective ops', 'leave_alone', allowed_values=('leave_alone', 'on', 'off')),
123      # finer control over library building, intel aggressive global optimisation
124      # works with dynamic libraries on windows.
125      ('build_shared', 'Build dynamic libraries only', False),
126      ('sys_libs', 'Extra libraries to link with', []),
127      ('escript_opts_version', 'Version of options file (do not specify on command line)'),
128  )  )
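Any of the variables declared above can also be set on the scons command line, which overrides the options file. An illustrative invocation (paths and values are examples only):

    scons -j4 options_file=scons/myhost_options.py prefix=/opt/escript debug=0 openmp=1 verbose=1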
129    
130  ############ Specify which compilers to use ####################  ##################### Create environment and help text #######################
   
 # intelc uses regular expressions improperly and emits a warning about  
 # failing to find the compilers.  This warning can be safely ignored.  
   
 if IS_WINDOWS_PLATFORM:  
       env = Environment(options = opts)  
       env = Environment(tools = ['default'] + env['tools_names'],  
                         options = opts)  
 else:  
    if socket.gethostname().split('.')[0] == 'service0':  
       env = Environment(tools = ['default', 'intelc'], options = opts)  
    elif os.uname()[4]=='ia64':  
       env = Environment(tools = ['default', 'intelc'], options = opts)  
       if env['CXX'] == 'icpc':  
          env['LINK'] = env['CXX'] # version >=9 of intel c++ compiler requires use of icpc to link in C++ runtimes (icc does not)  
    else:  
       env = Environment(tools = ['default'], options = opts)  
 Help(opts.GenerateHelpText(env))  
   
 ############ Fill in compiler options if not set above #########  
   
 # Backwards compatibility: allow dodebug=yes and useMPI=yes  
 if env['dodebug']: env['usedebug'] = 1  
 if env['useMPI']: env['usempi'] = 1  
   
 # Default compiler options (override allowed in hostname_options.py, but should not be necessary)  
 # For both C and C++ you get: cc_flags and either the optim flags or debug flags  
   
 sysheaderopt = ""       # how do we indicate that a header is a system header. Use "" for no action.  
   
 if env["CC"] == "icc":  
   # Intel compilers  
   cc_flags      = "-fPIC -ansi -wd161 -w1 -vec-report0 -DBLOCKTIMER -DCORE_ID1"  
   cc_optim      = "-O3 -ftz -IPF_ftlacc- -IPF_fma -fno-alias"  
   cc_debug      = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"  
   omp_optim     = "-openmp -openmp_report0"  
   omp_debug     = "-openmp -openmp_report0"  
   omp_libs      = ['guide', 'pthread']  
   pedantic      = ""  
   fatalwarning      = ""        # Switch to turn warnings into errors  
   sysheaderopt      = ""  
 elif env["CC"] == "gcc":  
   # GNU C on any system  
   cc_flags      = "-pedantic -Wall -fPIC -ansi -ffast-math -Wno-unknown-pragmas -DBLOCKTIMER  -Wno-sign-compare -Wno-system-headers -Wno-long-long -Wno-strict-aliasing"  
 #the long long warning occurs on the Mac  
   cc_optim      = "-O3"  
   cc_debug      = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"  
   omp_optim     = "-fopenmp"  
   omp_debug     = "-fopenmp"  
   omp_libs      = ['gomp']  
   pedantic      = "-pedantic-errors -Wno-long-long"  
   fatalwarning      = "-Werror"  
   sysheaderopt      = "-isystem "  
 elif env["CC"] == "cl":  
   # Microsoft Visual C on Windows  
   cc_flags      = "/FD /EHsc /GR /wd4068 -D_USE_MATH_DEFINES -DDLL_NETCDF"  
   cc_optim      = "/O2 /Op /MT /W3"  
   cc_debug      = "/Od /RTC1 /MTd /ZI -DBOUNDS_CHECK"  
   omp_optim     = ""  
   omp_debug     = ""  
   omp_libs      = []  
   pedantic      = ""  
   fatalwarning      = ""  
   sysheaderopt      = ""  
 elif env["CC"] == "icl":  
   # intel C on Windows, see windows_intelc_options.py for a start  
   pedantic      = ""  
   fatalwarning      = ""  
   sysheaderopt      = ""  
   
   
 # If not specified in hostname_options.py then set them here  
 if env["cc_flags"]  == "-DEFAULT_1": env['cc_flags'] = cc_flags  
 if env["cc_optim"]  == "-DEFAULT_2": env['cc_optim'] = cc_optim  
 if env["cc_debug"]  == "-DEFAULT_3": env['cc_debug'] = cc_debug  
 if env["omp_optim"] == "-DEFAULT_4": env['omp_optim'] = omp_optim  
 if env["omp_debug"] == "-DEFAULT_5": env['omp_debug'] = omp_debug  
 if env["omp_libs"]  == "-DEFAULT_6": env['omp_libs'] = omp_libs  
   
 #set up the autolazy values  
 if env['forcelazy']    != "leave_alone":  
   if env['forcelazy'] == 'on':  
     env.Append(CPPDEFINES='FAUTOLAZYON')  
   else:  
      if env['forcelazy'] == 'off':  
     env.Append(CPPDEFINES='FAUTOLAZYOFF')  
   
 # OpenMP is disabled if useopenmp=no or both variables omp_optim and omp_debug are empty  
 if not env["useopenmp"]:  
   env['omp_optim'] = ""  
   env['omp_debug'] = ""  
   env['omp_libs'] = []  
   
 if env['omp_optim'] == "" and env['omp_debug'] == "": env["useopenmp"] = 0  
   
 ############ Copy environment variables into scons env #########  
   
 try: env['ENV']['OMP_NUM_THREADS'] = os.environ['OMP_NUM_THREADS']  
 except KeyError: env['ENV']['OMP_NUM_THREADS'] = 1  
   
 try: env['ENV']['PATH'] = os.environ['PATH']  
 except KeyError: pass  
   
 try: env['ENV']['PYTHONPATH'] = os.environ['PYTHONPATH']  
 except KeyError: pass  
   
 try: env['ENV']['C_INCLUDE_PATH'] = os.environ['C_INCLUDE_PATH']  
 except KeyError: pass  
   
 try: env['ENV']['CPLUS_INCLUDE_PATH'] = os.environ['CPLUS_INCLUDE_PATH']  
 except KeyError: pass  
   
 try: env['ENV']['LD_LIBRARY_PATH'] = os.environ['LD_LIBRARY_PATH']  
 except KeyError: pass  
   
 try: env['ENV']['LIBRARY_PATH'] = os.environ['LIBRARY_PATH']  
 except KeyError: pass  
   
 try: env['ENV']['DISPLAY'] = os.environ['DISPLAY']  
 except KeyError: pass  
   
 try: env['ENV']['XAUTHORITY'] = os.environ['XAUTHORITY']  
 except KeyError: pass  
   
 try: env['ENV']['HOME'] = os.environ['HOME']  
 except KeyError: pass  
   
 # Configure for test suite  
 env.PrependENVPath('PYTHONPATH', prefix)  
 env.PrependENVPath('LD_LIBRARY_PATH', env['libinstall'])  
   
 env['ENV']['ESCRIPT_ROOT'] = prefix  
131    
132  ############ Set up paths for Configure() ######################  # Intel's compiler uses regular expressions improperly and emits a warning
133    # about failing to find the compilers. This warning can be safely ignored.
134    
135  # Make a copy of an environment  # PATH is needed so the compiler, linker and tools are found if they are not
136  # Use env.Clone if available, but fall back on env.Copy for older version of scons  # in default locations.
137  def clone_env(env):  env = Environment(tools = ['default'], options = vars,
138    if 'Clone' in dir(env): return env.Clone()    # scons-0.98                    ENV = {'PATH': os.environ['PATH']})
139    else:                   return env.Copy() # scons-0.96  if env['tools_names'] != 'default':
140        env = Environment(tools = ['default'] + env['tools_names'], options = vars,
141                          ENV = {'PATH' : os.environ['PATH']})
142    
143    if options_file:
144        opts_valid=False
145        if 'escript_opts_version' in env.Dictionary() and \
146            int(env['escript_opts_version']) >= REQUIRED_OPTS_VERSION:
147                opts_valid=True
148        if opts_valid:
149            print("Using options in %s." % options_file)
150        else:
151            print("\nOptions file %s" % options_file)
152            print("is outdated! Please update the file by examining one of the TEMPLATE")
153            print("files in the scons/ subdirectory and setting escript_opts_version to %d.\n"%REQUIRED_OPTS_VERSION)
154            Exit(1)
155    
156    # Generate help text (scons -h)
157    Help(vars.GenerateHelpText(env))
158    
159    # Check for superfluous options
160    if len(vars.UnknownVariables())>0:
161        for k in vars.UnknownVariables():
162            print("Unknown option '%s'" % k)
163        Exit(1)
164    
165    #################### Make sure install directories exist #####################
166    
167    env['BUILD_DIR']=env['build_dir']
168    prefix=Dir(env['prefix']).abspath
169    env['incinstall'] = os.path.join(prefix, 'include')
170    env['bininstall'] = os.path.join(prefix, 'bin')
171    env['libinstall'] = os.path.join(prefix, 'lib')
172    env['pyinstall']  = os.path.join(prefix, 'esys')
173    if not os.path.isdir(env['bininstall']):
174        os.makedirs(env['bininstall'])
175    if not os.path.isdir(env['libinstall']):
176        os.makedirs(env['libinstall'])
177    if not os.path.isdir(env['pyinstall']):
178        os.makedirs(env['pyinstall'])
179    
180    env.Append(CPPPATH = [env['incinstall']])
181    env.Append(LIBPATH = [env['libinstall']])
182    
183    ################# Fill in compiler options if not set above ##################
184    
185    if env['cc'] != 'default': env['CC']=env['cc']
186    if env['cxx'] != 'default': env['CXX']=env['cxx']
187    
188    # version >=9 of intel C++ compiler requires use of icpc to link in C++
189    # runtimes (icc does not)
190    if not IS_WINDOWS and os.uname()[4]=='ia64' and env['CXX']=='icpc':
191        env['LINK'] = env['CXX']
192    
193    # default compiler/linker options
194    cc_flags = ''
195    cc_optim = ''
196    cc_debug = ''
197    omp_flags = ''
198    omp_ldflags = ''
199    fatalwarning = '' # switch to turn warnings into errors
200    sysheaderopt = '' # how to indicate that a header is a system header
201    
202    # env['CC'] might be a full path
203    cc_name=os.path.basename(env['CC'])
204    
205    if cc_name == 'icc':
206        # Intel compiler
207        cc_flags    = "-std=c99 -fPIC -wd161 -w1 -vec-report0 -DBLOCKTIMER -DCORE_ID1"
208        cc_optim    = "-O3 -ftz -IPF_ftlacc- -IPF_fma -fno-alias -ip"
209        cc_debug    = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
210        omp_flags   = "-openmp -openmp_report0"
211        omp_ldflags = "-openmp -openmp_report0 -lguide -lpthread"
212        fatalwarning = "-Werror"
213    elif cc_name[:3] == 'gcc':
214        # GNU C on any system
215        cc_flags     = "-pedantic -Wall -fPIC -ffast-math -Wno-unknown-pragmas -DBLOCKTIMER  -Wno-sign-compare -Wno-system-headers -Wno-long-long -Wno-strict-aliasing -finline-functions"
216        cc_optim     = "-O3"
217        cc_debug     = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
218        omp_flags    = "-fopenmp"
219        omp_ldflags  = "-fopenmp"
220        fatalwarning = "-Werror"
221        sysheaderopt = "-isystem"
222    elif cc_name == 'cl':
223        # Microsoft Visual C on Windows
224        cc_flags     = "/EHsc /MD /GR /wd4068 /D_USE_MATH_DEFINES /DDLL_NETCDF"
225        cc_optim     = "/O2 /Op /W3"
226        cc_debug     = "/Od /RTCcsu /ZI /DBOUNDS_CHECK"
227        fatalwarning = "/WX"
228    elif cc_name == 'icl':
229        # Intel C on Windows
230        cc_flags     = '/EHsc /GR /MD'
231        cc_optim     = '/fast /Oi /W3 /Qssp /Qinline-factor- /Qinline-min-size=0 /Qunroll'
232        cc_debug     = '/Od /RTCcsu /Zi /Y- /debug:all /Qtrapuv'
233        omp_flags    = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel'
234        omp_ldflags  = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel'
235    
236    # set defaults if not otherwise specified
237    if env['cc_flags']    == 'default': env['cc_flags'] = cc_flags
238    if env['cc_optim']    == 'default': env['cc_optim'] = cc_optim
239    if env['cc_debug']    == 'default': env['cc_debug'] = cc_debug
240    if env['omp_flags']   == 'default': env['omp_flags'] = omp_flags
241    if env['omp_ldflags'] == 'default': env['omp_ldflags'] = omp_ldflags
242    if env['cc_extra']  != '': env.Append(CFLAGS = env['cc_extra'])
243    if env['cxx_extra'] != '': env.Append(CXXFLAGS = env['cxx_extra'])
244    if env['ld_extra']  != '': env.Append(LINKFLAGS = env['ld_extra'])
245    
246    # set up the autolazy values
247    if env['forcelazy'] == 'on':
248        env.Append(CPPDEFINES=['FAUTOLAZYON'])
249    elif env['forcelazy'] == 'off':
250        env.Append(CPPDEFINES=['FAUTOLAZYOFF'])
251    
252    # set up the collective resolve values
253    if env['forcecollres'] == 'on':
254        env.Append(CPPDEFINES=['FRESCOLLECTON'])
255    elif env['forcecollres'] == 'off':
256        env.Append(CPPDEFINES=['FRESCOLLECTOFF'])
257    
258    # allow non-standard C if requested
259    if env['iknowwhatimdoing']:
260        env.Append(CPPDEFINES=['IKNOWWHATIMDOING'])
261    
262    # Disable OpenMP if no flags provided
263    if env['openmp'] and env['omp_flags'] == '':
264       print("OpenMP requested but no flags provided - disabling OpenMP!")
265       env['openmp'] = False
266    
267    if env['openmp']:
268        env.Append(CCFLAGS = env['omp_flags'])
269        if env['omp_ldflags'] != '': env.Append(LINKFLAGS = env['omp_ldflags'])
270    else:
271        env['omp_flags']=''
272        env['omp_ldflags']=''
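As a concrete illustration of the two blocks above: with gcc, openmp=1 and no overrides in the options file, the defaults selected earlier make the net effect roughly

    env.Append(CCFLAGS = '-fopenmp')
    env.Append(LINKFLAGS = '-fopenmp')

whereas with openmp left at False the OpenMP flags are simply cleared and nothing is appended.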
273    
274    # add debug/non-debug compiler flags
275    if env['debug']:
276        env.Append(CCFLAGS = env['cc_debug'])
277    else:
278        env.Append(CCFLAGS = env['cc_optim'])
279    
280  # Add cc option -I<Escript>/trunk/include  # always add cc_flags
281  env.Append(CPPPATH      = [Dir('include')])  env.Append(CCFLAGS = env['cc_flags'])
282    
283  # Add cc option -L<Escript>/trunk/lib  # add system libraries
284  env.Append(LIBPATH      = [Dir(env['libinstall'])])  env.AppendUnique(LIBS = env['sys_libs'])
285    
286  if env['cc_extra'] != '': env.Append(CCFLAGS = env['cc_extra'])  # Get the global Subversion revision number for the getVersion() method
287  if env['ld_extra'] != '': env.Append(LINKFLAGS = env['ld_extra'])  try:
288        global_revision = os.popen('svnversion -n .').read()
289        global_revision = re.sub(':.*', '', global_revision)
290        global_revision = re.sub('[^0-9]', '', global_revision)
291        if global_revision == '': global_revision='-2'
292    except:
293        global_revision = '-1'
294    env['svn_revision']=global_revision
295    env.Append(CPPDEFINES=['SVN_VERSION='+global_revision])
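To make the two re.sub() calls above concrete: svnversion prints strings such as '3604', '3600:3604M' (mixed revision, locally modified) or 'exported'. A short worked example of the stripping:

    rev = '3600:3604M'
    rev = re.sub(':.*', '', rev)     # -> '3600'
    rev = re.sub('[^0-9]', '', rev)  # -> '3600'
    # 'exported' reduces to '' and is therefore mapped to '-2'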
296    
297    if IS_WINDOWS:
298        if not env['build_shared']:
299            env.Append(CPPDEFINES = ['ESYSUTILS_STATIC_LIB'])
300            env.Append(CPPDEFINES = ['PASO_STATIC_LIB'])
301    
302    ###################### Copy required environment vars ########################
303    
304    # Windows doesn't use LD_LIBRARY_PATH but PATH instead
305    if IS_WINDOWS:
306        LD_LIBRARY_PATH_KEY='PATH'
307        env['ENV']['LD_LIBRARY_PATH']=''
308    else:
309        LD_LIBRARY_PATH_KEY='LD_LIBRARY_PATH'
310    
311    # the following env variables are exported for the unit tests
312    
313    for key in 'OMP_NUM_THREADS', 'ESCRIPT_NUM_PROCS', 'ESCRIPT_NUM_NODES':
314        try:
315            env['ENV'][key] = os.environ[key]
316        except KeyError:
317            env['ENV'][key] = 1
318    
319    env_export=env['env_export']
320    env_export.extend(['ESCRIPT_NUM_THREADS','ESCRIPT_HOSTFILE','DISPLAY','XAUTHORITY','PATH','HOME','TMPDIR','TEMP','TMP'])
321    
322    for key in set(env_export):
323        try:
324            env['ENV'][key] = os.environ[key]
325        except KeyError:
326            pass
327    
328  if env['usepedantic']: env.Append(CCFLAGS = pedantic)  try:
329        env.PrependENVPath(LD_LIBRARY_PATH_KEY, os.environ[LD_LIBRARY_PATH_KEY])
330    except KeyError:
331        pass
332    
333    # these shouldn't be needed
334    #for key in 'C_INCLUDE_PATH','CPLUS_INCLUDE_PATH','LIBRARY_PATH':
335    #    try:
336    #        env['ENV'][key] = os.environ[key]
337    #    except KeyError:
338    #        pass
339    
340  # MS Windows  try:
341  if IS_WINDOWS_PLATFORM:      env['ENV']['PYTHONPATH'] = os.environ['PYTHONPATH']
342    env.PrependENVPath('PATH',    [env['boost_lib_path']])  except KeyError:
343    env.PrependENVPath('PATH',    [env['libinstall']])      pass
   if not env['share_esysUtils'] :  
     env.Append(CPPDEFINES = ['ESYSUTILS_STATIC_LIB'])  
   if not env['share_paso'] :  
     env.Append(CPPDEFINES = ['PASO_STATIC_LIB'])  
344    
345    if env['usenetcdf']:  ######################## Add some custom builders ############################
     env.PrependENVPath('PATH',  [env['netCDF_lib_path']])  
346    
347  env.Append(ARFLAGS = env['ar_flags'])  py_builder = Builder(action = build_py, suffix = '.pyc', src_suffix = '.py', single_source=True)
348    env.Append(BUILDERS = {'PyCompile' : py_builder});
349    
350  # Get the global Subversion revision number for getVersion() method  runUnitTest_builder = Builder(action = runUnitTest, suffix = '.passed', src_suffix=env['PROGSUFFIX'], single_source=True)
351  try:  env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder});
    global_revision = os.popen("svnversion -n .").read()  
    global_revision = re.sub(":.*", "", global_revision)  
    global_revision = re.sub("[^0-9]", "", global_revision)  
 except:  
    global_revision="-1"  
 if global_revision == "": global_revision="-2"  
 env.Append(CPPDEFINES = ["SVN_VERSION="+global_revision])  
352    
353  ############ numarray (required) ###############################  runPyUnitTest_builder = Builder(action = runPyUnitTest, suffix = '.passed', src_suffic='.py', single_source=True)
354    env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder});
355    
356  try:  epstopdfbuilder = Builder(action = eps2pdf, suffix='.pdf', src_suffix='.eps', single_source=True)
357    from numarray import identity  env.Append(BUILDERS = {'EpsToPDF' : epstopdfbuilder});
 except ImportError:  
   print "Cannot import numarray, you need to set your PYTHONPATH"  
   sys.exit(1)  
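The builders registered above are used from the SConscript files. The target names below are hypothetical, but the call pattern follows from the suffix/src_suffix settings:

    env.PyCompile('esys/escript/util.py')   # -> util.pyc
    env.RunUnitTest('escript_UnitTest')     # -> escript_UnitTest.passed
    env.RunPyUnitTest('run_util_test.py')   # -> run_util_test.passed
    env.EpsToPDF('figures/mesh.eps')        # -> mesh.pdf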
358    
359  ############ C compiler (required) #############################  ############################ Dependency checks ###############################
360    
361  # Create a Configure() environment for checking existence of required libraries and headers  # Create a Configure() environment to check for compilers and python
362  conf = Configure(clone_env(env))  conf = Configure(env.Clone())
363    
364  # Test that the compiler is working  ######## Test that the compilers work
365  if not conf.CheckFunc('printf'):  
366     print "Cannot run C compiler '%s' (or libc is missing)" % (env['CC'])  if 'CheckCC' in dir(conf): # exists since scons 1.1.0
367     sys.exit(1)      if not conf.CheckCC():
368            print("Cannot run C compiler '%s' (check config.log)" % (env['CC']))
369            Exit(1)
370        if not conf.CheckCXX():
371            print("Cannot run C++ compiler '%s' (check config.log)" % (env['CXX']))
372            Exit(1)
373    else:
374        if not conf.CheckFunc('printf', language='c'):
375            print("Cannot run C compiler '%s' (check config.log)" % (env['CC']))
376            Exit(1)
377        if not conf.CheckFunc('printf', language='c++'):
378            print("Cannot run C++ compiler '%s' (check config.log)" % (env['CXX']))
379            Exit(1)
380    
381  if conf.CheckFunc('gethostname'):  if conf.CheckFunc('gethostname'):
382    conf.env.Append(CPPDEFINES = ['HAVE_GETHOSTNAME'])      conf.env.Append(CPPDEFINES = ['HAVE_GETHOSTNAME'])
383    
384  ############ python libraries (required) #######################  ######## Python headers & library (required)
385    
386    python_inc_path=sysconfig.get_python_inc()
387  if not sysheaderopt =="":  if IS_WINDOWS:
388    conf.env.Append(CCFLAGS=sysheaderopt+env['python_path'])      python_lib_path=os.path.join(sysconfig.get_config_var('prefix'), 'libs')
389    elif env['PLATFORM']=='darwin':
390        python_lib_path=sysconfig.get_config_var('LIBPL')
391    else:
392        python_lib_path=sysconfig.get_config_var('LIBDIR')
393    #python_libs=[sysconfig.get_config_var('LDLIBRARY')] # only on linux
394    if IS_WINDOWS:
395        python_libs=['python%s%s'%(sys.version_info[0], sys.version_info[1])]
396  else:  else:
397    conf.env.AppendUnique(CPPPATH     = [env['python_path']])      python_libs=['python'+sysconfig.get_python_version()]
398    
399  conf.env.AppendUnique(LIBPATH       = [env['python_lib_path']])  if sysheaderopt == '':
400  conf.env.AppendUnique(LIBS      = [env['python_libs']])      conf.env.AppendUnique(CPPPATH = [python_inc_path])
401    else:
402        conf.env.Append(CCFLAGS = [sysheaderopt, python_inc_path])
403    
404  conf.env.PrependENVPath('LD_LIBRARY_PATH', env['python_lib_path'])  # The wrapper script needs to find these libs  conf.env.AppendUnique(LIBPATH = [python_lib_path])
405  conf.env.PrependENVPath('PYTHONPATH', prefix)  conf.env.AppendUnique(LIBS = python_libs)
406  conf.env.PrependENVPath('LD_LIBRARY_PATH', env['libinstall'])  # The wrapper script needs to find the libs
407    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, python_lib_path)
408    
409  if not conf.CheckCHeader('Python.h'):  if not conf.CheckCHeader('Python.h'):
410    print "Cannot find python include files (tried 'Python.h' in directory %s)" % (env['python_path'])      print("Cannot find python include files (tried 'Python.h' in directory %s)" % (python_inc_path))
411    sys.exit(1)      Exit(1)
412  if not conf.CheckFunc('Py_Exit'):  if not conf.CheckFunc('Py_Exit'):
413    print "Cannot find python library method Py_Main (tried lib %s in directory %s)" % (env['python_libs'], env['python_lib_path'])      print("Cannot find python library method Py_Main (tried %s in directory %s)" % (python_libs, python_lib_path))
414    sys.exit(1)      Exit(1)
   
 ############ boost (required) ##################################  
   
 if not sysheaderopt =="":  
 # This is required because we can't -isystem /usr/system because it breaks std includes  
   if os.path.normpath(env['boost_path']) =="/usr/include":  
     conf.env.Append(CCFLAGS=sysheaderopt+os.path.join(env['boost_path'],'boost'))  
   else:  
     conf.env.Append(CCFLAGS=sysheaderopt+env['boost_path'])  
 else:  
   conf.env.AppendUnique(CPPPATH     = [env['boost_path']])  
   
 conf.env.AppendUnique(LIBPATH       = [env['boost_lib_path']])  
 conf.env.AppendUnique(LIBS      = [env['boost_libs']])  
   
 conf.env.PrependENVPath('LD_LIBRARY_PATH', env['boost_lib_path'])   # The wrapper script needs to find these libs  
 #ensure that our path entries remain at the front  
 conf.env.PrependENVPath('PYTHONPATH', prefix)  
 conf.env.PrependENVPath('LD_LIBRARY_PATH', env['libinstall'])  
   
 if not conf.CheckCXXHeader('boost/python.hpp'):  
   print "Cannot find boost include files (tried boost/python.hpp in directory %s)" % (env['boost_path'])  
   sys.exit(1)  
   
 if not conf.CheckFunc('PyObject_SetAttr'):  
   print "Cannot find boost library method PyObject_SetAttr (tried method PyObject_SetAttr in library %s in directory %s)" % (env['boost_libs'], env['boost_lib_path'])  
   sys.exit(1)  
415    
416  # Commit changes to environment  # Commit changes to environment
417  env = conf.Finish()  env = conf.Finish()
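The Python include and library locations used in the checks above come from distutils.sysconfig rather than from user-supplied options. On a typical Linux installation the calls resolve roughly as follows (the concrete paths are assumptions for illustration):

    from distutils import sysconfig
    sysconfig.get_python_inc()            # e.g. '/usr/include/python2.6'
    sysconfig.get_config_var('LIBDIR')    # e.g. '/usr/lib'
    sysconfig.get_python_version()        # e.g. '2.6'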
418    
419  ############ VTK (optional) ####################################  ######## boost (required)
420    
421  if env['usevtk']:  boost_inc_path,boost_lib_path=findLibWithHeader(env, env['boost_libs'], 'boost/python.hpp', env['boost_prefix'], lang='c++')
422    try:  if sysheaderopt == '':
423      import vtk      env.AppendUnique(CPPPATH = [boost_inc_path])
424      env['usevtk'] = 1  else:
425    except ImportError:      # This is required because we can't -isystem /usr/include since it breaks
426      env['usevtk'] = 0      # std includes
427        if os.path.normpath(boost_inc_path) == '/usr/include':
428  # Add VTK to environment env if it was found          conf.env.Append(CCFLAGS=[sysheaderopt, os.path.join(boost_inc_path,'boost')])
429  if env['usevtk']:      else:
430    env.Append(CPPDEFINES = ['USE_VTK'])          env.Append(CCFLAGS=[sysheaderopt, boost_inc_path])
431    
432  ############ NetCDF (optional) #################################  env.AppendUnique(LIBPATH = [boost_lib_path])
433    env.AppendUnique(LIBS = env['boost_libs'])
434  conf = Configure(clone_env(env))  env.PrependENVPath(LD_LIBRARY_PATH_KEY, boost_lib_path)
   
 if env['usenetcdf']:  
   conf.env.AppendUnique(CPPPATH = [env['netCDF_path']])  
   conf.env.AppendUnique(LIBPATH = [env['netCDF_lib_path']])  
   conf.env.AppendUnique(LIBS    = [env['netCDF_libs']])  
   conf.env.PrependENVPath('LD_LIBRARY_PATH', env['netCDF_lib_path'])    # The wrapper script needs to find these libs  
   #ensure that our path entries remain at the front  
   conf.env.PrependENVPath('PYTHONPATH', prefix)  
   conf.env.PrependENVPath('LD_LIBRARY_PATH', env['libinstall'])  
   
 if env['usenetcdf'] and not conf.CheckCHeader('netcdf.h'): env['usenetcdf'] = 0  
 if env['usenetcdf'] and not conf.CheckFunc('nc_open'): env['usenetcdf'] = 0  
   
 # Add NetCDF to environment env if it was found  
 if env['usenetcdf']:  
   env = conf.Finish()  
   env.Append(CPPDEFINES = ['USE_NETCDF'])  
 else:  
   conf.Finish()  
   
 ############ PAPI (optional) ###################################  
   
 # Start a new configure environment that reflects what we've already found  
 conf = Configure(clone_env(env))  
   
 if env['usepapi']:  
   conf.env.AppendUnique(CPPPATH = [env['papi_path']])  
   conf.env.AppendUnique(LIBPATH = [env['papi_lib_path']])  
   conf.env.AppendUnique(LIBS    = [env['papi_libs']])  
   conf.env.PrependENVPath('LD_LIBRARY_PATH', env['papi_lib_path'])  # The wrapper script needs to find these libs  
   #ensure that our path entries remain at the front  
   conf.env.PrependENVPath('PYTHONPATH', prefix)  
   conf.env.PrependENVPath('LD_LIBRARY_PATH', env['libinstall'])  
   
 if env['usepapi'] and not conf.CheckCHeader('papi.h'): env['usepapi'] = 0  
 if env['usepapi'] and not conf.CheckFunc('PAPI_start_counters'): env['usepapi'] = 0  
   
 # Add PAPI to environment env if it was found  
 if env['usepapi']:  
   env = conf.Finish()  
   env.Append(CPPDEFINES = ['BLOCKPAPI'])  
 else:  
   conf.Finish()  
   
 ############ MKL (optional) ####################################  
   
 # Start a new configure environment that reflects what we've already found  
 conf = Configure(clone_env(env))  
   
 if env['usemkl']:  
   conf.env.AppendUnique(CPPPATH = [env['mkl_path']])  
   conf.env.AppendUnique(LIBPATH = [env['mkl_lib_path']])  
   conf.env.AppendUnique(LIBS    = [env['mkl_libs']])  
   conf.env.PrependENVPath('LD_LIBRARY_PATH', env['mkl_lib_path'])   # The wrapper script needs to find these libs  
   #ensure that our path entries remain at the front  
   conf.env.PrependENVPath('PYTHONPATH', prefix)  
   conf.env.PrependENVPath('LD_LIBRARY_PATH', env['libinstall'])  
   
 if env['usemkl'] and not conf.CheckCHeader('mkl_solver.h'): env['usemkl'] = 0  
 if env['usemkl'] and not conf.CheckFunc('pardiso_'): env['usemkl'] = 0  
   
 # Add MKL to environment env if it was found  
 if env['usemkl']:  
   env = conf.Finish()  
   env.Append(CPPDEFINES = ['MKL'])  
 else:  
   conf.Finish()  
   
 ############ UMFPACK (optional) ################################  
   
 # Start a new configure environment that reflects what we've already found  
 conf = Configure(clone_env(env))  
   
 if env['useumfpack']:  
   conf.env.AppendUnique(CPPPATH = [env['ufc_path']])  
   conf.env.AppendUnique(CPPPATH = [env['umf_path']])  
   conf.env.AppendUnique(LIBPATH = [env['umf_lib_path']])  
   conf.env.AppendUnique(LIBS    = [env['umf_libs']])  
   conf.env.AppendUnique(CPPPATH = [env['amd_path']])  
   conf.env.AppendUnique(LIBPATH = [env['amd_lib_path']])  
   conf.env.AppendUnique(LIBS    = [env['amd_libs']])  
   conf.env.AppendUnique(CPPPATH = [env['blas_path']])  
   conf.env.AppendUnique(LIBPATH = [env['blas_lib_path']])  
   conf.env.AppendUnique(LIBS    = [env['blas_libs']])  
   conf.env.PrependENVPath('LD_LIBRARY_PATH', env['umf_lib_path'])   # The wrapper script needs to find these libs  
   conf.env.PrependENVPath('LD_LIBRARY_PATH', env['amd_lib_path'])   # The wrapper script needs to find these libs  
   conf.env.PrependENVPath('LD_LIBRARY_PATH', env['blas_lib_path'])  # The wrapper script needs to find these libs  
   #ensure that our path entries remain at the front  
   conf.env.PrependENVPath('PYTHONPATH', prefix)  
   conf.env.PrependENVPath('LD_LIBRARY_PATH', env['libinstall'])  
   
 if env['useumfpack'] and not conf.CheckFunc('umfpack_di_symbolic'): env['useumfpack'] = 0  
 if env['useumfpack'] and not conf.CheckCHeader('umfpack.h'): env['useumfpack'] = 0  
 # if env['useumfpack'] and not conf.CheckFunc('daxpy'): env['useumfpack'] = 0 # this does not work on shake73?  
   
 # Add UMFPACK to environment env if it was found  
 if env['useumfpack']:  
   env = conf.Finish()  
   env.Append(CPPDEFINES = ['UMFPACK'])  
 else:  
   conf.Finish()  
   
 ############ Silo (optional) ###################################  
   
 if env['usesilo']:  
   conf = Configure(clone_env(env))  
   conf.env.AppendUnique(CPPPATH = [env['silo_path']])  
   conf.env.AppendUnique(LIBPATH = [env['silo_lib_path']])  
   conf.env.AppendUnique(LIBS = [env['silo_libs']])  
   if not conf.CheckCHeader('silo.h'): env['usesilo'] = 0  
   if not conf.CheckFunc('DBMkDir'): env['usesilo'] = 0  
   conf.Finish()  
   
 # Add the path to Silo to environment env if it was found.  
 # Note that we do not add the libs since they are only needed for the  
 # escriptreader library and tools.  
 if env['usesilo']:  
   env.AppendUnique(CPPPATH = [env['silo_path']])  
   env.AppendUnique(LIBPATH = [env['silo_lib_path']])  
   env.Append(CPPDEFINES = ['HAVE_SILO'])  
   
 ############ Add the compiler flags ############################  
   
 # Enable debug by choosing either cc_debug or cc_optim  
 if env['usedebug']:  
   env.Append(CCFLAGS        = env['cc_debug'])  
   env.Append(CCFLAGS        = env['omp_debug'])  
 else:  
   env.Append(CCFLAGS        = env['cc_optim'])  
   env.Append(CCFLAGS        = env['omp_optim'])  
   
 # Always use cc_flags  
 env.Append(CCFLAGS      = env['cc_flags'])  
 env.Append(LIBS         = [env['omp_libs']])  
435    
436    ######## numpy (required)
437    
438  ############ Add some custom builders ##########################  try:
439        from numpy import identity
440    except ImportError:
441        print("Cannot import numpy, you need to set your PYTHONPATH and probably %s"%LD_LIBRARY_PATH_KEY)
442        Exit(1)
443    
444  py_builder = Builder(action = scons_extensions.build_py, suffix = '.pyc', src_suffix = '.py', single_source=True)  ######## CppUnit (required for tests)
 env.Append(BUILDERS = {'PyCompile' : py_builder});  
445    
446  runUnitTest_builder = Builder(action = scons_extensions.runUnitTest, suffix = '.passed', src_suffix=env['PROGSUFFIX'], single_source=True)  try:
447  env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder});      cppunit_inc_path,cppunit_lib_path=findLibWithHeader(env, env['cppunit_libs'], 'cppunit/TestFixture.h', env['cppunit_prefix'], lang='c++')
448        env.AppendUnique(CPPPATH = [cppunit_inc_path])
449        env.AppendUnique(LIBPATH = [cppunit_lib_path])
450        env.PrependENVPath(LD_LIBRARY_PATH_KEY, cppunit_lib_path)
451        env['cppunit']=True
452    except:
453        env['cppunit']=False
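findLibWithHeader() is provided by site_init.py (imported near the top) and is not part of this diff. Judging from how it is called in the dependency checks, a minimal sketch of such a helper could look like the following; the search strategy and error handling here are assumptions, not the actual implementation:

    # sketch only -- the real implementation lives in site_init.py
    def findLibWithHeader(env, libs, header, prefix, lang='c++'):
        # assumed layout: <prefix>/include and <prefix>/lib
        inc_path = os.path.join(prefix, 'include')
        lib_path = os.path.join(prefix, 'lib')
        if not os.path.isfile(os.path.join(inc_path, header)):
            raise RuntimeError("Cannot find header '%s' under %s" % (header, prefix))
        return inc_path, lib_path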
454    
455  runPyUnitTest_builder = Builder(action = scons_extensions.runPyUnitTest, suffix = '.passed', src_suffic='.py', single_source=True)  ######## VTK (optional)
 env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder});  
456    
457  ############ MPI (optional) ####################################  if env['pyvisi']:
458        try:
459            import vtk
460            env['pyvisi'] = True
461        except ImportError:
462            print("Cannot import vtk, disabling pyvisi.")
463            env['pyvisi'] = False
464    
465    ######## netCDF (optional)
466    
467    netcdf_inc_path=''
468    netcdf_lib_path=''
469    if env['netcdf']:
470        netcdf_inc_path,netcdf_lib_path=findLibWithHeader(env, env['netcdf_libs'], 'netcdf.h', env['netcdf_prefix'], lang='c++')
471        env.AppendUnique(CPPPATH = [netcdf_inc_path])
472        env.AppendUnique(LIBPATH = [netcdf_lib_path])
473        env.AppendUnique(LIBS = env['netcdf_libs'])
474        env.PrependENVPath(LD_LIBRARY_PATH_KEY, netcdf_lib_path)
475        env.Append(CPPDEFINES = ['USE_NETCDF'])
476    
477    ######## PAPI (optional)
478    
479    papi_inc_path=''
480    papi_lib_path=''
481    if env['papi']:
482        papi_inc_path,papi_lib_path=findLibWithHeader(env, env['papi_libs'], 'papi.h', env['papi_prefix'], lang='c')
483        env.AppendUnique(CPPPATH = [papi_inc_path])
484        env.AppendUnique(LIBPATH = [papi_lib_path])
485        env.AppendUnique(LIBS = env['papi_libs'])
486        env.PrependENVPath(LD_LIBRARY_PATH_KEY, papi_lib_path)
487        env.Append(CPPDEFINES = ['BLOCKPAPI'])
488    
489    ######## MKL (optional)
490    
491    mkl_inc_path=''
492    mkl_lib_path=''
493    if env['mkl']:
494        mkl_inc_path,mkl_lib_path=findLibWithHeader(env, env['mkl_libs'], 'mkl_solver.h', env['mkl_prefix'], lang='c')
495        env.AppendUnique(CPPPATH = [mkl_inc_path])
496        env.AppendUnique(LIBPATH = [mkl_lib_path])
497        env.AppendUnique(LIBS = env['mkl_libs'])
498        env.PrependENVPath(LD_LIBRARY_PATH_KEY, mkl_lib_path)
499        env.Append(CPPDEFINES = ['MKL'])
500    
501    ######## UMFPACK (optional)
502    
503    umfpack_inc_path=''
504    umfpack_lib_path=''
505    if env['umfpack']:
506        umfpack_inc_path,umfpack_lib_path=findLibWithHeader(env, env['umfpack_libs'], 'umfpack.h', env['umfpack_prefix'], lang='c')
507        env.AppendUnique(CPPPATH = [umfpack_inc_path])
508        env.AppendUnique(LIBPATH = [umfpack_lib_path])
509        env.AppendUnique(LIBS = env['umfpack_libs'])
510        env.PrependENVPath(LD_LIBRARY_PATH_KEY, umfpack_lib_path)
511        env.Append(CPPDEFINES = ['UMFPACK'])
512    
513    ######## LAPACK (optional)
514    
515    if env['lapack']=='mkl' and not env['mkl']:
516        print("mkl_lapack requires MKL!")
517        Exit(1)
518    
519    env['uselapack'] = env['lapack']!='none'
520    lapack_inc_path=''
521    lapack_lib_path=''
522    if env['uselapack']:
523        header='clapack.h'
524        if env['lapack']=='mkl':
525            env.AppendUnique(CPPDEFINES = ['MKL_LAPACK'])
526            header='mkl_lapack.h'
527        lapack_inc_path,lapack_lib_path=findLibWithHeader(env, env['lapack_libs'], header, env['lapack_prefix'], lang='c')
528        env.AppendUnique(CPPPATH = [lapack_inc_path])
529        env.AppendUnique(LIBPATH = [lapack_lib_path])
530        env.AppendUnique(LIBS = env['lapack_libs'])
531        env.Append(CPPDEFINES = ['USE_LAPACK'])
532    
533    ######## Silo (optional)
534    
535    silo_inc_path=''
536    silo_lib_path=''
537    if env['silo']:
538        silo_inc_path,silo_lib_path=findLibWithHeader(env, env['silo_libs'], 'silo.h', env['silo_prefix'], lang='c')
539        env.AppendUnique(CPPPATH = [silo_inc_path])
540        env.AppendUnique(LIBPATH = [silo_lib_path])
541        # Note that we do not add the libs since they are only needed for the
542        # weipa library and tools.
543        #env.AppendUnique(LIBS = [env['silo_libs']])
544    
545    ######## VSL random numbers (optional)
546    if env['vsl_random']:
547        env.Append(CPPDEFINES = ['MKLRANDOM'])
548    
549    ######## VisIt (optional)
550    
551    visit_inc_path=''
552    visit_lib_path=''
553    if env['visit']:
554        visit_inc_path,visit_lib_path=findLibWithHeader(env, env['visit_libs'], 'VisItControlInterface_V2.h', env['visit_prefix'], lang='c')
555        env.AppendUnique(CPPPATH = [visit_inc_path])
556        env.AppendUnique(LIBPATH = [visit_lib_path])
557    
558    ######## MPI (optional)
559    
560    env['usempi'] = env['mpi']!='none'
561    mpi_inc_path=''
562    mpi_lib_path=''
563    if env['usempi']:
564        mpi_inc_path,mpi_lib_path=findLibWithHeader(env, env['mpi_libs'], 'mpi.h', env['mpi_prefix'], lang='c')
565        env.AppendUnique(CPPPATH = [mpi_inc_path])
566        env.AppendUnique(LIBPATH = [mpi_lib_path])
567        env.AppendUnique(LIBS = env['mpi_libs'])
568        env.PrependENVPath(LD_LIBRARY_PATH_KEY, mpi_lib_path)
569        env.Append(CPPDEFINES = ['ESYS_MPI', 'MPI_NO_CPPBIND', 'MPICH_IGNORE_CXX_SEEK'])
570        # NetCDF 4.1 defines MPI_Comm et al. if MPI_INCLUDED is not defined!
571        # On the other hand MPT and OpenMPI don't define the latter so we have to
572        # do that here
573        if env['netcdf'] and env['mpi'] in ['MPT','OPENMPI']:
574            env.Append(CPPDEFINES = ['MPI_INCLUDED'])
575    
576    ######## BOOMERAMG (optional)
577    
578    if env['mpi'] == 'none': env['boomeramg'] = False
579    
580    boomeramg_inc_path=''
581    boomeramg_lib_path=''
582    if env['boomeramg']:
583        boomeramg_inc_path,boomeramg_lib_path=findLibWithHeader(env, env['boomeramg_libs'], 'HYPRE.h', env['boomeramg_prefix'], lang='c')
584        env.AppendUnique(CPPPATH = [boomeramg_inc_path])
585        env.AppendUnique(LIBPATH = [boomeramg_lib_path])
586        env.AppendUnique(LIBS = env['boomeramg_libs'])
587        env.PrependENVPath(LD_LIBRARY_PATH_KEY, boomeramg_lib_path)
588        env.Append(CPPDEFINES = ['BOOMERAMG'])
589    
590    ######## ParMETIS (optional)
591    
592    if not env['usempi']: env['parmetis'] = False
593    
594    parmetis_inc_path=''
595    parmetis_lib_path=''
596    if env['parmetis']:
597        parmetis_inc_path,parmetis_lib_path=findLibWithHeader(env, env['parmetis_libs'], 'parmetis.h', env['parmetis_prefix'], lang='c')
598        env.AppendUnique(CPPPATH = [parmetis_inc_path])
599        env.AppendUnique(LIBPATH = [parmetis_lib_path])
600        env.AppendUnique(LIBS = env['parmetis_libs'])
601        env.PrependENVPath(LD_LIBRARY_PATH_KEY, parmetis_lib_path)
602        env.Append(CPPDEFINES = ['USE_PARMETIS'])
603    
604  # Create a modified environment for MPI programs (identical to env if usempi=no)  ######## gmsh (optional, for tests)
 env_mpi = clone_env(env)  
605    
606  # Start a new configure environment that reflects what we've already found  try:
607  conf = Configure(clone_env(env_mpi))      import subprocess
608        p=subprocess.Popen(['gmsh', '-info'], stderr=subprocess.PIPE)
609        _,e=p.communicate()
610        if e.split().count("MPI"):
611            env['gmsh']='m'
612        else:
613            env['gmsh']='s'
614    except OSError:
615        env['gmsh']=False
616    
617    ######## PDFLaTeX (for documentation)
618    if 'PDF' in dir(env) and '.tex' in env.PDF.builder.src_suffixes(env):
619        env['pdflatex']=True
620    else:
621        env['pdflatex']=False
622    
623  if env_mpi['usempi']:  ######################## Summarize our environment ###########################
624    conf.env.AppendUnique(CPPPATH = [env_mpi['mpi_path']])  
625    conf.env.AppendUnique(LIBPATH = [env_mpi['mpi_lib_path']])  # keep some of our install paths first in the list for the unit tests
626    conf.env.AppendUnique(LIBS    = [env_mpi['mpi_libs']])  env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])
627    conf.env.PrependENVPath('LD_LIBRARY_PATH', env['mpi_lib_path'])   # The wrapper script needs to find these libs  env.PrependENVPath('PYTHONPATH', prefix)
628    #ensure that our path entries remain at the front  env['ENV']['ESCRIPT_ROOT'] = prefix
   conf.env.PrependENVPath('PYTHONPATH', prefix)  
   conf.env.PrependENVPath('LD_LIBRARY_PATH', env['libinstall'])  
   
 if env_mpi['usempi'] and not conf.CheckCHeader('mpi.h'): env_mpi['usempi'] = 0  
 if env_mpi['usempi'] and not conf.CheckFunc('MPI_Init'): env_mpi['usempi'] = 0  
   
 # Add MPI to environment env_mpi if it was found  
 if env_mpi['usempi']:  
   env_mpi = conf.Finish()  
   env_mpi.Append(CPPDEFINES = ['PASO_MPI', 'MPI_NO_CPPBIND', env_mpi['MPICH_IGNORE_CXX_SEEK']])  
 else:  
   conf.Finish()  
   
 env['usempi'] = env_mpi['usempi']  
   
 ############ ParMETIS (optional) ###############################  
   
 # Start a new configure environment that reflects what we've already found  
 conf = Configure(clone_env(env_mpi))  
   
 if not env_mpi['usempi']: env_mpi['useparmetis'] = 0  
   
 if env_mpi['useparmetis']:  
   conf.env.AppendUnique(CPPPATH = [env_mpi['parmetis_path']])  
   conf.env.AppendUnique(LIBPATH = [env_mpi['parmetis_lib_path']])  
   conf.env.AppendUnique(LIBS    = [env_mpi['parmetis_libs']])  
   conf.env.PrependENVPath('LD_LIBRARY_PATH', env['parmetis_lib_path'])  # The wrapper script needs to find these libs  
   #ensure that our path entries remain at the front  
   conf.env.PrependENVPath('PYTHONPATH', prefix)  
   conf.env.PrependENVPath('LD_LIBRARY_PATH', env['libinstall'])  
   
 if env_mpi['useparmetis'] and not conf.CheckCHeader('parmetis.h'): env_mpi['useparmetis'] = 0  
 if env_mpi['useparmetis'] and not conf.CheckFunc('ParMETIS_V3_PartGeomKway'): env_mpi['useparmetis'] = 0  
   
 # Add ParMETIS to environment env_mpi if it was found  
 if env_mpi['useparmetis']:  
   env_mpi = conf.Finish()  
   env_mpi.Append(CPPDEFINES = ['USE_PARMETIS'])  
 else:  
   conf.Finish()  
   
 env['useparmetis'] = env_mpi['useparmetis']  
   
 ############ Now we switch on Warnings as errors ###############  
   
 #this needs to be done after configuration because the scons test files have warnings in them  
   
 if ((fatalwarning != "") and (env['usewarnings'])):  
   env.Append(CCFLAGS        = fatalwarning)  
   env_mpi.Append(CCFLAGS        = fatalwarning)  
   
 ############ Summarize our environment #########################  
   
 print ""  
 print "Summary of configuration (see ./config.log for information)"  
 print " Using python libraries"  
 print " Using numarray"  
 print " Using boost"  
 if env['usenetcdf']: print "    Using NetCDF"  
 else: print "   Not using NetCDF"  
 if env['usevtk']: print "   Using VTK"  
 else: print "   Not using VTK"  
 if env['usemkl']: print "   Using MKL"  
 else: print "   Not using MKL"  
 if env['useumfpack']: print "   Using UMFPACK"  
 else: print "   Not using UMFPACK"  
 if env['usesilo']: print "  Using Silo"  
 else: print "   Not using Silo"  
 if env['useopenmp']: print "    Using OpenMP"  
 else: print "   Not using OpenMP"  
 if env['usempi']: print "   Using MPI"  
 else: print "   Not using MPI"  
 if env['useparmetis']: print "  Using ParMETIS"  
 else: print "   Not using ParMETIS (requires MPI)"  
 if env['usepapi']: print "  Using PAPI"  
 else: print "   Not using PAPI"  
 if env['usedebug']: print " Compiling for debug"  
 else: print "   Not compiling for debug"  
 print " Installing in", prefix  
 if ((fatalwarning != "") and (env['usewarnings'])): print " Treating warnings as errors"  
 else: print "   Not treating warnings as errors"  
 print ""  
   
 ############ Delete option-dependent files #####################  
   
 Execute(Delete(env['libinstall'] + "/Compiled.with.debug"))  
 Execute(Delete(env['libinstall'] + "/Compiled.with.mpi"))  
 Execute(Delete(env['libinstall'] + "/Compiled.with.openmp"))  
 Execute(Delete(env['libinstall'] + "pyversion"))  
 if not env['usempi']: Execute(Delete(env['libinstall'] + "/pythonMPI"))  
627    env.PrependENVPath('PYTHONPATH', prefix)
628    env['ENV']['ESCRIPT_ROOT'] = prefix
629    
630    if not env['verbose']:
631        env['CCCOMSTR'] = "Compiling $TARGET"
632        env['CXXCOMSTR'] = "Compiling $TARGET"
633        env['SHCCCOMSTR'] = "Compiling $TARGET"
634        env['SHCXXCOMSTR'] = "Compiling $TARGET"
635        env['ARCOMSTR'] = "Linking $TARGET"
636        env['LINKCOMSTR'] = "Linking $TARGET"
637        env['SHLINKCOMSTR'] = "Linking $TARGET"
638        env['PDFLATEXCOMSTR'] = "Building $TARGET from LaTeX input $SOURCES"
639        env['BIBTEXCOMSTR'] = "Generating bibliography $TARGET"
640        env['MAKEINDEXCOMSTR'] = "Generating index $TARGET"
641        env['PDFLATEXCOMSTR'] = "Building $TARGET from LaTeX input $SOURCES"
642        #Progress(['Checking -\r', 'Checking \\\r', 'Checking |\r', 'Checking /\r'], interval=17)
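The *COMSTR settings above replace SCons's default full-command echo with a short per-target message, and the commented-out Progress call would show a spinner instead. The same mechanism works in any SConstruct; a minimal sketch with a hypothetical hello.c:

# Sketch: quiet compile/link lines unless `scons verbose=1` is given
env = Environment()
if not int(ARGUMENTS.get('verbose', 0)):
    env['CCCOMSTR']   = "Compiling $TARGET"
    env['LINKCOMSTR'] = "Linking $TARGET"
env.Program('hello', ['hello.c'])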
643    
644    print("")
645    print("*** Config Summary (see config.log and lib/buildvars for details) ***")
646    print("Escript/Finley revision %s"%global_revision)
647    print("  Install prefix:  %s"%env['prefix'])
648    print("          Python:  %s"%sysconfig.PREFIX)
649    print("           boost:  %s"%env['boost_prefix'])
650    print("           numpy:  YES")
651    if env['usempi']:
652        print("             MPI:  YES (flavour: %s)"%env['mpi'])
653    else:
654        print("             MPI:  DISABLED")
655    if env['uselapack']:
656        print("          LAPACK:  YES (flavour: %s)"%env['lapack'])
657    else:
658        print("          LAPACK:  DISABLED")
659    d_list=[]
660    e_list=[]
661    for i in 'debug','openmp','netcdf','parmetis','papi','mkl','umfpack','boomeramg','silo','visit':
662        if env[i]: e_list.append(i)
663        else: d_list.append(i)
664    for i in e_list:
665        print("%16s:  YES"%i)
666    for i in d_list:
667        print("%16s:  DISABLED"%i)
668    if env['cppunit']:
669        print("         CppUnit:  FOUND")
670    else:
671        print("         CppUnit:  NOT FOUND")
672    if env['gmsh']=='m':
673        print("            gmsh:  FOUND, MPI-ENABLED")
674    elif env['gmsh']=='s':
675        print("            gmsh:  FOUND")
676    else:
677        print("            gmsh:  NOT FOUND")
678    print("      vsl_random:  %s"%env['vsl_random'])
679        
680    if ((fatalwarning != '') and (env['werror'])):
681        print("  Treating warnings as errors")
682    else:
683        print("  NOT treating warnings as errors")
684    print("")
685    
############ Build the subdirectories ##########################

from grouptest import *

TestGroups=[]

Export(
  ["env",
   "env_mpi",
   "clone_env",
   "IS_WINDOWS_PLATFORM",
   "TestGroups"
   ]
  )

env.SConscript(dirs = ['tools/CppUnitTest/src'], build_dir='build/$PLATFORM/tools/CppUnitTest', duplicate=0)
env.SConscript(dirs = ['tools/libescriptreader/src'], build_dir='build/$PLATFORM/tools/libescriptreader', duplicate=0)
env.SConscript(dirs = ['paso/src'], build_dir='build/$PLATFORM/paso', duplicate=0)
env.SConscript(dirs = ['escript/src'], build_dir='build/$PLATFORM/escript', duplicate=0)
env.SConscript(dirs = ['esysUtils/src'], build_dir='build/$PLATFORM/esysUtils', duplicate=0)
env.SConscript(dirs = ['finley/src'], build_dir='build/$PLATFORM/finley', duplicate=0)
env.SConscript(dirs = ['modellib/py_src'], build_dir='build/$PLATFORM/modellib', duplicate=0)
env.SConscript(dirs = ['doc'], build_dir='build/$PLATFORM/doc', duplicate=0)
env.SConscript(dirs = ['pyvisi/py_src'], build_dir='build/$PLATFORM/pyvisi', duplicate=0)
env.SConscript(dirs = ['pycad/py_src'], build_dir='build/$PLATFORM/pycad', duplicate=0)
env.SConscript(dirs = ['pythonMPI/src'], build_dir='build/$PLATFORM/pythonMPI', duplicate=0)
env.SConscript(dirs = ['scripts'], build_dir='build/$PLATFORM/scripts', duplicate=0)
env.SConscript(dirs = ['paso/profiling'], build_dir='build/$PLATFORM/paso/profiling', duplicate=0)

############ Remember what optimizations we used ###############

remember_list = []

if env['usedebug']:
  remember_list += env.Command(env['libinstall'] + "/Compiled.with.debug", None, Touch('$TARGET'))

if env['usempi']:
  remember_list += env.Command(env['libinstall'] + "/Compiled.with.mpi", None, Touch('$TARGET'))

if env['omp_optim'] != '':
  remember_list += env.Command(env['libinstall'] + "/Compiled.with.openmp", None, Touch('$TARGET'))

env.Alias('remember_options', remember_list)

686    ####################### Configure the subdirectories #########################
687    
688    from grouptest import *
689    
690    TestGroups=[]
691    
692    # keep an environment without warnings-as-errors
693    dodgy_env=env.Clone()
694    
695    # now add warnings-as-errors
696    # because the scons test files have warnings in them
697    if ((fatalwarning != '') and (env['werror'])):
698        env.Append(CCFLAGS = fatalwarning)
699    
700    Export(
701      ['env',
702       'dodgy_env',
703       'IS_WINDOWS',
704       'TestGroups'
705      ]
706    )
707    
708    env.SConscript(dirs = ['tools/escriptconvert'], variant_dir='$BUILD_DIR/$PLATFORM/tools/escriptconvert', duplicate=0)
709    env.SConscript(dirs = ['paso/src'], variant_dir='$BUILD_DIR/$PLATFORM/paso', duplicate=0)
710    env.SConscript(dirs = ['weipa/src'], variant_dir='$BUILD_DIR/$PLATFORM/weipa', duplicate=0)
711    env.SConscript(dirs = ['escript/src'], variant_dir='$BUILD_DIR/$PLATFORM/escript', duplicate=0)
712    env.SConscript(dirs = ['esysUtils/src'], variant_dir='$BUILD_DIR/$PLATFORM/esysUtils', duplicate=0)
713    env.SConscript(dirs = ['dudley/src'], variant_dir='$BUILD_DIR/$PLATFORM/dudley', duplicate=0)
714    env.SConscript(dirs = ['finley/src'], variant_dir='$BUILD_DIR/$PLATFORM/finley', duplicate=0)
715    env.SConscript(dirs = ['modellib/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/modellib', duplicate=0)
716    env.SConscript(dirs = ['doc'], variant_dir='$BUILD_DIR/$PLATFORM/doc', duplicate=0)
717    env.SConscript(dirs = ['pyvisi/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/pyvisi', duplicate=0)
718    env.SConscript(dirs = ['pycad/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/pycad', duplicate=0)
719    env.SConscript(dirs = ['pythonMPI/src'], variant_dir='$BUILD_DIR/$PLATFORM/pythonMPI', duplicate=0)
720    env.SConscript(dirs = ['paso/profiling'], variant_dir='$BUILD_DIR/$PLATFORM/paso/profiling', duplicate=0)
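Each SConscript call above compiles one subproject out of its source tree into a variant (build) directory, picking up the exported construction environment. A minimal sketch of the pattern, with a hypothetical mylib subdirectory:

# SConstruct (sketch): export the environment and descend into a subproject
env = Environment()
Export('env')
SConscript('mylib/SConscript', variant_dir='build/mylib', duplicate=0)

# mylib/SConscript (sketch): re-import the environment and declare targets
Import('env')
env.StaticLibrary('mylib', Glob('*.c'))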
721    
722    ######################## Populate the buildvars file #########################
723    
724    # remove obsolete file
725    if not env['usempi']:
726        Execute(Delete(os.path.join(env['libinstall'], 'pythonMPI')))
727        Execute(Delete(os.path.join(env['libinstall'], 'pythonMPIredirect')))
728    
729    # Try to extract the boost version from version.hpp
730    boosthpp=open(os.path.join(boost_inc_path, 'boost', 'version.hpp'))
731    boostversion='unknown'
732    try:
733        for line in boosthpp:
734            ver=re.match(r'#define BOOST_VERSION (\d+)',line)
735            if ver:
736                boostversion=ver.group(1)
737    except StopIteration:
738        pass
739    boosthpp.close()
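BOOST_VERSION is a single integer encoding patch + 100*minor + 100000*major (e.g. 104601 for Boost 1.46.1), so the value captured above can be expanded into a dotted version string if that is ever needed; a small sketch:

# Sketch: turn the captured integer into a dotted string such as "1.46.1"
if boostversion != 'unknown':
    v = int(boostversion)
    boost_dotted = "%d.%d.%d" % (v // 100000, v // 100 % 1000, v % 100)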
740    
741    buildvars=open(os.path.join(env['libinstall'], 'buildvars'), 'w')
742    buildvars.write("svn_revision="+str(global_revision)+"\n")
743    buildvars.write("prefix="+prefix+"\n")
744    buildvars.write("cc="+env['CC']+"\n")
745    buildvars.write("cxx="+env['CXX']+"\n")
746    buildvars.write("python="+sys.executable+"\n")
747    buildvars.write("python_version="+str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2])+"\n")
748    buildvars.write("boost_inc_path="+boost_inc_path+"\n")
749    buildvars.write("boost_lib_path="+boost_lib_path+"\n")
750    buildvars.write("boost_version="+boostversion+"\n")
751    buildvars.write("debug=%d\n"%int(env['debug']))
752    buildvars.write("openmp=%d\n"%int(env['openmp']))
753    buildvars.write("mpi=%s\n"%env['mpi'])
754    buildvars.write("mpi_inc_path=%s\n"%mpi_inc_path)
755    buildvars.write("mpi_lib_path=%s\n"%mpi_lib_path)
756    buildvars.write("lapack=%s\n"%env['lapack'])
757    buildvars.write("pyvisi=%d\n"%env['pyvisi'])
758    buildvars.write("vsl_random=%d\n"%int(env['vsl_random']))
759    for i in 'netcdf','parmetis','papi','mkl','umfpack','boomeramg','silo','visit':
760        buildvars.write("%s=%d\n"%(i, int(env[i])))
761        if env[i]:
762            buildvars.write("%s_inc_path=%s\n"%(i, eval(i+'_inc_path')))
763            buildvars.write("%s_lib_path=%s\n"%(i, eval(i+'_lib_path')))
764    buildvars.close()
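The buildvars file written above is a flat key=value listing, one setting per line, so other tools can recover the build configuration with a few lines of Python; an illustrative sketch:

# Sketch: read lib/buildvars back into a dictionary
buildinfo = {}
for line in open(os.path.join(env['libinstall'], 'buildvars')):
    key, _, value = line.rstrip('\n').partition('=')
    buildinfo[key] = value
print(buildinfo.get('boost_version'))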
765    
766    ################### Targets to build and install libraries ###################
767    
############ Record python interpreter version ##############

if not IS_WINDOWS_PLATFORM:
  versionstring="Python "+str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2])
  os.system("echo "+versionstring+" > "+env['libinstall']+"/pyversion")

############ Targets to build and install libraries ############

target_init = env.Command(env['pyinstall']+'/__init__.py', None, Touch('$TARGET'))
env.Alias('target_init', [target_init])

# The headers have to be installed prior to build in order to satisfy #include <paso/Common.h>
env.Alias('build_esysUtils', ['target_install_esysUtils_headers', 'target_esysUtils_a'])
env.Alias('install_esysUtils', ['build_esysUtils', 'target_install_esysUtils_a'])

env.Alias('build_paso', ['target_install_paso_headers', 'target_paso_a'])
env.Alias('install_paso', ['build_paso', 'target_install_paso_a'])

env.Alias('build_escript', ['target_install_escript_headers', 'target_escript_so', 'target_escriptcpp_so'])
env.Alias('install_escript', ['build_escript', 'target_install_escript_so', 'target_install_escriptcpp_so', 'target_install_escript_py'])

env.Alias('build_finley', ['target_install_finley_headers', 'target_finley_so', 'target_finleycpp_so'])
env.Alias('install_finley', ['build_finley', 'target_install_finley_so', 'target_install_finleycpp_so', 'target_install_finley_py'])

768    target_init = env.Command(os.path.join(env['pyinstall'],'__init__.py'), None, Touch('$TARGET'))
769    env.Alias('target_init', [target_init])
770    # delete buildvars upon cleanup
771    env.Clean('target_init', os.path.join(env['libinstall'], 'buildvars'))
772    
773    # The headers have to be installed prior to build in order to satisfy
774    # #include <paso/Common.h>
775    env.Alias('build_esysUtils', ['install_esysUtils_headers', 'build_esysUtils_lib'])
776    env.Alias('install_esysUtils', ['build_esysUtils', 'install_esysUtils_lib'])
777    
778    env.Alias('build_paso', ['install_paso_headers', 'build_paso_lib'])
779    env.Alias('install_paso', ['build_paso', 'install_paso_lib'])
780    
781    env.Alias('build_escript', ['install_escript_headers', 'build_escript_lib', 'build_escriptcpp_lib'])
782    env.Alias('install_escript', ['build_escript', 'install_escript_lib', 'install_escriptcpp_lib', 'install_escript_py'])
783    
784    env.Alias('build_dudley', ['install_dudley_headers', 'build_dudley_lib', 'build_dudleycpp_lib'])
785    env.Alias('install_dudley', ['build_dudley', 'install_dudley_lib', 'install_dudleycpp_lib', 'install_dudley_py'])
786    
787    env.Alias('build_finley', ['install_finley_headers', 'build_finley_lib', 'build_finleycpp_lib'])
788    env.Alias('install_finley', ['build_finley', 'install_finley_lib', 'install_finleycpp_lib', 'install_finley_py'])
789    
790    env.Alias('build_weipa', ['install_weipa_headers', 'build_weipa_lib', 'build_weipacpp_lib'])
791    env.Alias('install_weipa', ['build_weipa', 'install_weipa_lib', 'install_weipacpp_lib', 'install_weipa_py'])
792    
793    env.Alias('build_escriptreader', ['install_weipa_headers', 'build_escriptreader_lib'])
794    env.Alias('install_escriptreader', ['build_escriptreader', 'install_escriptreader_lib'])
795    
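As the comment above notes, the package headers must already be installed when compilation starts so that cross-package includes such as #include <paso/Common.h> resolve. In generic SCons terms that ordering can be expressed with Install plus an explicit Depends; a sketch with a hypothetical pkg project:

# Sketch (hypothetical 'pkg' project): headers go in before the library is compiled
env = Environment(PREFIX='/tmp/pkg-prefix', CPPPATH=['$PREFIX/include'])
headers = env.Install('$PREFIX/include/pkg', Glob('pkg/src/*.h'))
lib     = env.SharedLibrary('pkg', Glob('pkg/src/*.c'))
env.Depends(lib, headers)              # compile only after the headers are in place
env.Alias('build_pkg', [headers, lib])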
# Now gather all the above into a couple easy targets: build_all and install_all
build_all_list = []
build_all_list += ['build_esysUtils']
build_all_list += ['build_paso']
build_all_list += ['build_escript']
build_all_list += ['build_finley']
if env['usempi']:       build_all_list += ['target_pythonMPI_exe']
if not IS_WINDOWS_PLATFORM: build_all_list += ['target_escript_wrapper']
if env['usesilo']:  build_all_list += ['target_escript2silo']
env.Alias('build_all', build_all_list)

install_all_list = []
install_all_list += ['target_init']
install_all_list += ['install_esysUtils']
install_all_list += ['install_paso']
install_all_list += ['install_escript']
install_all_list += ['install_finley']
install_all_list += ['target_install_pyvisi_py']
install_all_list += ['target_install_modellib_py']
install_all_list += ['target_install_pycad_py']
if env['usempi']:       install_all_list += ['target_install_pythonMPI_exe']
if not IS_WINDOWS_PLATFORM: install_all_list += ['target_install_escript_wrapper']
if env['usesilo']:  install_all_list += ['target_install_escript2silo']
install_all_list += ['remember_options']
env.Alias('install_all', install_all_list)

# Default target is install
env.Default('install_all')

796    # Now gather all the above into some easy targets: build_all and install_all
797    build_all_list = []
798    build_all_list += ['build_esysUtils']
799    build_all_list += ['build_paso']
800    build_all_list += ['build_escript']
801    build_all_list += ['build_dudley']
802    build_all_list += ['build_finley']
803    build_all_list += ['build_weipa']
804    if not IS_WINDOWS: build_all_list += ['build_escriptreader']
805    if env['usempi']:   build_all_list += ['build_pythonMPI']
806    build_all_list += ['build_escriptconvert']
807    env.Alias('build_all', build_all_list)
808    
809    install_all_list = []
810    install_all_list += ['target_init']
811    install_all_list += ['install_esysUtils']
812    install_all_list += ['install_paso']
813    install_all_list += ['install_escript']
814    install_all_list += ['install_dudley']
815    install_all_list += ['install_finley']
816    install_all_list += ['install_weipa']
817    if not IS_WINDOWS: install_all_list += ['install_escriptreader']
818    install_all_list += ['install_pyvisi_py']
819    install_all_list += ['install_modellib_py']
820    install_all_list += ['install_pycad_py']
821    if env['usempi']:   install_all_list += ['install_pythonMPI']
822    install_all_list += ['install_escriptconvert']
823    env.Alias('install_all', install_all_list)
824    
825    # Default target is install
826    env.Default('install_all')
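With the aliases collected this way, `scons build_all` compiles everything, `scons install_all` also installs it, plain `scons` is equivalent to install_all because of the Default call, and `scons -c install_all` cleans up again. The Alias/Default mechanism in isolation, with a hypothetical hello program:

# Sketch: named targets via Alias, one of them made the default
env = Environment()
prog = env.Program('hello', ['hello.c'])
installed = env.Install('/tmp/demo-bin', prog)
env.Alias('build_all', prog)
env.Alias('install_all', installed)
env.Default('install_all')             # plain `scons` now behaves like `scons install_all`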
827    
############ Targets to build and run the test suite ###########

env.Alias('build_cppunittest', ['target_install_cppunittest_headers', 'target_cppunittest_a'])
env.Alias('install_cppunittest', ['build_cppunittest', 'target_install_cppunittest_a'])
env.Alias('run_tests', ['install_all', 'target_install_cppunittest_a'])
env.Alias('all_tests', ['install_all', 'target_install_cppunittest_a', 'run_tests', 'py_tests'])

env.Alias('build_full',['install_all','build_tests','build_py_tests'])

828    ################## Targets to build and run the test suite ###################
829    
830    test_msg = env.Command('.dummy.', None, '@echo "Cannot run C/C++ unit tests, CppUnit not found!";exit 1')
831    if not env['cppunit']:
832        env.Alias('run_tests', test_msg)
833    env.Alias('run_tests', ['install_all'])
834    env.Alias('all_tests', ['install_all', 'run_tests', 'py_tests'])
835    env.Alias('build_full',['install_all','build_tests','build_py_tests'])
836    env.Alias('build_PasoTests','$BUILD_DIR/$PLATFORM/paso/profiling/PasoTests')
837    
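The test_msg construct above is a small trick: when CppUnit is absent, 'run_tests' is aliased to a pseudo-target whose only action prints an explanation and exits non-zero, so the failure is self-describing rather than silent. In isolation the idiom looks like this (SomeLib is a hypothetical dependency):

# Sketch: alias a target to a deliberately failing explanatory command
env = Environment()
msg = env.Command('.missing-dep.', None,
                  '@echo "Cannot run tests, SomeLib was not found!"; exit 1')
env.Alias('run_tests', msg)            # `scons run_tests` prints the message and fails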
############ Targets to build the documentation ################

env.Alias('docs', ['examples_tarfile', 'examples_zipfile', 'api_epydoc', 'api_doxygen', 'guide_pdf', 'guide_html'])

if not IS_WINDOWS_PLATFORM:
   try:
    utest=open("utest.sh","w")
    build_platform=os.name      #Sometimes Mac python says it is posix
    if (build_platform=='posix') and platform.system()=="Darwin":
        build_platform='darwin'
    utest.write(GroupTest.makeHeader(build_platform))
    for tests in TestGroups:
        utest.write(tests.makeString())
    utest.close()
    os.chmod("utest.sh",stat.S_IRWXU|stat.S_IRGRP|stat.S_IXGRP|stat.S_IROTH|stat.S_IXOTH)
    print "utest.sh written"
   except IOError:
    print "Error attempting to write unittests file."
    sys.exit(1)

838    ##################### Targets to build the documentation #####################
839    
840    env.Alias('api_epydoc','install_all')
841    env.Alias('docs', ['examples_tarfile', 'examples_zipfile', 'api_epydoc', 'api_doxygen', 'user_pdf', 'install_pdf', 'cookbook_pdf'])
842    env.Alias('release_prep', ['docs', 'install_all'])
843    
844    if not IS_WINDOWS:
845        try:
846            utest=open('utest.sh','w')
847            utest.write(GroupTest.makeHeader(env['PLATFORM']))
848            for tests in TestGroups:
849                utest.write(tests.makeString())
850            utest.close()
851            Execute(Chmod('utest.sh', 0755))
852            print("Generated utest.sh.")
853        except IOError:
854            print("Error attempting to write unittests file.")
855            Exit(1)
856    
857        # delete utest.sh upon cleanup
858        env.Clean('target_init', 'utest.sh')
859    
860        # Make sure that the escript wrapper is in place
861        if not os.path.isfile(os.path.join(env['bininstall'], 'run-escript')):
862            print("Copying escript wrapper.")
863            Execute(Copy(os.path.join(env['bininstall'],'run-escript'), 'bin/run-escript'))
