--- trunk/SConstruct 2009/02/08 23:40:22 2254 +++ trunk/SConstruct 2011/09/07 06:16:08 3585 @@ -1,7 +1,6 @@ - ######################################################## # -# Copyright (c) 2003-2008 by University of Queensland +# Copyright (c) 2003-2010 by University of Queensland # Earth Systems Science Computational Center (ESSCC) # http://www.uq.edu.au/esscc # @@ -11,689 +10,782 @@ # ######################################################## +EnsureSConsVersion(0,98,1) +EnsurePythonVersion(2,5) -EnsureSConsVersion(0,96,91) -EnsurePythonVersion(2,3) - -import sys, os, re, socket - -# Add our extensions -if os.path.isdir('scons'): sys.path.append('scons') -import scons_extensions - -# Use /usr/lib64 if available, else /usr/lib -usr_lib = '/usr/lib' -if os.path.isfile('/usr/lib64/libc.so'): usr_lib = '/usr/lib64' - -# The string python2.4 or python2.5 -python_version = 'python%s.%s' % (sys.version_info[0], sys.version_info[1]) +import sys, os, platform, re +from distutils import sysconfig +from site_init import * + +# Version number to check for in options file. Increment when new features are +# added or existing options changed. +REQUIRED_OPTS_VERSION=200 # MS Windows support, many thanks to PH -IS_WINDOWS_PLATFORM = (os.name== "nt") +IS_WINDOWS = (os.name == 'nt') -prefix = ARGUMENTS.get('prefix', Dir('#.').abspath) +########################## Determine options file ############################ +# 1. command line +# 2. scons/_options.py +# 3. 
name as part of a cluster +options_file=ARGUMENTS.get('options_file', None) +if not options_file: + ext_dir = os.path.join(os.getcwd(), 'scons') + hostname = platform.node().split('.')[0] + for name in hostname, effectiveName(hostname): + mangledhostname = re.sub('[^0-9a-zA-Z]', '_', hostname) + options_file = os.path.join(ext_dir, mangledhostname+'_options.py') + if os.path.isfile(options_file): break -# Read configuration options from file scons/_options.py -hostname = re.sub("[^0-9a-zA-Z]", "_", socket.gethostname().split('.')[0]) -tmp = os.path.join("scons",hostname+"_options.py") -options_file = ARGUMENTS.get('options_file', tmp) if not os.path.isfile(options_file): - options_file = False - print "Options file not found (expected '%s')" % tmp -else: - print "Options file is", options_file - -# Load options file and command-line arguments -opts = Options(options_file, ARGUMENTS) - -############ Load build options ################################ - -opts.AddOptions( -# Where to install esys stuff - ('prefix', 'where everything will be installed', Dir('#.').abspath), - ('incinstall', 'where the esys headers will be installed', os.path.join(Dir('#.').abspath,'include')), - ('bininstall', 'where the esys binaries will be installed', os.path.join(prefix,'bin')), - ('libinstall', 'where the esys libraries will be installed', os.path.join(prefix,'lib')), - ('pyinstall', 'where the esys python modules will be installed', os.path.join(prefix,'esys')), -# Compilation options - BoolOption('dodebug', 'For backwards compatibility', 'no'), - BoolOption('usedebug', 'Do you want a debug build?', 'no'), - BoolOption('usevtk', 'Do you want to use VTK?', 'yes'), - ('options_file', 'File of paths/options. 
Default: scons/_options.py', options_file), - ('win_cc_name', 'windows C compiler name if needed', 'msvc'), - # The strings -DDEFAULT_ get replaced by scons/_options.py or by defaults below - ('cc_flags', 'C compiler flags to use', '-DEFAULT_1'), - ('cc_optim', 'C compiler optimization flags to use', '-DEFAULT_2'), - ('cc_debug', 'C compiler debug flags to use', '-DEFAULT_3'), - ('omp_optim', 'OpenMP compiler flags to use (Release build)', '-DEFAULT_4'), - ('omp_debug', 'OpenMP compiler flags to use (Debug build)', '-DEFAULT_5'), - ('omp_libs', 'OpenMP compiler libraries to link with', '-DEFAULT_6'), - ('cc_extra', 'Extra C/C++ flags', ''), + print("\nWARNING:\nOptions file %s" % options_file) + print("not found! Default options will be used which is most likely suboptimal.") + print("It is recommended that you copy one of the TEMPLATE files in the scons/") + print("subdirectory and customize it to your needs.\n") + options_file = None + +############################### Build options ################################ + +default_prefix='/usr' +mpi_flavours=('none', 'MPT', 'MPICH', 'MPICH2', 'OPENMPI', 'INTELMPI') +lapack_flavours=('none', 'clapack', 'mkl') + +vars = Variables(options_file, ARGUMENTS) +vars.AddVariables( + PathVariable('options_file', 'Path to options file', options_file, PathVariable.PathIsFile), + PathVariable('prefix', 'Installation prefix', Dir('#.').abspath, PathVariable.PathIsDirCreate), + PathVariable('build_dir', 'Top-level build directory', Dir('#/build').abspath, PathVariable.PathIsDirCreate), + BoolVariable('verbose', 'Output full compile/link lines', False), +# Compiler/Linker options + ('cc', 'Path to C compiler', 'default'), + ('cxx', 'Path to C++ compiler', 'default'), + ('cc_flags', 'Base C/C++ compiler flags', 'default'), + ('cc_optim', 'Additional C/C++ flags for a non-debug build', 'default'), + ('cc_debug', 'Additional C/C++ flags for a debug build', 'default'), + ('cc_extra', 'Extra C compiler flags', ''), + ('cxx_extra', 'Extra 
C++ compiler flags', ''), ('ld_extra', 'Extra linker flags', ''), - ('sys_libs', 'System libraries to link with', []), - ('ar_flags', 'Static library archiver flags to use', ''), - BoolOption('useopenmp', 'Compile parallel version using OpenMP', 'yes'), - BoolOption('usepedantic', 'Compile with -pedantic if using gcc', 'no'), - BoolOption('usewarnings','Compile with warnings as errors if using gcc','yes'), -# Python - ('python_path', 'Path to Python includes', '/usr/include/'+python_version), - ('python_lib_path', 'Path to Python libs', usr_lib), - ('python_libs', 'Python libraries to link with', [python_version]), - ('python_cmd', 'Python command', 'python'), -# Boost - ('boost_path', 'Path to Boost includes', '/usr/include'), - ('boost_lib_path', 'Path to Boost libs', usr_lib), - ('boost_libs', 'Boost libraries to link with', ['boost_python']), -# NetCDF - BoolOption('usenetcdf', 'switch on/off the usage of netCDF', 'yes'), - ('netCDF_path', 'Path to netCDF includes', '/usr/include'), - ('netCDF_lib_path', 'Path to netCDF libs', usr_lib), - ('netCDF_libs', 'netCDF C++ libraries to link with', ['netcdf_c++', 'netcdf']), -# MPI - BoolOption('useMPI', 'For backwards compatibility', 'no'), - BoolOption('usempi', 'Compile parallel version using MPI', 'no'), - ('MPICH_IGNORE_CXX_SEEK', 'name of macro to ignore MPI settings of C++ SEEK macro (for MPICH)' , 'MPICH_IGNORE_CXX_SEEK'), - ('mpi_path', 'Path to MPI includes', '/usr/include'), - ('mpi_run', 'mpirun name' , 'mpiexec -np 1'), - ('mpi_lib_path', 'Path to MPI libs (needs to be added to the LD_LIBRARY_PATH)', usr_lib), - ('mpi_libs', 'MPI libraries to link with (needs to be shared!)', ['mpich' , 'pthread', 'rt']), -# ParMETIS - BoolOption('useparmetis', 'Compile parallel version using ParMETIS', 'yes'), - ('parmetis_path', 'Path to ParMETIS includes', '/usr/include'), - ('parmetis_lib_path', 'Path to ParMETIS library', usr_lib), - ('parmetis_libs', 'ParMETIS library to link with', ['parmetis', 'metis']), -# PAPI - 
BoolOption('usepapi', 'switch on/off the usage of PAPI', 'no'), - ('papi_path', 'Path to PAPI includes', '/usr/include'), - ('papi_lib_path', 'Path to PAPI libs', usr_lib), + BoolVariable('werror','Treat compiler warnings as errors', True), + BoolVariable('debug', 'Compile with debug flags', False), + BoolVariable('openmp', 'Compile parallel version using OpenMP', False), + ('omp_flags', 'OpenMP compiler flags', 'default'), + ('omp_ldflags', 'OpenMP linker flags', 'default'), +# Mandatory libraries + ('boost_prefix', 'Prefix/Paths of boost installation', default_prefix), + ('boost_libs', 'Boost libraries to link with', ['boost_python-mt']), +# Optional libraries and options + EnumVariable('mpi', 'Compile parallel version using MPI flavour', 'none', allowed_values=mpi_flavours), + ('mpi_prefix', 'Prefix/Paths of MPI installation', default_prefix), + ('mpi_libs', 'MPI shared libraries to link with', ['mpi']), + BoolVariable('netcdf', 'Enable netCDF file support', False), + ('netcdf_prefix', 'Prefix/Paths of netCDF installation', default_prefix), + ('netcdf_libs', 'netCDF libraries to link with', ['netcdf_c++', 'netcdf']), + BoolVariable('parmetis', 'Enable ParMETIS (requires MPI)', False), + ('parmetis_prefix', 'Prefix/Paths of ParMETIS installation', default_prefix), + ('parmetis_libs', 'ParMETIS libraries to link with', ['parmetis', 'metis']), + BoolVariable('papi', 'Enable PAPI', False), + ('papi_prefix', 'Prefix/Paths to PAPI installation', default_prefix), ('papi_libs', 'PAPI libraries to link with', ['papi']), - BoolOption('papi_instrument_solver', 'use PAPI in Solver.c to instrument each iteration of the solver', False), -# MKL - BoolOption('usemkl', 'switch on/off the usage of MKL', 'no'), - ('mkl_path', 'Path to MKL includes', '/sw/sdev/cmkl/10.0.2.18/include'), - ('mkl_lib_path', 'Path to MKL libs', '/sw/sdev/cmkl/10.0.2.18/lib/em64t'), - ('mkl_libs', 'MKL libraries to link with', ['mkl_solver', 'mkl_em64t', 'guide', 'pthread']), -# UMFPACK - 
BoolOption('useumfpack', 'switch on/off the usage of UMFPACK', 'no'), - ('ufc_path', 'Path to UFconfig includes', '/usr/include/suitesparse'), - ('umf_path', 'Path to UMFPACK includes', '/usr/include/suitesparse'), - ('umf_lib_path', 'Path to UMFPACK libs', usr_lib), - ('umf_libs', 'UMFPACK libraries to link with', ['umfpack']), -# Silo - BoolOption('usesilo', 'switch on/off the usage of Silo', 'yes'), - ('silo_path', 'Path to Silo includes', '/usr/include'), - ('silo_lib_path', 'Path to Silo libs', usr_lib), + BoolVariable('papi_instrument_solver', 'Use PAPI to instrument each iteration of the solver', False), + BoolVariable('mkl', 'Enable the Math Kernel Library', False), + ('mkl_prefix', 'Prefix/Paths to MKL installation', default_prefix), + ('mkl_libs', 'MKL libraries to link with', ['mkl_solver','mkl_em64t','guide','pthread']), + BoolVariable('umfpack', 'Enable UMFPACK', False), + ('umfpack_prefix', 'Prefix/Paths to UMFPACK installation', default_prefix), + ('umfpack_libs', 'UMFPACK libraries to link with', ['umfpack']), + BoolVariable('boomeramg', 'Enable BoomerAMG', False), + ('boomeramg_prefix', 'Prefix/Paths to BoomerAMG installation', default_prefix), + ('boomeramg_libs', 'BoomerAMG libraries to link with', ['boomeramg']), + EnumVariable('lapack', 'Set LAPACK flavour', 'none', allowed_values=lapack_flavours), + ('lapack_prefix', 'Prefix/Paths to LAPACK installation', default_prefix), + ('lapack_libs', 'LAPACK libraries to link with', []), + BoolVariable('silo', 'Enable the Silo file format in weipa', False), + ('silo_prefix', 'Prefix/Paths to Silo installation', default_prefix), ('silo_libs', 'Silo libraries to link with', ['siloh5', 'hdf5']), -# AMD (used by UMFPACK) - ('amd_path', 'Path to AMD includes', '/usr/include/suitesparse'), - ('amd_lib_path', 'Path to AMD libs', usr_lib), - ('amd_libs', 'AMD libraries to link with', ['amd']), -# BLAS (used by UMFPACK) - ('blas_path', 'Path to BLAS includes', '/usr/include/suitesparse'), - ('blas_lib_path', 
'Path to BLAS libs', usr_lib), - ('blas_libs', 'BLAS libraries to link with', ['blas']), -# An option for specifying the compiler tools set (see windows branch). - ('tools_names', 'allow control over the tools in the env setup', ['intelc']), -# finer control over library building, intel aggressive global optimisation -# works with dynamic libraries on windows. - ('share_esysUtils', 'control static or dynamic esysUtils lib', False), - ('share_paso', 'control static or dynamic paso lib', False) + BoolVariable('visit', 'Enable the VisIt simulation interface', False), + ('visit_prefix', 'Prefix/Paths to VisIt installation', default_prefix), + ('visit_libs', 'VisIt libraries to link with', ['simV2']), + BoolVariable('pyvisi', 'Enable pyvisi (deprecated, requires VTK module)', False), + BoolVariable('vsl_random', 'Use VSL from intel for random data', False), +# Advanced settings + #dudley_assemble_flags = -funroll-loops to actually do something + ('dudley_assemble_flags', 'compiler flags for some dudley optimisations', ''), + # To enable passing function pointers through python + BoolVariable('iknowwhatimdoing', 'Allow non-standard C', False), + # An option for specifying the compiler tools (see windows branch) + ('tools_names', 'Compiler tools to use', ['default']), + ('env_export', 'Environment variables to be passed to tools',[]), + EnumVariable('forcelazy', 'For testing use only - set the default value for autolazy', 'leave_alone', allowed_values=('leave_alone', 'on', 'off')), + EnumVariable('forcecollres', 'For testing use only - set the default value for force resolving collective ops', 'leave_alone', allowed_values=('leave_alone', 'on', 'off')), + # finer control over library building, intel aggressive global optimisation + # works with dynamic libraries on windows. 
+ ('share_esysutils', 'Build a dynamic esysUtils library', False), + ('share_paso', 'Build a dynamic paso library', False), + ('sys_libs', 'Extra libraries to link with', []), + ('escript_opts_version', 'Version of options file (do not specify on command line)'), ) -############ Specify which compilers to use #################### +##################### Create environment and help text ####################### -# intelc uses regular expressions improperly and emits a warning about -# failing to find the compilers. This warning can be safely ignored. +# Intel's compiler uses regular expressions improperly and emits a warning +# about failing to find the compilers. This warning can be safely ignored. -if IS_WINDOWS_PLATFORM: - env = Environment(options = opts) - env = Environment(tools = ['default'] + env['tools_names'], - options = opts) -else: - if socket.gethostname().split('.')[0] == 'service0': - env = Environment(tools = ['default', 'intelc'], options = opts) - elif os.uname()[4]=='ia64': - env = Environment(tools = ['default', 'intelc'], options = opts) - if env['CXX'] == 'icpc': - env['LINK'] = env['CXX'] # version >=9 of intel c++ compiler requires use of icpc to link in C++ runtimes (icc does not) - else: - env = Environment(tools = ['default'], options = opts) -Help(opts.GenerateHelpText(env)) - -############ Fill in compiler options if not set above ######### - -# Backwards compatibility: allow dodebug=yes and useMPI=yes -if env['dodebug']: env['usedebug'] = 1 -if env['useMPI']: env['usempi'] = 1 - -# Default compiler options (override allowed in hostname_options.py, but should not be necessary) -# For both C and C++ you get: cc_flags and either the optim flags or debug flags - -sysheaderopt = "" # how do we indicate that a header is a system header. Use "" for no action. 
- -if env["CC"] == "icc": - # Intel compilers - cc_flags = "-fPIC -ansi -wd161 -w1 -vec-report0 -DBLOCKTIMER -DCORE_ID1" - cc_optim = "-O3 -ftz -IPF_ftlacc- -IPF_fma -fno-alias" - cc_debug = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK" - omp_optim = "-openmp -openmp_report0" - omp_debug = "-openmp -openmp_report0" - omp_libs = ['guide', 'pthread'] - pedantic = "" - fatalwarning = "" # Switch to turn warnings into errors - sysheaderopt = "" -elif env["CC"] == "gcc": - # GNU C on any system - cc_flags = "-pedantic -Wall -fPIC -ansi -ffast-math -Wno-unknown-pragmas -DBLOCKTIMER -Wno-sign-compare -Wno-system-headers -Wno-long-long -Wno-strict-aliasing" -#the long long warning occurs on the Mac - cc_optim = "-O3" - cc_debug = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK" - omp_optim = "" - omp_debug = "" - omp_libs = [] - pedantic = "-pedantic-errors -Wno-long-long" - fatalwarning = "-Werror" - sysheaderopt = "-isystem " -elif env["CC"] == "cl": - # Microsoft Visual C on Windows - cc_flags = "/FD /EHsc /GR /wd4068 -D_USE_MATH_DEFINES -DDLL_NETCDF" - cc_optim = "/O2 /Op /MT /W3" - cc_debug = "/Od /RTC1 /MTd /ZI -DBOUNDS_CHECK" - omp_optim = "" - omp_debug = "" - omp_libs = [] - pedantic = "" - fatalwarning = "" - sysheaderopt = "" -elif env["CC"] == "icl": - # intel C on Windows, see windows_intelc_options.py for a start - pedantic = "" - fatalwarning = "" - sysheaderopt = "" - - -# If not specified in hostname_options.py then set them here -if env["cc_flags"] == "-DEFAULT_1": env['cc_flags'] = cc_flags -if env["cc_optim"] == "-DEFAULT_2": env['cc_optim'] = cc_optim -if env["cc_debug"] == "-DEFAULT_3": env['cc_debug'] = cc_debug -if env["omp_optim"] == "-DEFAULT_4": env['omp_optim'] = omp_optim -if env["omp_debug"] == "-DEFAULT_5": env['omp_debug'] = omp_debug -if env["omp_libs"] == "-DEFAULT_6": env['omp_libs'] = omp_libs - -# OpenMP is disabled if useopenmp=no or both variables omp_optim and omp_debug are empty -if not env["useopenmp"]: - env['omp_optim'] = "" - 
env['omp_debug'] = "" - env['omp_libs'] = [] - -if env['omp_optim'] == "" and env['omp_debug'] == "": env["useopenmp"] = 0 - -############ Copy environment variables into scons env ######### - -try: env['ENV']['OMP_NUM_THREADS'] = os.environ['OMP_NUM_THREADS'] -except KeyError: env['ENV']['OMP_NUM_THREADS'] = 1 - -try: env['ENV']['PATH'] = os.environ['PATH'] -except KeyError: pass - -try: env['ENV']['PYTHONPATH'] = os.environ['PYTHONPATH'] -except KeyError: pass - -try: env['ENV']['C_INCLUDE_PATH'] = os.environ['C_INCLUDE_PATH'] -except KeyError: pass - -try: env['ENV']['CPLUS_INCLUDE_PATH'] = os.environ['CPLUS_INCLUDE_PATH'] -except KeyError: pass - -try: env['ENV']['LD_LIBRARY_PATH'] = os.environ['LD_LIBRARY_PATH'] -except KeyError: pass - -try: env['ENV']['LIBRARY_PATH'] = os.environ['LIBRARY_PATH'] -except KeyError: pass - -try: env['ENV']['DISPLAY'] = os.environ['DISPLAY'] -except KeyError: pass +# PATH is needed so the compiler, linker and tools are found if they are not +# in default locations. +env = Environment(tools = ['default'], options = vars, + ENV = {'PATH': os.environ['PATH']}) +if env['tools_names'] != 'default': + env = Environment(tools = ['default'] + env['tools_names'], options = vars, + ENV = {'PATH' : os.environ['PATH']}) + +if options_file: + opts_valid=False + if 'escript_opts_version' in env.Dictionary() and \ + int(env['escript_opts_version']) >= REQUIRED_OPTS_VERSION: + opts_valid=True + if opts_valid: + print("Using options in %s." % options_file) + else: + print("\nOptions file %s" % options_file) + print("is outdated! 
Please update the file by examining one of the TEMPLATE") + print("files in the scons/ subdirectory and setting escript_opts_version to %d.\n"%REQUIRED_OPTS_VERSION) + Exit(1) + +# Generate help text (scons -h) +Help(vars.GenerateHelpText(env)) + +# Check for superfluous options +if len(vars.UnknownVariables())>0: + for k in vars.UnknownVariables(): + print("Unknown option '%s'" % k) + Exit(1) + +#################### Make sure install directories exist ##################### + +env['BUILD_DIR']=env['build_dir'] +prefix=Dir(env['prefix']).abspath +env['incinstall'] = os.path.join(prefix, 'include') +env['bininstall'] = os.path.join(prefix, 'bin') +env['libinstall'] = os.path.join(prefix, 'lib') +env['pyinstall'] = os.path.join(prefix, 'esys') +if not os.path.isdir(env['bininstall']): + os.makedirs(env['bininstall']) +if not os.path.isdir(env['libinstall']): + os.makedirs(env['libinstall']) +if not os.path.isdir(env['pyinstall']): + os.makedirs(env['pyinstall']) + +env.Append(CPPPATH = [env['incinstall']]) +env.Append(LIBPATH = [env['libinstall']]) + +################# Fill in compiler options if not set above ################## + +if env['cc'] != 'default': env['CC']=env['cc'] +if env['cxx'] != 'default': env['CXX']=env['cxx'] + +# version >=9 of intel C++ compiler requires use of icpc to link in C++ +# runtimes (icc does not) +if not IS_WINDOWS and os.uname()[4]=='ia64' and env['CXX']=='icpc': + env['LINK'] = env['CXX'] + +# default compiler/linker options +cc_flags = '' +cc_optim = '' +cc_debug = '' +omp_flags = '' +omp_ldflags = '' +fatalwarning = '' # switch to turn warnings into errors +sysheaderopt = '' # how to indicate that a header is a system header + +# env['CC'] might be a full path +cc_name=os.path.basename(env['CC']) + +if cc_name == 'icc': + # Intel compiler + cc_flags = "-std=c99 -fPIC -wd161 -w1 -vec-report0 -DBLOCKTIMER -DCORE_ID1" + cc_optim = "-O3 -ftz -IPF_ftlacc- -IPF_fma -fno-alias -ip" + cc_debug = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK" + 
omp_flags = "-openmp -openmp_report0" + omp_ldflags = "-openmp -openmp_report0 -lguide -lpthread" + fatalwarning = "-Werror" +elif cc_name[:3] == 'gcc': + # GNU C on any system + cc_flags = "-pedantic -Wall -fPIC -ffast-math -Wno-unknown-pragmas -DBLOCKTIMER -Wno-sign-compare -Wno-system-headers -Wno-long-long -Wno-strict-aliasing -finline-functions" + cc_optim = "-O3" + cc_debug = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK" + omp_flags = "-fopenmp" + omp_ldflags = "-fopenmp" + fatalwarning = "-Werror" + sysheaderopt = "-isystem" +elif cc_name == 'cl': + # Microsoft Visual C on Windows + cc_flags = "/EHsc /MD /GR /wd4068 /D_USE_MATH_DEFINES /DDLL_NETCDF" + cc_optim = "/O2 /Op /W3" + cc_debug = "/Od /RTCcsu /ZI /DBOUNDS_CHECK" + fatalwarning = "/WX" +elif cc_name == 'icl': + # Intel C on Windows + cc_flags = '/EHsc /GR /MD' + cc_optim = '/fast /Oi /W3 /Qssp /Qinline-factor- /Qinline-min-size=0 /Qunroll' + cc_debug = '/Od /RTCcsu /Zi /Y- /debug:all /Qtrapuv' + omp_flags = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel' + omp_ldflags = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel' + +# set defaults if not otherwise specified +if env['cc_flags'] == 'default': env['cc_flags'] = cc_flags +if env['cc_optim'] == 'default': env['cc_optim'] = cc_optim +if env['cc_debug'] == 'default': env['cc_debug'] = cc_debug +if env['omp_flags'] == 'default': env['omp_flags'] = omp_flags +if env['omp_ldflags'] == 'default': env['omp_ldflags'] = omp_ldflags +if env['cc_extra'] != '': env.Append(CFLAGS = env['cc_extra']) +if env['cxx_extra'] != '': env.Append(CXXFLAGS = env['cxx_extra']) +if env['ld_extra'] != '': env.Append(LINKFLAGS = env['ld_extra']) + +# set up the autolazy values +if env['forcelazy'] == 'on': + env.Append(CPPDEFINES=['FAUTOLAZYON']) +elif env['forcelazy'] == 'off': + env.Append(CPPDEFINES=['FAUTOLAZYOFF']) + +# set up the collective resolve values +if env['forcecollres'] == 'on': + env.Append(CPPDEFINES=['FRESCOLLECTON']) +elif env['forcecollres'] == 'off': 
+ env.Append(CPPDEFINES=['FRESCOLLECTOFF']) + +# allow non-standard C if requested +if env['iknowwhatimdoing']: + env.Append(CPPDEFINES=['IKNOWWHATIMDOING']) + +# Disable OpenMP if no flags provided +if env['openmp'] and env['omp_flags'] == '': + print("OpenMP requested but no flags provided - disabling OpenMP!") + env['openmp'] = False + +if env['openmp']: + env.Append(CCFLAGS = env['omp_flags']) + if env['omp_ldflags'] != '': env.Append(LINKFLAGS = env['omp_ldflags']) +else: + env['omp_flags']='' + env['omp_ldflags']='' + +# add debug/non-debug compiler flags +if env['debug']: + env.Append(CCFLAGS = env['cc_debug']) +else: + env.Append(CCFLAGS = env['cc_optim']) -try: env['ENV']['XAUTHORITY'] = os.environ['XAUTHORITY'] -except KeyError: pass +# always add cc_flags +env.Append(CCFLAGS = env['cc_flags']) -try: env['ENV']['HOME'] = os.environ['HOME'] -except KeyError: pass +# add system libraries +env.AppendUnique(LIBS = env['sys_libs']) -# Configure for test suite -env.PrependENVPath('PYTHONPATH', prefix) -env.PrependENVPath('LD_LIBRARY_PATH', env['libinstall']) - -env['ENV']['ESCRIPT_ROOT'] = prefix - -############ Set up paths for Configure() ###################### - -# Make a copy of an environment -# Use env.Clone if available, but fall back on env.Copy for older version of scons -def clone_env(env): - if 'Clone' in dir(env): return env.Clone() # scons-0.98 - else: return env.Copy() # scons-0.96 - -# Add cc option -I/trunk/include -env.Append(CPPPATH = [Dir('include')]) - -# Add cc option -L/trunk/lib -env.Append(LIBPATH = [Dir(env['libinstall'])]) - -if env['cc_extra'] != '': env.Append(CCFLAGS = env['cc_extra']) -if env['ld_extra'] != '': env.Append(LINKFLAGS = env['ld_extra']) +# Get the global Subversion revision number for the getVersion() method +try: + global_revision = os.popen('svnversion -n .').read() + global_revision = re.sub(':.*', '', global_revision) + global_revision = re.sub('[^0-9]', '', global_revision) + if global_revision == '': 
global_revision='-2' +except: + global_revision = '-1' +env['svn_revision']=global_revision +env.Append(CPPDEFINES=['SVN_VERSION='+global_revision]) + +if IS_WINDOWS: + if not env['share_esysutils']: + env.Append(CPPDEFINES = ['ESYSUTILS_STATIC_LIB']) + if not env['share_paso']: + env.Append(CPPDEFINES = ['PASO_STATIC_LIB']) + +###################### Copy required environment vars ######################## + +# Windows doesn't use LD_LIBRARY_PATH but PATH instead +if IS_WINDOWS: + LD_LIBRARY_PATH_KEY='PATH' + env['ENV']['LD_LIBRARY_PATH']='' +else: + LD_LIBRARY_PATH_KEY='LD_LIBRARY_PATH' + +# the following env variables are exported for the unit tests + +for key in 'OMP_NUM_THREADS', 'ESCRIPT_NUM_PROCS', 'ESCRIPT_NUM_NODES': + try: + env['ENV'][key] = os.environ[key] + except KeyError: + env['ENV'][key] = 1 + +env_export=env['env_export'] +env_export.extend(['ESCRIPT_NUM_THREADS','ESCRIPT_HOSTFILE','DISPLAY','XAUTHORITY','PATH','HOME','TMPDIR','TEMP','TMP']) + +for key in set(env_export): + try: + env['ENV'][key] = os.environ[key] + except KeyError: + pass -if env['usepedantic']: env.Append(CCFLAGS = pedantic) +try: + env.PrependENVPath(LD_LIBRARY_PATH_KEY, os.environ[LD_LIBRARY_PATH_KEY]) +except KeyError: + pass + +# these shouldn't be needed +#for key in 'C_INCLUDE_PATH','CPLUS_INCLUDE_PATH','LIBRARY_PATH': +# try: +# env['ENV'][key] = os.environ[key] +# except KeyError: +# pass -# MS Windows -if IS_WINDOWS_PLATFORM: - env.PrependENVPath('PATH', [env['boost_lib_path']]) - env.PrependENVPath('PATH', [env['libinstall']]) - if not env['share_esysUtils'] : - env.Append(CPPDEFINES = ['ESYSUTILS_STATIC_LIB']) - if not env['share_paso'] : - env.Append(CPPDEFINES = ['PASO_STATIC_LIB']) +try: + env['ENV']['PYTHONPATH'] = os.environ['PYTHONPATH'] +except KeyError: + pass - if env['usenetcdf']: - env.PrependENVPath('PATH', [env['netCDF_lib_path']]) +######################## Add some custom builders ############################ -env.Append(ARFLAGS = env['ar_flags']) 
+py_builder = Builder(action = build_py, suffix = '.pyc', src_suffix = '.py', single_source=True) +env.Append(BUILDERS = {'PyCompile' : py_builder}); -# Get the global Subversion revision number for getVersion() method -try: - global_revision = os.popen("svnversion -n .").read() - global_revision = re.sub(":.*", "", global_revision) - global_revision = re.sub("[^0-9]", "", global_revision) -except: - global_revision="-1" -if global_revision == "": global_revision="-2" -env.Append(CPPDEFINES = ["SVN_VERSION="+global_revision]) +runUnitTest_builder = Builder(action = runUnitTest, suffix = '.passed', src_suffix=env['PROGSUFFIX'], single_source=True) +env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder}); -############ numarray (required) ############################### +runPyUnitTest_builder = Builder(action = runPyUnitTest, suffix = '.passed', src_suffic='.py', single_source=True) +env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder}); -try: - from numarray import identity -except ImportError: - print "Cannot import numarray, you need to set your PYTHONPATH" - sys.exit(1) +epstopdfbuilder = Builder(action = eps2pdf, suffix='.pdf', src_suffix='.eps', single_source=True) +env.Append(BUILDERS = {'EpsToPDF' : epstopdfbuilder}); -############ C compiler (required) ############################# +############################ Dependency checks ############################### -# Create a Configure() environment for checking existence of required libraries and headers -conf = Configure(clone_env(env)) +# Create a Configure() environment to check for compilers and python +conf = Configure(env.Clone()) -# Test that the compiler is working -if not conf.CheckFunc('printf'): - print "Cannot run C compiler '%s' (or libc is missing)" % (env['CC']) - sys.exit(1) +######## Test that the compilers work + +if 'CheckCC' in dir(conf): # exists since scons 1.1.0 + if not conf.CheckCC(): + print("Cannot run C compiler '%s' (check config.log)" % (env['CC'])) + Exit(1) + if 
not conf.CheckCXX(): + print("Cannot run C++ compiler '%s' (check config.log)" % (env['CXX'])) + Exit(1) +else: + if not conf.CheckFunc('printf', language='c'): + print("Cannot run C compiler '%s' (check config.log)" % (env['CC'])) + Exit(1) + if not conf.CheckFunc('printf', language='c++'): + print("Cannot run C++ compiler '%s' (check config.log)" % (env['CXX'])) + Exit(1) if conf.CheckFunc('gethostname'): - conf.env.Append(CPPDEFINES = ['HAVE_GETHOSTNAME']) - -############ python libraries (required) ####################### + conf.env.Append(CPPDEFINES = ['HAVE_GETHOSTNAME']) +######## Python headers & library (required) -if not sysheaderopt =="": - conf.env.Append(CCFLAGS=sysheaderopt+env['python_path']) +python_inc_path=sysconfig.get_python_inc() +if IS_WINDOWS: + python_lib_path=os.path.join(sysconfig.get_config_var('prefix'), 'libs') +elif env['PLATFORM']=='darwin': + python_lib_path=sysconfig.get_config_var('LIBPL') +else: + python_lib_path=sysconfig.get_config_var('LIBDIR') +#python_libs=[sysconfig.get_config_var('LDLIBRARY')] # only on linux +if IS_WINDOWS: + python_libs=['python%s%s'%(sys.version_info[0], sys.version_info[1])] else: - conf.env.AppendUnique(CPPPATH = [env['python_path']]) + python_libs=['python'+sysconfig.get_python_version()] -conf.env.AppendUnique(LIBPATH = [env['python_lib_path']]) -conf.env.AppendUnique(LIBS = [env['python_libs']]) +if sysheaderopt == '': + conf.env.AppendUnique(CPPPATH = [python_inc_path]) +else: + conf.env.Append(CCFLAGS = [sysheaderopt, python_inc_path]) -conf.env.PrependENVPath('LD_LIBRARY_PATH', env['python_lib_path']) # The wrapper script needs to find these libs +conf.env.AppendUnique(LIBPATH = [python_lib_path]) +conf.env.AppendUnique(LIBS = python_libs) +# The wrapper script needs to find the libs +conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, python_lib_path) if not conf.CheckCHeader('Python.h'): - print "Cannot find python include files (tried 'Python.h' in directory %s)" % (env['python_path']) - sys.exit(1) 
-if not conf.CheckFunc('Py_Main'): - print "Cannot find python library method Py_Main (tried lib %s in directory %s)" % (env['python_libs'], env['python_lib_path']) - sys.exit(1) - -############ boost (required) ################################## - -if not sysheaderopt =="": - conf.env.Append(CCFLAGS=sysheaderopt+env['boost_path']+'boost') -else: - conf.env.AppendUnique(CPPPATH = [env['boost_path']]) - -conf.env.AppendUnique(LIBPATH = [env['boost_lib_path']]) -conf.env.AppendUnique(LIBS = [env['boost_libs']]) - -conf.env.PrependENVPath('LD_LIBRARY_PATH', env['boost_lib_path']) # The wrapper script needs to find these libs - -if not conf.CheckCXXHeader('boost/python.hpp'): - print "Cannot find boost include files (tried boost/python.hpp in directory %s)" % (env['boost_path']) - sys.exit(1) -if not conf.CheckFunc('PyObject_SetAttr'): - print "Cannot find boost library method PyObject_SetAttr (tried method PyObject_SetAttr in library %s in directory %s)" % (env['boost_libs'], env['boost_lib_path']) - sys.exit(1) + print("Cannot find python include files (tried 'Python.h' in directory %s)" % (python_inc_path)) + Exit(1) +if not conf.CheckFunc('Py_Exit'): + print("Cannot find python library method Py_Main (tried %s in directory %s)" % (python_libs, python_lib_path)) + Exit(1) # Commit changes to environment env = conf.Finish() -############ VTK (optional) #################################### - -if env['usevtk']: - try: - import vtk - env['usevtk'] = 1 - except ImportError: - env['usevtk'] = 0 +######## boost (required) -# Add VTK to environment env if it was found -if env['usevtk']: - env.Append(CPPDEFINES = ['USE_VTK']) +boost_inc_path,boost_lib_path=findLibWithHeader(env, env['boost_libs'], 'boost/python.hpp', env['boost_prefix'], lang='c++') +if sysheaderopt == '': + env.AppendUnique(CPPPATH = [boost_inc_path]) +else: + # This is required because we can't -isystem /usr/include since it breaks + # std includes + if os.path.normpath(boost_inc_path) == '/usr/include': + 
conf.env.Append(CCFLAGS=[sysheaderopt, os.path.join(boost_inc_path,'boost')]) + else: + env.Append(CCFLAGS=[sysheaderopt, boost_inc_path]) + +env.AppendUnique(LIBPATH = [boost_lib_path]) +env.AppendUnique(LIBS = env['boost_libs']) +env.PrependENVPath(LD_LIBRARY_PATH_KEY, boost_lib_path) -############ NetCDF (optional) ################################# +######## numpy (required) -conf = Configure(clone_env(env)) - -if env['usenetcdf']: - conf.env.AppendUnique(CPPPATH = [env['netCDF_path']]) - conf.env.AppendUnique(LIBPATH = [env['netCDF_lib_path']]) - conf.env.AppendUnique(LIBS = [env['netCDF_libs']]) - conf.env.PrependENVPath('LD_LIBRARY_PATH', env['netCDF_lib_path']) # The wrapper script needs to find these libs - -if env['usenetcdf'] and not conf.CheckCHeader('netcdf.h'): env['usenetcdf'] = 0 -if env['usenetcdf'] and not conf.CheckFunc('nc_open'): env['usenetcdf'] = 0 - -# Add NetCDF to environment env if it was found -if env['usenetcdf']: - env = conf.Finish() - env.Append(CPPDEFINES = ['USE_NETCDF']) -else: - conf.Finish() - -############ PAPI (optional) ################################### - -# Start a new configure environment that reflects what we've already found -conf = Configure(clone_env(env)) - -if env['usepapi']: - conf.env.AppendUnique(CPPPATH = [env['papi_path']]) - conf.env.AppendUnique(LIBPATH = [env['papi_lib_path']]) - conf.env.AppendUnique(LIBS = [env['papi_libs']]) - conf.env.PrependENVPath('LD_LIBRARY_PATH', env['papi_lib_path']) # The wrapper script needs to find these libs - -if env['usepapi'] and not conf.CheckCHeader('papi.h'): env['usepapi'] = 0 -if env['usepapi'] and not conf.CheckFunc('PAPI_start_counters'): env['usepapi'] = 0 - -# Add PAPI to environment env if it was found -if env['usepapi']: - env = conf.Finish() - env.Append(CPPDEFINES = ['BLOCKPAPI']) -else: - conf.Finish() - -############ MKL (optional) #################################### - -# Start a new configure environment that reflects what we've already found -conf = 
Configure(clone_env(env)) - -if env['usemkl']: - conf.env.AppendUnique(CPPPATH = [env['mkl_path']]) - conf.env.AppendUnique(LIBPATH = [env['mkl_lib_path']]) - conf.env.AppendUnique(LIBS = [env['mkl_libs']]) - conf.env.PrependENVPath('LD_LIBRARY_PATH', env['mkl_lib_path']) # The wrapper script needs to find these libs +try: + from numpy import identity +except ImportError: + print("Cannot import numpy, you need to set your PYTHONPATH and probably %s"%LD_LIBRARY_PATH_KEY) + Exit(1) -if env['usemkl'] and not conf.CheckCHeader('mkl_solver.h'): env['usemkl'] = 0 -if env['usemkl'] and not conf.CheckFunc('pardiso_'): env['usemkl'] = 0 +######## VTK (optional) -# Add MKL to environment env if it was found -if env['usemkl']: - env = conf.Finish() - env.Append(CPPDEFINES = ['MKL']) -else: - conf.Finish() +if env['pyvisi']: + try: + import vtk + env['pyvisi'] = True + except ImportError: + print("Cannot import vtk, disabling pyvisi.") + env['pyvisi'] = False + +######## netCDF (optional) + +netcdf_inc_path='' +netcdf_lib_path='' +if env['netcdf']: + netcdf_inc_path,netcdf_lib_path=findLibWithHeader(env, env['netcdf_libs'], 'netcdf.h', env['netcdf_prefix'], lang='c++') + env.AppendUnique(CPPPATH = [netcdf_inc_path]) + env.AppendUnique(LIBPATH = [netcdf_lib_path]) + env.AppendUnique(LIBS = env['netcdf_libs']) + env.PrependENVPath(LD_LIBRARY_PATH_KEY, netcdf_lib_path) + env.Append(CPPDEFINES = ['USE_NETCDF']) + +######## PAPI (optional) + +papi_inc_path='' +papi_lib_path='' +if env['papi']: + papi_inc_path,papi_lib_path=findLibWithHeader(env, env['papi_libs'], 'papi.h', env['papi_prefix'], lang='c') + env.AppendUnique(CPPPATH = [papi_inc_path]) + env.AppendUnique(LIBPATH = [papi_lib_path]) + env.AppendUnique(LIBS = env['papi_libs']) + env.PrependENVPath(LD_LIBRARY_PATH_KEY, papi_lib_path) + env.Append(CPPDEFINES = ['BLOCKPAPI']) + +######## MKL (optional) + +mkl_inc_path='' +mkl_lib_path='' +if env['mkl']: + mkl_inc_path,mkl_lib_path=findLibWithHeader(env, env['mkl_libs'], 
'mkl_solver.h', env['mkl_prefix'], lang='c') + env.AppendUnique(CPPPATH = [mkl_inc_path]) + env.AppendUnique(LIBPATH = [mkl_lib_path]) + env.AppendUnique(LIBS = env['mkl_libs']) + env.PrependENVPath(LD_LIBRARY_PATH_KEY, mkl_lib_path) + env.Append(CPPDEFINES = ['MKL']) + +######## UMFPACK (optional) + +umfpack_inc_path='' +umfpack_lib_path='' +if env['umfpack']: + umfpack_inc_path,umfpack_lib_path=findLibWithHeader(env, env['umfpack_libs'], 'umfpack.h', env['umfpack_prefix'], lang='c') + env.AppendUnique(CPPPATH = [umfpack_inc_path]) + env.AppendUnique(LIBPATH = [umfpack_lib_path]) + env.AppendUnique(LIBS = env['umfpack_libs']) + env.PrependENVPath(LD_LIBRARY_PATH_KEY, umfpack_lib_path) + env.Append(CPPDEFINES = ['UMFPACK']) + +######## LAPACK (optional) + +if env['lapack']=='mkl' and not env['mkl']: + print("mkl_lapack requires MKL!") + Exit(1) + +env['uselapack'] = env['lapack']!='none' +lapack_inc_path='' +lapack_lib_path='' +if env['uselapack']: + header='clapack.h' + if env['lapack']=='mkl': + env.AppendUnique(CPPDEFINES = ['MKL_LAPACK']) + header='mkl_lapack.h' + lapack_inc_path,lapack_lib_path=findLibWithHeader(env, env['lapack_libs'], header, env['lapack_prefix'], lang='c') + env.AppendUnique(CPPPATH = [lapack_inc_path]) + env.AppendUnique(LIBPATH = [lapack_lib_path]) + env.AppendUnique(LIBS = env['lapack_libs']) + env.Append(CPPDEFINES = ['USE_LAPACK']) + +######## Silo (optional) + +silo_inc_path='' +silo_lib_path='' +if env['silo']: + silo_inc_path,silo_lib_path=findLibWithHeader(env, env['silo_libs'], 'silo.h', env['silo_prefix'], lang='c') + env.AppendUnique(CPPPATH = [silo_inc_path]) + env.AppendUnique(LIBPATH = [silo_lib_path]) + # Note that we do not add the libs since they are only needed for the + # weipa library and tools. 
+    #env.AppendUnique(LIBS = [env['silo_libs']])
+
+######## VSL random numbers (optional)
+if env['vsl_random']:
+    env.Append(CPPDEFINES = ['MKLRANDOM'])
+
+######## VisIt (optional)
+
+visit_inc_path=''
+visit_lib_path=''
+if env['visit']:
+    visit_inc_path,visit_lib_path=findLibWithHeader(env, env['visit_libs'], 'VisItControlInterface_V2.h', env['visit_prefix'], lang='c')
+    env.AppendUnique(CPPPATH = [visit_inc_path])
+    env.AppendUnique(LIBPATH = [visit_lib_path])
+
+######## MPI (optional)
+
+env['usempi'] = env['mpi']!='none'
+mpi_inc_path=''
+mpi_lib_path=''
+if env['usempi']:
+    mpi_inc_path,mpi_lib_path=findLibWithHeader(env, env['mpi_libs'], 'mpi.h', env['mpi_prefix'], lang='c')
+    env.AppendUnique(CPPPATH = [mpi_inc_path])
+    env.AppendUnique(LIBPATH = [mpi_lib_path])
+    env.AppendUnique(LIBS = env['mpi_libs'])
+    env.PrependENVPath(LD_LIBRARY_PATH_KEY, mpi_lib_path)
+    env.Append(CPPDEFINES = ['ESYS_MPI', 'MPI_NO_CPPBIND', 'MPICH_IGNORE_CXX_SEEK'])
+    # NetCDF 4.1 defines MPI_Comm et al. if MPI_INCLUDED is not defined!
+    # On the other hand MPT and OpenMPI don't define the latter so we have to
+    # do that here
+    if env['netcdf'] and env['mpi'] in ['MPT','OPENMPI']:
+        env.Append(CPPDEFINES = ['MPI_INCLUDED'])
+
+######## BOOMERAMG (optional)
+
+#if env['boomeramg'] and env['mpi'] == 'none':
+#    print("boomeramg requires mpi!")
+#    Exit(1)
+if env['mpi'] == 'none': env['boomeramg'] = False
+
+boomeramg_inc_path=''
+boomeramg_lib_path=''
+if env['boomeramg']:
+    boomeramg_inc_path,boomeramg_lib_path=findLibWithHeader(env, env['boomeramg_libs'], 'HYPRE.h', env['boomeramg_prefix'], lang='c')
+    env.AppendUnique(CPPPATH = [boomeramg_inc_path])
+    env.AppendUnique(LIBPATH = [boomeramg_lib_path])
+    # Unlike the Silo case above, the libs ARE added here because they are
+    # required at link time. 
+ env.AppendUnique(LIBS = env['boomeramg_libs']) + env.PrependENVPath(LD_LIBRARY_PATH_KEY, boomeramg_lib_path) + env.Append(CPPDEFINES = ['BOOMERAMG']) + +######## ParMETIS (optional) + +if not env['usempi']: env['parmetis'] = False + +parmetis_inc_path='' +parmetis_lib_path='' +if env['parmetis']: + parmetis_inc_path,parmetis_lib_path=findLibWithHeader(env, env['parmetis_libs'], 'parmetis.h', env['parmetis_prefix'], lang='c') + env.AppendUnique(CPPPATH = [parmetis_inc_path]) + env.AppendUnique(LIBPATH = [parmetis_lib_path]) + env.AppendUnique(LIBS = env['parmetis_libs']) + env.PrependENVPath(LD_LIBRARY_PATH_KEY, parmetis_lib_path) + env.Append(CPPDEFINES = ['USE_PARMETIS']) -############ UMFPACK (optional) ################################ +######## gmsh (optional, for tests) -# Start a new configure environment that reflects what we've already found -conf = Configure(clone_env(env)) +try: + import subprocess + p=subprocess.Popen(['gmsh', '-info'], stderr=subprocess.PIPE) + _,e=p.communicate() + if e.split().count("MPI"): + env['gmsh']='m' + else: + env['gmsh']='s' +except OSError: + env['gmsh']=False -if env['useumfpack']: - conf.env.AppendUnique(CPPPATH = [env['ufc_path']]) - conf.env.AppendUnique(CPPPATH = [env['umf_path']]) - conf.env.AppendUnique(LIBPATH = [env['umf_lib_path']]) - conf.env.AppendUnique(LIBS = [env['umf_libs']]) - conf.env.AppendUnique(CPPPATH = [env['amd_path']]) - conf.env.AppendUnique(LIBPATH = [env['amd_lib_path']]) - conf.env.AppendUnique(LIBS = [env['amd_libs']]) - conf.env.AppendUnique(CPPPATH = [env['blas_path']]) - conf.env.AppendUnique(LIBPATH = [env['blas_lib_path']]) - conf.env.AppendUnique(LIBS = [env['blas_libs']]) - conf.env.PrependENVPath('LD_LIBRARY_PATH', env['umf_lib_path']) # The wrapper script needs to find these libs - conf.env.PrependENVPath('LD_LIBRARY_PATH', env['amd_lib_path']) # The wrapper script needs to find these libs - conf.env.PrependENVPath('LD_LIBRARY_PATH', env['blas_lib_path']) # The wrapper script needs to 
find these libs +######################## Summarize our environment ########################### -if env['useumfpack'] and not conf.CheckCHeader('umfpack.h'): env['useumfpack'] = 0 -if env['useumfpack'] and not conf.CheckFunc('umfpack_di_symbolic'): env['useumfpack'] = 0 -if env['useumfpack'] and not conf.CheckFunc('daxpy'): env['useumfpack'] = 0 # this does not work on shake73? +# keep some of our install paths first in the list for the unit tests +env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall']) +env.PrependENVPath('PYTHONPATH', prefix) +env['ENV']['ESCRIPT_ROOT'] = prefix -# Add UMFPACK to environment env if it was found -if env['useumfpack']: - env = conf.Finish() - env.Append(CPPDEFINES = ['UMFPACK']) +if not env['verbose']: + env['CCCOMSTR'] = "Compiling $TARGET" + env['CXXCOMSTR'] = "Compiling $TARGET" + env['SHCCCOMSTR'] = "Compiling $TARGET" + env['SHCXXCOMSTR'] = "Compiling $TARGET" + env['ARCOMSTR'] = "Linking $TARGET" + env['LINKCOMSTR'] = "Linking $TARGET" + env['SHLINKCOMSTR'] = "Linking $TARGET" + env['PDFLATEXCOMSTR'] = "Building $TARGET from LaTeX input $SOURCES" + env['BIBTEXCOMSTR'] = "Generating bibliography $TARGET" + env['MAKEINDEXCOMSTR'] = "Generating index $TARGET" + env['PDFLATEXCOMSTR'] = "Building $TARGET from LaTeX input $SOURCES" + #Progress(['Checking -\r', 'Checking \\\r', 'Checking |\r', 'Checking /\r'], interval=17) + +print("") +print("*** Config Summary (see config.log and lib/buildvars for details) ***") +print("Escript/Finley revision %s"%global_revision) +print(" Install prefix: %s"%env['prefix']) +print(" Python: %s"%sysconfig.PREFIX) +print(" boost: %s"%env['boost_prefix']) +print(" numpy: YES") +if env['usempi']: + print(" MPI: YES (flavour: %s)"%env['mpi']) else: - conf.Finish() - -############ Silo (optional) ################################### - -if env['usesilo']: - conf = Configure(clone_env(env)) - conf.env.AppendUnique(CPPPATH = [env['silo_path']]) - conf.env.AppendUnique(LIBPATH = [env['silo_lib_path']]) - 
conf.env.AppendUnique(LIBS = [env['silo_libs']]) - if not conf.CheckCHeader('silo.h'): env['usesilo'] = 0 - if not conf.CheckFunc('DBMkDir'): env['usesilo'] = 0 - conf.Finish() - -# Add the path to Silo to environment env if it was found. -# Note that we do not add the libs since they are only needed for the -# escriptreader library and tools. -if env['usesilo']: - env.AppendUnique(CPPPATH = [env['silo_path']]) - env.AppendUnique(LIBPATH = [env['silo_lib_path']]) - env.Append(CPPDEFINES = ['HAVE_SILO']) - -############ Add the compiler flags ############################ - -# Enable debug by choosing either cc_debug or cc_optim -if env['usedebug']: - env.Append(CCFLAGS = env['cc_debug']) - env.Append(CCFLAGS = env['omp_debug']) + print(" MPI: DISABLED") +if env['uselapack']: + print(" LAPACK: YES (flavour: %s)"%env['lapack']) +else: + print(" LAPACK: DISABLED") +d_list=[] +e_list=[] +for i in 'debug','openmp','netcdf','parmetis','papi','mkl','umfpack','boomeramg','silo','visit': + if env[i]: e_list.append(i) + else: d_list.append(i) +for i in e_list: + print("%16s: YES"%i) +for i in d_list: + print("%16s: DISABLED"%i) +if env['gmsh']=='m': + print(" gmsh: FOUND, MPI-ENABLED") +elif env['gmsh']=='s': + print(" gmsh: FOUND") +else: + print(" gmsh: NOT FOUND") +print(" vsl_random: %s"%env['vsl_random']) + +if ((fatalwarning != '') and (env['werror'])): + print(" Treating warnings as errors") else: - env.Append(CCFLAGS = env['cc_optim']) - env.Append(CCFLAGS = env['omp_optim']) - -# Always use cc_flags -env.Append(CCFLAGS = env['cc_flags']) -env.Append(LIBS = [env['omp_libs']]) - - -############ Add some custom builders ########################## - -py_builder = Builder(action = scons_extensions.build_py, suffix = '.pyc', src_suffix = '.py', single_source=True) -env.Append(BUILDERS = {'PyCompile' : py_builder}); - -runUnitTest_builder = Builder(action = scons_extensions.runUnitTest, suffix = '.passed', src_suffix=env['PROGSUFFIX'], single_source=True) 
-env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder}); - -runPyUnitTest_builder = Builder(action = scons_extensions.runPyUnitTest, suffix = '.passed', src_suffic='.py', single_source=True) -env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder}); - -############ MPI (optional) #################################### - -# Create a modified environment for MPI programs (identical to env if usempi=no) -env_mpi = clone_env(env) - -# Start a new configure environment that reflects what we've already found -conf = Configure(clone_env(env_mpi)) - -if env_mpi['usempi']: - conf.env.AppendUnique(CPPPATH = [env_mpi['mpi_path']]) - conf.env.AppendUnique(LIBPATH = [env_mpi['mpi_lib_path']]) - conf.env.AppendUnique(LIBS = [env_mpi['mpi_libs']]) - conf.env.PrependENVPath('LD_LIBRARY_PATH', env['mpi_lib_path']) # The wrapper script needs to find these libs - -if env_mpi['usempi'] and not conf.CheckCHeader('mpi.h'): env_mpi['usempi'] = 0 -if env_mpi['usempi'] and not conf.CheckFunc('MPI_Init'): env_mpi['usempi'] = 0 - -# Add MPI to environment env_mpi if it was found -if env_mpi['usempi']: - env_mpi = conf.Finish() - env_mpi.Append(CPPDEFINES = ['PASO_MPI', 'MPI_NO_CPPBIND', env_mpi['MPICH_IGNORE_CXX_SEEK']]) -else: - conf.Finish() - -env['usempi'] = env_mpi['usempi'] - -############ ParMETIS (optional) ############################### - -# Start a new configure environment that reflects what we've already found -conf = Configure(clone_env(env_mpi)) - -if not env_mpi['usempi']: env_mpi['useparmetis'] = 0 - -if env_mpi['useparmetis']: - conf.env.AppendUnique(CPPPATH = [env_mpi['parmetis_path']]) - conf.env.AppendUnique(LIBPATH = [env_mpi['parmetis_lib_path']]) - conf.env.AppendUnique(LIBS = [env_mpi['parmetis_libs']]) - conf.env.PrependENVPath('LD_LIBRARY_PATH', env['parmetis_lib_path']) # The wrapper script needs to find these libs - -if env_mpi['useparmetis'] and not conf.CheckCHeader('parmetis.h'): env_mpi['useparmetis'] = 0 -if env_mpi['useparmetis'] and not 
conf.CheckFunc('ParMETIS_V3_PartGeomKway'): env_mpi['useparmetis'] = 0 - -# Add ParMETIS to environment env_mpi if it was found -if env_mpi['useparmetis']: - env_mpi = conf.Finish() - env_mpi.Append(CPPDEFINES = ['USE_PARMETIS']) -else: - conf.Finish() - -env['useparmetis'] = env_mpi['useparmetis'] - -############ Now we switch on Warnings as errors ############### - -#this needs to be done after configuration because the scons test files have warnings in them - -if ((fatalwarning != "") and (env['usewarnings'])): - env.Append(CCFLAGS = fatalwarning) - env_mpi.Append(CCFLAGS = fatalwarning) - -############ Summarize our environment ######################### - -print "" -print "Summary of configuration (see ./config.log for information)" -print " Using python libraries" -print " Using numarray" -print " Using boost" -if env['usenetcdf']: print " Using NetCDF" -else: print " Not using NetCDF" -if env['usevtk']: print " Using VTK" -else: print " Not using VTK" -if env['usemkl']: print " Using MKL" -else: print " Not using MKL" -if env['useumfpack']: print " Using UMFPACK" -else: print " Not using UMFPACK" -if env['usesilo']: print " Using Silo" -else: print " Not using Silo" -if env['useopenmp']: print " Using OpenMP" -else: print " Not using OpenMP" -if env['usempi']: print " Using MPI" -else: print " Not using MPI" -if env['useparmetis']: print " Using ParMETIS" -else: print " Not using ParMETIS (requires MPI)" -if env['usepapi']: print " Using PAPI" -else: print " Not using PAPI" -if env['usedebug']: print " Compiling for debug" -else: print " Not compiling for debug" -print " Installing in", prefix -if ((fatalwarning != "") and (env['usewarnings'])): print " Treating warnings as errors" -else: print " Not treating warnings as errors" -print "" - -############ Delete option-dependent files ##################### - -Execute(Delete(env['libinstall'] + "/Compiled.with.debug")) -Execute(Delete(env['libinstall'] + "/Compiled.with.mpi")) -Execute(Delete(env['libinstall'] 
+ "/Compiled.with.openmp")) -if not env['usempi']: Execute(Delete(env['libinstall'] + "/pythonMPI")) - + print(" NOT treating warnings as errors") +print("") -############ Build the subdirectories ########################## +####################### Configure the subdirectories ######################### from grouptest import * TestGroups=[] -Export( - ["env", - "env_mpi", - "clone_env", - "IS_WINDOWS_PLATFORM", - "TestGroups" - ] - ) - -env.SConscript(dirs = ['tools/CppUnitTest/src'], build_dir='build/$PLATFORM/tools/CppUnitTest', duplicate=0) -env.SConscript(dirs = ['tools/libescriptreader/src'], build_dir='build/$PLATFORM/tools/libescriptreader', duplicate=0) -env.SConscript(dirs = ['paso/src'], build_dir='build/$PLATFORM/paso', duplicate=0) -env.SConscript(dirs = ['escript/src'], build_dir='build/$PLATFORM/escript', duplicate=0) -env.SConscript(dirs = ['esysUtils/src'], build_dir='build/$PLATFORM/esysUtils', duplicate=0) -env.SConscript(dirs = ['finley/src'], build_dir='build/$PLATFORM/finley', duplicate=0) -env.SConscript(dirs = ['modellib/py_src'], build_dir='build/$PLATFORM/modellib', duplicate=0) -env.SConscript(dirs = ['doc'], build_dir='build/$PLATFORM/doc', duplicate=0) -env.SConscript(dirs = ['pyvisi/py_src'], build_dir='build/$PLATFORM/pyvisi', duplicate=0) -env.SConscript(dirs = ['pycad/py_src'], build_dir='build/$PLATFORM/pycad', duplicate=0) -env.SConscript(dirs = ['pythonMPI/src'], build_dir='build/$PLATFORM/pythonMPI', duplicate=0) -env.SConscript(dirs = ['scripts'], build_dir='build/$PLATFORM/scripts', duplicate=0) -env.SConscript(dirs = ['paso/profiling'], build_dir='build/$PLATFORM/paso/profiling', duplicate=0) - +# keep an environment without warnings-as-errors +dodgy_env=env.Clone() -############ Remember what optimizations we used ############### +# now add warnings-as-errors flags. 
This needs to be done after configuration +# because the scons test files have warnings in them +if ((fatalwarning != '') and (env['werror'])): + env.Append(CCFLAGS = fatalwarning) -remember_list = [] +Export( + ['env', + 'dodgy_env', + 'IS_WINDOWS', + 'TestGroups' + ] +) -if env['usedebug']: - remember_list += env.Command(env['libinstall'] + "/Compiled.with.debug", None, Touch('$TARGET')) +env.SConscript(dirs = ['tools/CppUnitTest/src'], variant_dir='$BUILD_DIR/$PLATFORM/tools/CppUnitTest', duplicate=0) +env.SConscript(dirs = ['tools/escriptconvert'], variant_dir='$BUILD_DIR/$PLATFORM/tools/escriptconvert', duplicate=0) +env.SConscript(dirs = ['paso/src'], variant_dir='$BUILD_DIR/$PLATFORM/paso', duplicate=0) +env.SConscript(dirs = ['weipa/src'], variant_dir='$BUILD_DIR/$PLATFORM/weipa', duplicate=0) +env.SConscript(dirs = ['escript/src'], variant_dir='$BUILD_DIR/$PLATFORM/escript', duplicate=0) +env.SConscript(dirs = ['esysUtils/src'], variant_dir='$BUILD_DIR/$PLATFORM/esysUtils', duplicate=0) +env.SConscript(dirs = ['dudley/src'], variant_dir='$BUILD_DIR/$PLATFORM/dudley', duplicate=0) +env.SConscript(dirs = ['finley/src'], variant_dir='$BUILD_DIR/$PLATFORM/finley', duplicate=0) +env.SConscript(dirs = ['modellib/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/modellib', duplicate=0) +env.SConscript(dirs = ['doc'], variant_dir='$BUILD_DIR/$PLATFORM/doc', duplicate=0) +env.SConscript(dirs = ['pyvisi/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/pyvisi', duplicate=0) +env.SConscript(dirs = ['pycad/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/pycad', duplicate=0) +env.SConscript(dirs = ['pythonMPI/src'], variant_dir='$BUILD_DIR/$PLATFORM/pythonMPI', duplicate=0) +env.SConscript(dirs = ['paso/profiling'], variant_dir='$BUILD_DIR/$PLATFORM/paso/profiling', duplicate=0) + +######################## Populate the buildvars file ######################### + +# remove obsolete file +if not env['usempi']: + Execute(Delete(os.path.join(env['libinstall'], 'pythonMPI'))) + 
Execute(Delete(os.path.join(env['libinstall'], 'pythonMPIredirect'))) + +# Try to extract the boost version from version.hpp +boosthpp=open(os.path.join(boost_inc_path, 'boost', 'version.hpp')) +boostversion='unknown' +try: + for line in boosthpp: + ver=re.match(r'#define BOOST_VERSION (\d+)',line) + if ver: + boostversion=ver.group(1) +except StopIteration: + pass +boosthpp.close() + +buildvars=open(os.path.join(env['libinstall'], 'buildvars'), 'w') +buildvars.write("svn_revision="+str(global_revision)+"\n") +buildvars.write("prefix="+prefix+"\n") +buildvars.write("cc="+env['CC']+"\n") +buildvars.write("cxx="+env['CXX']+"\n") +buildvars.write("python="+sys.executable+"\n") +buildvars.write("python_version="+str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2])+"\n") +buildvars.write("boost_inc_path="+boost_inc_path+"\n") +buildvars.write("boost_lib_path="+boost_lib_path+"\n") +buildvars.write("boost_version="+boostversion+"\n") +buildvars.write("debug=%d\n"%int(env['debug'])) +buildvars.write("openmp=%d\n"%int(env['openmp'])) +buildvars.write("mpi=%s\n"%env['mpi']) +buildvars.write("mpi_inc_path=%s\n"%mpi_inc_path) +buildvars.write("mpi_lib_path=%s\n"%mpi_lib_path) +buildvars.write("lapack=%s\n"%env['lapack']) +buildvars.write("pyvisi=%d\n"%env['pyvisi']) +buildvars.write("vsl_random=%d\n"%int(env['vsl_random'])) +for i in 'netcdf','parmetis','papi','mkl','umfpack','boomeramg','silo','visit': + buildvars.write("%s=%d\n"%(i, int(env[i]))) + if env[i]: + buildvars.write("%s_inc_path=%s\n"%(i, eval(i+'_inc_path'))) + buildvars.write("%s_lib_path=%s\n"%(i, eval(i+'_lib_path'))) +buildvars.close() -if env['usempi']: - remember_list += env.Command(env['libinstall'] + "/Compiled.with.mpi", None, Touch('$TARGET')) +################### Targets to build and install libraries ################### -if env['omp_optim'] != '': - remember_list += env.Command(env['libinstall'] + "/Compiled.with.openmp", None, Touch('$TARGET')) +target_init = 
env.Command(env['pyinstall']+'/__init__.py', None, Touch('$TARGET')) +env.Alias('target_init', [target_init]) -env.Alias('remember_options', remember_list) +# The headers have to be installed prior to build in order to satisfy +# #include +env.Alias('build_esysUtils', ['install_esysUtils_headers', 'build_esysUtils_lib']) +env.Alias('install_esysUtils', ['build_esysUtils', 'install_esysUtils_lib']) -############ Targets to build and install libraries ############ +env.Alias('build_paso', ['install_paso_headers', 'build_paso_lib']) +env.Alias('install_paso', ['build_paso', 'install_paso_lib']) -target_init = env.Command(env['pyinstall']+'/__init__.py', None, Touch('$TARGET')) -env.Alias('target_init', [target_init]) +env.Alias('build_escript', ['install_escript_headers', 'build_escript_lib', 'build_escriptcpp_lib']) +env.Alias('install_escript', ['build_escript', 'install_escript_lib', 'install_escriptcpp_lib', 'install_escript_py']) -# The headers have to be installed prior to build in order to satisfy #include -env.Alias('build_esysUtils', ['target_install_esysUtils_headers', 'target_esysUtils_a']) -env.Alias('install_esysUtils', ['build_esysUtils', 'target_install_esysUtils_a']) +env.Alias('build_dudley', ['install_dudley_headers', 'build_dudley_lib', 'build_dudleycpp_lib']) +env.Alias('install_dudley', ['build_dudley', 'install_dudley_lib', 'install_dudleycpp_lib', 'install_dudley_py']) -env.Alias('build_paso', ['target_install_paso_headers', 'target_paso_a']) -env.Alias('install_paso', ['build_paso', 'target_install_paso_a']) +env.Alias('build_finley', ['install_finley_headers', 'build_finley_lib', 'build_finleycpp_lib']) +env.Alias('install_finley', ['build_finley', 'install_finley_lib', 'install_finleycpp_lib', 'install_finley_py']) -env.Alias('build_escript', ['target_install_escript_headers', 'target_escript_so', 'target_escriptcpp_so']) -env.Alias('install_escript', ['build_escript', 'target_install_escript_so', 'target_install_escriptcpp_so', 
'target_install_escript_py']) +env.Alias('build_weipa', ['install_weipa_headers', 'build_weipa_lib', 'build_weipacpp_lib']) +env.Alias('install_weipa', ['build_weipa', 'install_weipa_lib', 'install_weipacpp_lib', 'install_weipa_py']) -env.Alias('build_finley', ['target_install_finley_headers', 'target_finley_so', 'target_finleycpp_so']) -env.Alias('install_finley', ['build_finley', 'target_install_finley_so', 'target_install_finleycpp_so', 'target_install_finley_py']) +env.Alias('build_escriptreader', ['install_weipa_headers', 'build_escriptreader_lib']) +env.Alias('install_escriptreader', ['build_escriptreader', 'install_escriptreader_lib']) -# Now gather all the above into a couple easy targets: build_all and install_all +# Now gather all the above into some easy targets: build_all and install_all build_all_list = [] build_all_list += ['build_esysUtils'] build_all_list += ['build_paso'] build_all_list += ['build_escript'] +build_all_list += ['build_dudley'] build_all_list += ['build_finley'] -if env['usempi']: build_all_list += ['target_pythonMPI_exe'] -if not IS_WINDOWS_PLATFORM: build_all_list += ['target_finley_wrapper'] -if env['usesilo']: build_all_list += ['target_escript2silo'] +build_all_list += ['build_weipa'] +if not IS_WINDOWS: build_all_list += ['build_escriptreader'] +if env['usempi']: build_all_list += ['build_pythonMPI'] +build_all_list += ['build_escriptconvert'] env.Alias('build_all', build_all_list) install_all_list = [] @@ -701,39 +793,50 @@ install_all_list += ['install_esysUtils'] install_all_list += ['install_paso'] install_all_list += ['install_escript'] +install_all_list += ['install_dudley'] install_all_list += ['install_finley'] -install_all_list += ['target_install_pyvisi_py'] -install_all_list += ['target_install_modellib_py'] -install_all_list += ['target_install_pycad_py'] -if env['usempi']: install_all_list += ['target_install_pythonMPI_exe'] -if not IS_WINDOWS_PLATFORM: install_all_list += ['target_install_finley_wrapper'] -if 
env['usesilo']: install_all_list += ['target_install_escript2silo'] -install_all_list += ['remember_options'] +install_all_list += ['install_weipa'] +if not IS_WINDOWS: install_all_list += ['install_escriptreader'] +install_all_list += ['install_pyvisi_py'] +install_all_list += ['install_modellib_py'] +install_all_list += ['install_pycad_py'] +if env['usempi']: install_all_list += ['install_pythonMPI'] +install_all_list += ['install_escriptconvert'] env.Alias('install_all', install_all_list) # Default target is install env.Default('install_all') -############ Targets to build and run the test suite ########### +################## Targets to build and run the test suite ################### -env.Alias('build_cppunittest', ['target_install_cppunittest_headers', 'target_cppunittest_a']) -env.Alias('install_cppunittest', ['build_cppunittest', 'target_install_cppunittest_a']) -env.Alias('run_tests', ['install_all', 'target_install_cppunittest_a']) -env.Alias('all_tests', ['install_all', 'target_install_cppunittest_a', 'run_tests', 'py_tests']) - -############ Targets to build the documentation ################ - -env.Alias('docs', ['examples_tarfile', 'examples_zipfile', 'api_epydoc', 'api_doxygen', 'guide_pdf', 'guide_html']) - -if not IS_WINDOWS_PLATFORM: - try: - utest=open("utest.sh","w") - utest.write(GroupTest.makeHeader()) - for tests in TestGroups: - utest.write(tests.makeString()) - utest.close() - print "utest.sh written" - except IOError: - print "Error attempting to write unittests file." 
- sys.exit(1) +env.Alias('build_cppunittest', ['install_cppunittest_headers', 'build_cppunittest_lib']) +env.Alias('install_cppunittest', ['build_cppunittest', 'install_cppunittest_lib']) +env.Alias('run_tests', ['install_all', 'install_cppunittest_lib']) +env.Alias('all_tests', ['install_all', 'install_cppunittest_lib', 'run_tests', 'py_tests']) +env.Alias('build_full',['install_all','build_tests','build_py_tests']) +env.Alias('build_PasoTests','$BUILD_DIR/$PLATFORM/paso/profiling/PasoTests') + +##################### Targets to build the documentation ##################### + +env.Alias('api_epydoc','install_all') +env.Alias('docs', ['examples_tarfile', 'examples_zipfile', 'api_epydoc', 'api_doxygen', 'user_pdf', 'install_pdf', 'cookbook_pdf']) +env.Alias('release_prep', ['docs', 'install_all']) + +if not IS_WINDOWS: + try: + utest=open('utest.sh','w') + utest.write(GroupTest.makeHeader(env['PLATFORM'])) + for tests in TestGroups: + utest.write(tests.makeString()) + utest.close() + Execute(Chmod('utest.sh', 0755)) + print("Generated utest.sh.") + except IOError: + print("Error attempting to write unittests file.") + Exit(1) + + # Make sure that the escript wrapper is in place + if not os.path.isfile(os.path.join(env['bininstall'], 'run-escript')): + print("Copying escript wrapper.") + Execute(Copy(os.path.join(env['bininstall'],'run-escript'), 'bin/run-escript'))