style: bring packages in compliance with pep8-naming

Todd Gamblin 2018-06-27 10:27:03 -07:00
parent 20e4038a72
commit d0a808944e
27 changed files with 308 additions and 312 deletions
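The renames below are mechanical: flake8's pep8-naming plugin flags camelCase and mixedCase identifiers in the package recipes, chiefly function and method names (N802), arguments (N803), and local variables (N806), and the fix is a straight rename to snake_case with no change in behavior. A minimal sketch of the pattern, modeled on the Boost change below but reduced to a standalone, hypothetical function (the toolset value and library names are invented):

    # Before: pep8-naming flags the argument (N803) and the local
    # variable (N806) because they are not lowercase snake_case.
    def bootstrap_options(withLibs):
        boostToolsetId = 'gcc'
        return ['--with-toolset=%s' % boostToolsetId,
                '--with-libraries=%s' % ','.join(withLibs)]

    # After: identical behavior, compliant names.
    def bootstrap_options_pep8(with_libs):
        boost_toolset_id = 'gcc'
        return ['--with-toolset=%s' % boost_toolset_id,
                '--with-libraries=%s' % ','.join(with_libs)]

    print(bootstrap_options_pep8(['regex', 'system']))
    # ['--with-toolset=gcc', '--with-libraries=regex,system']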

View file

@@ -217,10 +217,10 @@ def bjam_python_line(self, spec):
spec['python'].libs[0]
)
def determine_bootstrap_options(self, spec, withLibs, options):
boostToolsetId = self.determine_toolset(spec)
options.append('--with-toolset=%s' % boostToolsetId)
options.append("--with-libraries=%s" % ','.join(withLibs))
def determine_bootstrap_options(self, spec, with_libs, options):
boost_toolset_id = self.determine_toolset(spec)
options.append('--with-toolset=%s' % boost_toolset_id)
options.append("--with-libraries=%s" % ','.join(with_libs))
if '+python' in spec:
options.append('--with-python=%s' % spec['python'].command.path)
@@ -234,7 +234,7 @@ def determine_bootstrap_options(self, spec, withLibs, options):
# error: duplicate initialization of intel-linux with the following parameters: # noqa
# error: version = <unspecified>
# error: previous initialization at ./user-config.jam:1
f.write("using {0} : : {1} ;\n".format(boostToolsetId,
f.write("using {0} : : {1} ;\n".format(boost_toolset_id,
spack_cxx))
if '+mpi' in spec:
@@ -292,16 +292,16 @@ def determine_b2_options(self, spec, options):
'-s', 'ZLIB_INCLUDE=%s' % spec['zlib'].prefix.include,
'-s', 'ZLIB_LIBPATH=%s' % spec['zlib'].prefix.lib])
linkTypes = ['static']
link_types = ['static']
if '+shared' in spec:
linkTypes.append('shared')
link_types.append('shared')
threadingOpts = []
threading_opts = []
if '+multithreaded' in spec:
threadingOpts.append('multi')
threading_opts.append('multi')
if '+singlethreaded' in spec:
threadingOpts.append('single')
if not threadingOpts:
threading_opts.append('single')
if not threading_opts:
raise RuntimeError("At least one of {singlethreaded, " +
"multithreaded} must be enabled")
@@ -310,13 +310,13 @@ def determine_b2_options(self, spec, options):
elif '+versionedlayout' in spec:
layout = 'versioned'
else:
if len(threadingOpts) > 1:
if len(threading_opts) > 1:
raise RuntimeError("Cannot build both single and " +
"multi-threaded targets with system layout")
layout = 'system'
options.extend([
'link=%s' % ','.join(linkTypes),
'link=%s' % ','.join(link_types),
'--layout=%s' % layout
])
@@ -352,7 +352,7 @@ def determine_b2_options(self, spec, options):
if cxxflags:
options.append('cxxflags="{0}"'.format(' '.join(cxxflags)))
return threadingOpts
return threading_opts
def add_buildopt_symlinks(self, prefix):
with working_dir(prefix.lib):
@@ -371,11 +371,11 @@ def install(self, spec, prefix):
force_symlink('/usr/bin/libtool', join_path(newdir, 'libtool'))
env['PATH'] = newdir + ':' + env['PATH']
withLibs = list()
with_libs = list()
for lib in Boost.all_libs:
if "+{0}".format(lib) in spec:
withLibs.append(lib)
if not withLibs:
with_libs.append(lib)
if not with_libs:
# if no libraries are specified for compilation, then you don't have
# to configure/build anything, just copy over to the prefix
# directory.
@@ -387,19 +387,19 @@ def install(self, spec, prefix):
# Remove libraries that the release version does not support
if not spec.satisfies('@1.54.0:'):
withLibs.remove('log')
with_libs.remove('log')
if not spec.satisfies('@1.53.0:'):
withLibs.remove('atomic')
with_libs.remove('atomic')
if not spec.satisfies('@1.48.0:'):
withLibs.remove('locale')
with_libs.remove('locale')
if not spec.satisfies('@1.47.0:'):
withLibs.remove('chrono')
with_libs.remove('chrono')
if not spec.satisfies('@1.43.0:'):
withLibs.remove('random')
with_libs.remove('random')
if not spec.satisfies('@1.39.0:'):
withLibs.remove('exception')
with_libs.remove('exception')
if '+graph' in spec and '+mpi' in spec:
withLibs.append('graph_parallel')
with_libs.append('graph_parallel')
# to make Boost find the user-config.jam
env['BOOST_BUILD_PATH'] = self.stage.source_path
@@ -407,7 +407,7 @@ def install(self, spec, prefix):
bootstrap = Executable('./bootstrap.sh')
bootstrap_options = ['--prefix=%s' % prefix]
self.determine_bootstrap_options(spec, withLibs, bootstrap_options)
self.determine_bootstrap_options(spec, with_libs, bootstrap_options)
bootstrap(*bootstrap_options)
@@ -426,13 +426,13 @@ def install(self, spec, prefix):
self.stage.source_path, 'user-config.jam')
]
threadingOpts = self.determine_b2_options(spec, b2_options)
threading_opts = self.determine_b2_options(spec, b2_options)
b2('--clean')
# In theory it could be done in one call but it fails on
# Boost.MPI if the threading options are not separated.
for threadingOpt in threadingOpts:
for threadingOpt in threading_opts:
b2('install', 'threading=%s' % threadingOpt, *b2_options)
if '+multithreaded' in spec and '~taggedlayout' in spec:

View file

@@ -150,46 +150,46 @@ class CbtfKrell(CMakePackage):
build_directory = 'build_cbtf_krell'
def set_RTOnly_cmakeOptions(self, spec, cmakeOptions):
# Appends to cmakeOptions the options that will enable the appropriate
def set_rt_only_cmake_options(self, spec, cmake_options):
# Appends to cmake_options the options that will enable the appropriate
# MPI implementations
RTOnlyOptions = []
RTOnlyOptions.append('-DRUNTIME_ONLY=true')
cmakeOptions.extend(RTOnlyOptions)
rt_only_options = []
rt_only_options.append('-DRUNTIME_ONLY=true')
cmake_options.extend(rt_only_options)
def set_mpi_cmakeOptions(self, spec, cmakeOptions):
# Appends to cmakeOptions the options that will enable the appropriate
def set_mpi_cmake_options(self, spec, cmake_options):
# Appends to cmake_options the options that will enable the appropriate
# MPI implementations
MPIOptions = []
mpi_options = []
# openmpi
if spec.satisfies('+openmpi'):
MPIOptions.append('-DOPENMPI_DIR=%s' % spec['openmpi'].prefix)
mpi_options.append('-DOPENMPI_DIR=%s' % spec['openmpi'].prefix)
# mpich
if spec.satisfies('+mpich'):
MPIOptions.append('-DMPICH_DIR=%s' % spec['mpich'].prefix)
mpi_options.append('-DMPICH_DIR=%s' % spec['mpich'].prefix)
# mpich2
if spec.satisfies('+mpich2'):
MPIOptions.append('-DMPICH2_DIR=%s' % spec['mpich2'].prefix)
mpi_options.append('-DMPICH2_DIR=%s' % spec['mpich2'].prefix)
# mvapich
if spec.satisfies('+mvapich'):
MPIOptions.append('-DMVAPICH_DIR=%s' % spec['mvapich'].prefix)
mpi_options.append('-DMVAPICH_DIR=%s' % spec['mvapich'].prefix)
# mvapich2
if spec.satisfies('+mvapich2'):
MPIOptions.append('-DMVAPICH2_DIR=%s' % spec['mvapich2'].prefix)
mpi_options.append('-DMVAPICH2_DIR=%s' % spec['mvapich2'].prefix)
# mpt
if spec.satisfies('+mpt'):
MPIOptions.append('-DMPT_DIR=%s' % spec['mpt'].prefix)
mpi_options.append('-DMPT_DIR=%s' % spec['mpt'].prefix)
cmakeOptions.extend(MPIOptions)
cmake_options.extend(mpi_options)
def set_CrayLoginNode_cmakeOptions(self, spec, cmakeOptions):
# Appends to cmakeOptions the options that will enable
def set_cray_login_node_cmake_options(self, spec, cmake_options):
# Appends to cmake_options the options that will enable
# the appropriate Cray login node libraries
CrayLoginNodeOptions = []
cray_login_node_options = []
rt_platform = "cray"
# How do we get the compute node (CNL) cbtf package
# install directory path. spec['cbtf'].prefix is the
@@ -207,31 +207,31 @@ def set_CrayLoginNode_cmakeOptions(self, spec, cmakeOptions):
be_dyn = spack.store.db.query_one('dyninst arch=cray-CNL-haswell')
be_mrnet = spack.store.db.query_one('mrnet arch=cray-CNL-haswell')
CrayLoginNodeOptions.append('-DCN_RUNTIME_PLATFORM=%s'
% rt_platform)
cray_login_node_options.append(
'-DCN_RUNTIME_PLATFORM=%s' % rt_platform)
# Use install directories as CMAKE args for the building
# of login cbtf-krell
CrayLoginNodeOptions.append('-DCBTF_CN_RUNTIME_DIR=%s'
% be_cbtf.prefix)
CrayLoginNodeOptions.append('-DCBTF_KRELL_CN_RUNTIME_DIR=%s'
% be_cbtfk.prefix)
CrayLoginNodeOptions.append('-DPAPI_CN_RUNTIME_DIR=%s'
% be_papi.prefix)
CrayLoginNodeOptions.append('-DBOOST_CN_RUNTIME_DIR=%s'
% be_boost.prefix)
CrayLoginNodeOptions.append('-DLIBMONITOR_CN_RUNTIME_DIR=%s'
% be_mont.prefix)
CrayLoginNodeOptions.append('-DLIBUNWIND_CN_RUNTIME_DIR=%s'
% be_unw.prefix)
CrayLoginNodeOptions.append('-DXERCESC_CN_RUNTIME_DIR=%s'
% be_xer.prefix)
CrayLoginNodeOptions.append('-DDYNINST_CN_RUNTIME_DIR=%s'
% be_dyn.prefix)
CrayLoginNodeOptions.append('-DMRNET_CN_RUNTIME_DIR=%s'
% be_mrnet.prefix)
cray_login_node_options.append(
'-DCBTF_CN_RUNTIME_DIR=%s' % be_cbtf.prefix)
cray_login_node_options.append(
'-DCBTF_KRELL_CN_RUNTIME_DIR=%s' % be_cbtfk.prefix)
cray_login_node_options.append(
'-DPAPI_CN_RUNTIME_DIR=%s' % be_papi.prefix)
cray_login_node_options.append(
'-DBOOST_CN_RUNTIME_DIR=%s' % be_boost.prefix)
cray_login_node_options.append(
'-DLIBMONITOR_CN_RUNTIME_DIR=%s' % be_mont.prefix)
cray_login_node_options.append(
'-DLIBUNWIND_CN_RUNTIME_DIR=%s' % be_unw.prefix)
cray_login_node_options.append(
'-DXERCESC_CN_RUNTIME_DIR=%s' % be_xer.prefix)
cray_login_node_options.append(
'-DDYNINST_CN_RUNTIME_DIR=%s' % be_dyn.prefix)
cray_login_node_options.append(
'-DMRNET_CN_RUNTIME_DIR=%s' % be_mrnet.prefix)
cmakeOptions.extend(CrayLoginNodeOptions)
cmake_options.extend(cray_login_node_options)
def cmake_args(self):
spec = self.spec
@@ -256,14 +256,14 @@ def cmake_args(self):
'-DXERCESC_DIR=%s' % spec['xerces-c'].prefix]
if self.spec.satisfies('+runtime'):
self.set_RTOnly_cmakeOptions(spec, cmake_args)
self.set_rt_only_cmake_options(spec, cmake_args)
# Add any MPI implementations coming from variant settings
self.set_mpi_cmakeOptions(spec, cmake_args)
self.set_mpi_cmake_options(spec, cmake_args)
if self.spec.satisfies('+crayfe'):
# We need to build target/compute node components/libraries first
# then pass those libraries to the cbtf-krell login node build
self.set_CrayLoginNode_cmakeOptions(spec, cmake_args)
self.set_cray_login_node_cmake_options(spec, cmake_args)
return cmake_args

View file

@@ -70,7 +70,7 @@ class Dataspaces(AutotoolsPackage):
depends_on('libtool', type='build')
depends_on('mpi', when='+mpi')
def autoreconf(spec, prefix, self):
def autoreconf(self, spec, prefix):
bash = which('bash')
bash('./autogen.sh')

View file

@@ -70,7 +70,7 @@ def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
run_env.prepend_path('LD_LIBRARY_PATH', self.prefix)
def install(self, spec, prefix):
INSTALL = Executable('./INSTALL')
INSTALL()
# Use a name that does not shadow Spack's install() helper used below.
installer = Executable('./INSTALL')
installer()
with working_dir('examples'):
install('dislin_d.h', prefix)
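In Spack package modules, install() is a file-copy helper provided by the build environment (via llnl.util.filesystem), and the unchanged call install('dislin_d.h', prefix) above relies on it. Renaming the local Executable from INSTALL to a lowercase install would shadow that helper for the rest of the method, so a distinct name such as installer is safer here. A minimal standalone sketch of the hazard, in plain Python with hypothetical stand-ins for both functions:

    # Stand-in for Spack's install() file-copy helper.
    def install(src, dest):
        print('copying %s to %s' % (src, dest))

    def install_package():
        def run_install_script(*args):
            print('running ./INSTALL with arguments', args)

        # The rename: this local name shadows the module-level helper
        # for the remainder of the function body.
        install = run_install_script
        install()                          # runs the script, as intended
        install('dislin_d.h', '/prefix')   # also runs the script, not a copy

    install_package()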

View file

@@ -81,8 +81,8 @@ def build_targets(self):
math_includes += " -I" + spec['blas'].prefix.include
targets.append('SPACKBLASINCLUDES=' + math_includes)
# And BML
bmlLibDirs = spec['bml'].libs.directories[0]
targets.append('BML_PATH=' + bmlLibDirs)
bml_lib_dirs = spec['bml'].libs.directories[0]
targets.append('BML_PATH=' + bml_lib_dirs)
targets.append('--file=Makefile.vanilla')
return targets

View file

@@ -46,44 +46,44 @@ class Gslib(Package):
conflicts('~mpi', when='+mpiio')
def install(self, spec, prefix):
srcDir = 'src'
libDir = 'lib'
src_dir = 'src'
lib_dir = 'lib'
libname = 'libgs.a'
if self.version == Version('1.0.1'):
makeFile = 'Makefile'
makefile = 'Makefile'
else:
makeFile = 'src/Makefile'
makefile = 'src/Makefile'
CC = self.compiler.cc
cc = self.compiler.cc
if '+mpiio' not in spec:
filter_file(r'MPIIO.*?=.*1', 'MPIIO = 0', makeFile)
filter_file(r'MPIIO.*?=.*1', 'MPIIO = 0', makefile)
if '+mpi' in spec:
CC = spec['mpi'].mpicc
cc = spec['mpi'].mpicc
else:
filter_file(r'MPI.*?=.*1', 'MPI = 0', makeFile)
filter_file(r'MPIIO.*?=.*1', 'MPIIO = 0', makeFile)
filter_file(r'MPI.*?=.*1', 'MPI = 0', makefile)
filter_file(r'MPIIO.*?=.*1', 'MPIIO = 0', makefile)
makeCmd = "CC=" + CC
make_cmd = "CC=" + cc
if '+blas' in spec:
filter_file(r'BLAS.*?=.*0', 'BLAS = 1', makeFile)
filter_file(r'BLAS.*?=.*0', 'BLAS = 1', makefile)
blas = spec['blas'].libs
ldFlags = blas.ld_flags
filter_file(r'\$\(LDFLAGS\)', ldFlags, makeFile)
ld_flags = blas.ld_flags
filter_file(r'\$\(LDFLAGS\)', ld_flags, makefile)
if self.version == Version('1.0.1'):
make(makeCmd)
make(make_cmd)
make('install')
install_tree(libDir, prefix.lib)
install_tree(lib_dir, prefix.lib)
elif self.version == Version('1.0.0'):
with working_dir(srcDir):
make(makeCmd)
with working_dir(src_dir):
make(make_cmd)
mkdir(prefix.lib)
install(libname, prefix.lib)
# Should only install the headers (this will be fixed in gslib in
# future releases).
install_tree(srcDir, prefix.include)
install_tree(src_dir, prefix.include)

View file

@@ -58,9 +58,9 @@ class Lbann(CMakePackage):
when=('build_type=Debug' '@0.95:'))
depends_on('hydrogen +openmp_blas +shared +int64 build_type=Debug',
when=('build_type=Debug' '@:0.90'))
depends_on('hydrogen +openmp_blas +shared +int64 +cuda',
depends_on('hydrogen +openmp_blas +shared +int64 +cuda',
when=('+gpu' '@0.95:'))
depends_on('hydrogen +openmp_blas +shared +int64 +cuda',
depends_on('hydrogen +openmp_blas +shared +int64 +cuda',
when=('+gpu' '@:0.90'))
depends_on('hydrogen +openmp_blas +shared +int64 +cuda build_type=Debug',
when=('build_type=Debug' '@0.95:' '+gpu'))
@@ -94,12 +94,12 @@ class Lbann(CMakePackage):
def common_config_args(self):
spec = self.spec
# Environment variables
CPPFLAGS = []
CPPFLAGS.append('-DLBANN_SET_EL_RNG -ldl')
cppflags = []
cppflags.append('-DLBANN_SET_EL_RNG -ldl')
return [
'-DCMAKE_INSTALL_MESSAGE=LAZY',
'-DCMAKE_CXX_FLAGS=%s' % ' '.join(CPPFLAGS),
'-DCMAKE_CXX_FLAGS=%s' % ' '.join(cppflags),
'-DLBANN_VERSION=spack',
'-DCNPY_DIR={0}'.format(spec['cnpy'].prefix),
]

View file

@@ -80,15 +80,15 @@ def configure(self, spec, prefix):
}
# Store values requested by the installer in a file
with open('spack_installer_input.txt', 'w') as inputFile:
with open('spack_installer_input.txt', 'w') as input_file:
for key in config:
inputFile.write('{0}={1}\n'.format(key, config[key]))
input_file.write('{0}={1}\n'.format(key, config[key]))
def install(self, spec, prefix):
self.configure(spec, prefix)
# Run silent installation script
# Full path required
inputFile = join_path(self.stage.source_path,
'spack_installer_input.txt')
subprocess.call(['./install', '-inputFile', inputFile])
input_file = join_path(
self.stage.source_path, 'spack_installer_input.txt')
subprocess.call(['./install', '-inputFile', input_file])

View file

@@ -221,11 +221,11 @@ def install(self, spec, prefix):
make('install')
# install GKlib headers, which will be needed for ParMETIS
GKlib_dist = join_path(prefix.include, 'GKlib')
mkdirp(GKlib_dist)
gklib_dist = join_path(prefix.include, 'GKlib')
mkdirp(gklib_dist)
hfiles = glob.glob(join_path(source_directory, 'GKlib', '*.h'))
for hfile in hfiles:
install(hfile, GKlib_dist)
install(hfile, gklib_dist)
if self.run_tests:
# FIXME: On some systems, the installed binaries for METIS cannot

View file

@@ -229,7 +229,7 @@ def yes_no(varstr):
# from within MFEM.
# Similar to spec[pkg].libs.ld_flags but prepends rpath flags too.
def ld_flags_from_LibraryList(libs_list):
def ld_flags_from_library_list(libs_list):
flags = ['-Wl,-rpath,%s' % dir for dir in libs_list.directories]
flags += [libs_list.ld_flags]
return ' '.join(flags)
@@ -298,7 +298,7 @@ def find_optional_library(name, prefix):
hypre['blas'].libs
options += [
'HYPRE_OPT=-I%s' % hypre.prefix.include,
'HYPRE_LIB=%s' % ld_flags_from_LibraryList(all_hypre_libs)]
'HYPRE_LIB=%s' % ld_flags_from_library_list(all_hypre_libs)]
if '+metis' in spec:
options += [
@@ -310,7 +310,7 @@ def find_optional_library(name, prefix):
lapack_blas = spec['lapack'].libs + spec['blas'].libs
options += [
# LAPACK_OPT is not used
'LAPACK_LIB=%s' % ld_flags_from_LibraryList(lapack_blas)]
'LAPACK_LIB=%s' % ld_flags_from_library_list(lapack_blas)]
if '+superlu-dist' in spec:
lapack_blas = spec['lapack'].libs + spec['blas'].libs
@@ -321,28 +321,28 @@ def find_optional_library(name, prefix):
'SUPERLU_LIB=-L%s -L%s -lsuperlu_dist -lparmetis %s' %
(spec['superlu-dist'].prefix.lib,
spec['parmetis'].prefix.lib,
ld_flags_from_LibraryList(lapack_blas))]
ld_flags_from_library_list(lapack_blas))]
if '+suite-sparse' in spec:
ss_spec = 'suite-sparse:' + self.suitesparse_components
options += [
'SUITESPARSE_OPT=-I%s' % spec[ss_spec].prefix.include,
'SUITESPARSE_LIB=%s' %
ld_flags_from_LibraryList(spec[ss_spec].libs)]
ld_flags_from_library_list(spec[ss_spec].libs)]
if '+sundials' in spec:
sun_spec = 'sundials:' + self.sundials_components
options += [
'SUNDIALS_OPT=%s' % spec[sun_spec].headers.cpp_flags,
'SUNDIALS_LIB=%s' %
ld_flags_from_LibraryList(spec[sun_spec].libs)]
ld_flags_from_library_list(spec[sun_spec].libs)]
if '+petsc' in spec:
# options += ['PETSC_DIR=%s' % spec['petsc'].prefix]
options += [
'PETSC_OPT=%s' % spec['petsc'].headers.cpp_flags,
'PETSC_LIB=%s' %
ld_flags_from_LibraryList(spec['petsc'].libs)]
ld_flags_from_library_list(spec['petsc'].libs)]
if '+pumi' in spec:
options += ['PUMI_DIR=%s' % spec['pumi'].prefix]
@@ -360,7 +360,7 @@ def find_optional_library(name, prefix):
options += [
'ZLIB_OPT=-I%s' % spec['zlib'].prefix.include,
'ZLIB_LIB=%s' %
ld_flags_from_LibraryList(spec['zlib'].libs)]
ld_flags_from_library_list(spec['zlib'].libs)]
if '+mpfr' in spec:
options += [
@@ -383,7 +383,7 @@ def find_optional_library(name, prefix):
libs += LibraryList(find_system_libraries('libdl'))
options += [
'LIBUNWIND_OPT=%s' % headers.cpp_flags,
'LIBUNWIND_LIB=%s' % ld_flags_from_LibraryList(libs)]
'LIBUNWIND_LIB=%s' % ld_flags_from_library_list(libs)]
if '+openmp' in spec:
options += ['OPENMP_OPT=%s' % self.compiler.openmp_flag]
@@ -408,7 +408,7 @@ def find_optional_library(name, prefix):
libs += hdf5.libs
options += [
'CONDUIT_OPT=%s' % headers.cpp_flags,
'CONDUIT_LIB=%s' % ld_flags_from_LibraryList(libs)]
'CONDUIT_LIB=%s' % ld_flags_from_library_list(libs)]
make('config', *options, parallel=False)
make('info', parallel=False)

View file

@@ -58,8 +58,8 @@ def patch(self):
'image_io_jpeg.cpp', string=True)
def build(self, spec, prefix):
CompilerVersion = self.compiler.version.joined
compiler_version = self.compiler.version.joined
with working_dir(join_path(
'GCC{0}-DebugMT64'.format(CompilerVersion), 'build')):
'GCC{0}-DebugMT64'.format(compiler_version), 'build')):
make('all_r')

View file

@@ -97,25 +97,20 @@ def fortran_check(self):
@run_after('install')
def test_install(self):
currentDir = os.getcwd()
eddyDir = 'short_tests/eddy'
os.chdir(eddyDir)
os.system(join_path(self.prefix.bin, 'makenek') + ' eddy_uv')
if not os.path.isfile(join_path(os.getcwd(), 'nek5000')):
msg = 'Cannot build example: short_tests/eddy.'
raise RuntimeError(msg)
os.chdir(currentDir)
with working_dir('short_tests/eddy'):
os.system(join_path(self.prefix.bin, 'makenek') + ' eddy_uv')
if not os.path.isfile(join_path(os.getcwd(), 'nek5000')):
msg = 'Cannot build example: short_tests/eddy.'
raise RuntimeError(msg)
def install(self, spec, prefix):
toolsDir = 'tools'
binDir = 'bin'
tools_dir = 'tools'
bin_dir = 'bin'
# Do not use the Spack compiler wrappers.
# Use the compilers directly:
FC = self.compiler.f77
CC = self.compiler.cc
fc = self.compiler.f77
cc = self.compiler.cc
fflags = spec.compiler_flags['fflags']
cflags = spec.compiler_flags['cflags']
@@ -149,10 +144,10 @@ def install(self, spec, prefix):
# Build the tools; maketools copies them to Nek5000/bin by default.
# We then install Nek5000/bin under prefix.
with working_dir(toolsDir):
with working_dir(tools_dir):
# Update the maketools script to use correct compilers
filter_file(r'^#FC\s*=.*', 'FC="{0}"'.format(FC), 'maketools')
filter_file(r'^#CC\s*=.*', 'CC="{0}"'.format(CC), 'maketools')
filter_file(r'^#FC\s*=.*', 'FC="{0}"'.format(fc), 'maketools')
filter_file(r'^#CC\s*=.*', 'CC="{0}"'.format(cc), 'maketools')
if fflags:
filter_file(r'^#FFLAGS=.*', 'FFLAGS="{0}"'.format(fflags),
'maketools')
@@ -194,31 +189,31 @@ def install(self, spec, prefix):
maxnel = self.spec.variants['MAXNEL'].value
filter_file(r'^#MAXNEL\s*=.*', 'MAXNEL=' + maxnel, 'maketools')
makeTools = Executable('./maketools')
maketools = Executable('./maketools')
# Build the tools
if '+genbox' in spec:
makeTools('genbox')
maketools('genbox')
# "ERROR: int_tp does not exist!"
# if '+int_tp' in spec:
# makeTools('int_tp')
# maketools('int_tp')
if '+n2to3' in spec:
makeTools('n2to3')
maketools('n2to3')
if '+postnek' in spec:
makeTools('postnek')
maketools('postnek')
if '+reatore2' in spec:
makeTools('reatore2')
maketools('reatore2')
if '+genmap' in spec:
makeTools('genmap')
maketools('genmap')
if '+nekmerge' in spec:
makeTools('nekmerge')
maketools('nekmerge')
if '+prenek' in spec:
makeTools('prenek')
maketools('prenek')
with working_dir(binDir):
with working_dir(bin_dir):
if '+mpi' in spec:
FC = spec['mpi'].mpif77
CC = spec['mpi'].mpicc
fc = spec['mpi'].mpif77
cc = spec['mpi'].mpicc
else:
filter_file(r'^#MPI=0', 'MPI=0', 'makenek')
@ -232,8 +227,8 @@ def install(self, spec, prefix):
# Update the makenek to use correct compilers and
# Nek5000 source.
filter_file(r'^#FC\s*=.*', 'FC="{0}"'.format(FC), 'makenek')
filter_file(r'^#CC\s*=.*', 'CC="{0}"'.format(CC), 'makenek')
filter_file(r'^#FC\s*=.*', 'FC="{0}"'.format(fc), 'makenek')
filter_file(r'^#CC\s*=.*', 'CC="{0}"'.format(cc), 'makenek')
filter_file(r'^#SOURCE_ROOT\s*=\"\$H.*', 'SOURCE_ROOT=\"' +
prefix.bin.Nek5000 + '\"', 'makenek')
if fflags:
@@ -254,7 +249,7 @@ def install(self, spec, prefix):
'$(FC) -c -qextname $(L0)', 'makefile.template')
# Install Nek5000/bin in prefix/bin
install_tree(binDir, prefix.bin)
install_tree(bin_dir, prefix.bin)
# Copy Nek5000 source to prefix/bin
install_tree('../Nek5000', prefix.bin.Nek5000)

View file

@@ -54,11 +54,11 @@ def fortran_check(self):
def install(self, spec, prefix):
mkdir(prefix.bin)
FC = self.compiler.fc
CC = self.compiler.cc
fc = self.compiler.fc
cc = self.compiler.cc
if '+mpi' in spec:
FC = spec['mpi'].mpif77
CC = spec['mpi'].mpicc
fc = spec['mpi'].mpif77
cc = spec['mpi'].mpicc
# Install Nekbone in prefix.bin
install_tree("../Nekbone", prefix.bin.Nekbone)
@@ -73,8 +73,8 @@ def install(self, spec, prefix):
with working_dir(prefix.bin):
filter_file(r'^SOURCE_ROOT\s*=.*', 'SOURCE_ROOT=\"' +
prefix.bin.Nekbone + '/src\"', 'makenek')
filter_file(r'^CC\s*=.*', 'CC=\"' + CC + '\"', 'makenek')
filter_file(r'^F77\s*=.*', 'F77=\"' + FC + '\"', 'makenek')
filter_file(r'^CC\s*=.*', 'CC=\"' + cc + '\"', 'makenek')
filter_file(r'^F77\s*=.*', 'F77=\"' + fc + '\"', 'makenek')
if '+mpi' not in spec:
filter_file(r'^#IFMPI=\"false\"', 'IFMPI=\"false\"', 'makenek')

View file

@@ -56,32 +56,32 @@ def fortran_check(self):
@run_after('install')
def test_install(self):
NekCEM_test = join_path(self.prefix.bin, 'NekCEM', 'tests', '2dboxpec')
with working_dir(NekCEM_test):
nekcem_test = join_path(self.prefix.bin, 'NekCEM', 'tests', '2dboxpec')
with working_dir(nekcem_test):
makenek = Executable(join_path(self.prefix.bin, 'makenek'))
makenek(os.path.basename(NekCEM_test))
makenek(os.path.basename(nekcem_test))
if not os.path.isfile('nekcem'):
msg = 'Cannot build example: %s' % NekCEM_test
msg = 'Cannot build example: %s' % nekcem_test
raise RuntimeError(msg)
def install(self, spec, prefix):
binDir = 'bin'
bin_dir = 'bin'
nek = 'nek'
cNek = 'configurenek'
mNek = 'makenek'
configurenek = 'configurenek'
makenek = 'makenek'
FC = self.compiler.f77
CC = self.compiler.cc
fc = self.compiler.f77
cc = self.compiler.cc
fflags = spec.compiler_flags['fflags']
cflags = spec.compiler_flags['cflags']
ldflags = spec.compiler_flags['ldflags']
if '+mpi' in spec:
FC = spec['mpi'].mpif77
CC = spec['mpi'].mpicc
fc = spec['mpi'].mpif77
cc = spec['mpi'].mpicc
with working_dir(binDir):
with working_dir(bin_dir):
fflags = ['-O3'] + fflags
cflags = ['-O3'] + cflags
fflags += ['-I.']
@@ -104,14 +104,14 @@ def install(self, spec, prefix):
if '+mpi' in spec:
fflags += ['-DMPI', '-DMPIIO']
cflags += ['-DMPI', '-DMPIIO']
blasLapack = spec['lapack'].libs + spec['blas'].libs
blas_lapack = spec['lapack'].libs + spec['blas'].libs
pthread_lib = find_system_libraries('libpthread')
ldflags += (blasLapack + pthread_lib).ld_flags.split()
ldflags += (blas_lapack + pthread_lib).ld_flags.split()
all_arch = {
'spack-arch': {
'FC': FC, 'FFLAGS': fflags,
'CC': CC, 'CFLAGS': cflags,
'LD': FC, 'LDFLAGS': ldflags
'FC': fc, 'FFLAGS': fflags,
'CC': cc, 'CFLAGS': cflags,
'LD': fc, 'LDFLAGS': ldflags
}
}
os.rename('arch.json', 'arch.json.orig')
@@ -125,6 +125,7 @@ def install(self, spec, prefix):
install_tree('../NekCEM', prefix.bin.NekCEM)
# Create symlinks to makenek, nek and configurenek scripts
with working_dir(prefix.bin):
os.symlink(os.path.join('NekCEM', binDir, mNek), mNek)
os.symlink(os.path.join('NekCEM', binDir, cNek), cNek)
os.symlink(os.path.join('NekCEM', binDir, nek), nek)
os.symlink(os.path.join('NekCEM', bin_dir, makenek), makenek)
os.symlink(
os.path.join('NekCEM', bin_dir, configurenek), configurenek)
os.symlink(os.path.join('NekCEM', bin_dir, nek), nek)

View file

@@ -153,10 +153,10 @@ def patch(self):
r'\1{0}\2'.format(max_vars))
def configure_args(self):
CFLAGS = []
CPPFLAGS = []
LDFLAGS = []
LIBS = []
cflags = []
cppflags = []
ldflags = []
libs = []
config_args = ['--enable-v2',
'--enable-utilities',
@@ -177,7 +177,7 @@ def configure_args(self):
if '~shared' in self.spec:
# We don't have shared libraries but we still want it to be
# possible to use this library in shared builds
CFLAGS.append(self.compiler.pic_flag)
cflags.append(self.compiler.pic_flag)
config_args += self.enable_or_disable('dap')
# config_args += self.enable_or_disable('cdmremote')
@@ -189,10 +189,10 @@ def configure_args(self):
# undefined reference to `SSL_CTX_use_certificate_chain_file
curl = self.spec['curl']
curl_libs = curl.libs
LIBS.append(curl_libs.link_flags)
LDFLAGS.append(curl_libs.search_flags)
libs.append(curl_libs.link_flags)
ldflags.append(curl_libs.search_flags)
# TODO: figure out how to get correct flags via headers.cpp_flags
CPPFLAGS.append('-I' + curl.prefix.include)
cppflags.append('-I' + curl.prefix.include)
if self.spec.satisfies('@4.4:'):
if '+mpi' in self.spec:
@@ -204,16 +204,16 @@ def configure_args(self):
# are removed. Variables CPPFLAGS, LDFLAGS, and LD_LIBRARY_PATH must be
# used instead.
hdf5_hl = self.spec['hdf5:hl']
CPPFLAGS.append(hdf5_hl.headers.cpp_flags)
LDFLAGS.append(hdf5_hl.libs.search_flags)
cppflags.append(hdf5_hl.headers.cpp_flags)
ldflags.append(hdf5_hl.libs.search_flags)
if '+parallel-netcdf' in self.spec:
config_args.append('--enable-pnetcdf')
pnetcdf = self.spec['parallel-netcdf']
CPPFLAGS.append(pnetcdf.headers.cpp_flags)
cppflags.append(pnetcdf.headers.cpp_flags)
# TODO: change to pnetcdf.libs.search_flags once 'parallel-netcdf'
# package gets custom implementation of 'libs'
LDFLAGS.append('-L' + pnetcdf.prefix.lib)
ldflags.append('-L' + pnetcdf.prefix.lib)
else:
config_args.append('--disable-pnetcdf')
@@ -223,26 +223,26 @@ def configure_args(self):
config_args += self.enable_or_disable('hdf4')
if '+hdf4' in self.spec:
hdf4 = self.spec['hdf']
CPPFLAGS.append(hdf4.headers.cpp_flags)
cppflags.append(hdf4.headers.cpp_flags)
# TODO: change to hdf4.libs.search_flags once 'hdf'
# package gets custom implementation of 'libs' property.
LDFLAGS.append('-L' + hdf4.prefix.lib)
ldflags.append('-L' + hdf4.prefix.lib)
# TODO: change to self.spec['jpeg'].libs.link_flags once the
# implementations of 'jpeg' virtual package get 'jpeg_libs'
# property.
LIBS.append('-ljpeg')
libs.append('-ljpeg')
if '+szip' in hdf4:
# This should also come from hdf4.libs
LIBS.append('-lsz')
libs.append('-lsz')
# Fortran support
# In version 4.2+, NetCDF-C and NetCDF-Fortran have split.
# Use the netcdf-fortran package to install Fortran support.
config_args.append('CFLAGS=' + ' '.join(CFLAGS))
config_args.append('CPPFLAGS=' + ' '.join(CPPFLAGS))
config_args.append('LDFLAGS=' + ' '.join(LDFLAGS))
config_args.append('LIBS=' + ' '.join(LIBS))
config_args.append('CFLAGS=' + ' '.join(cflags))
config_args.append('CPPFLAGS=' + ' '.join(cppflags))
config_args.append('LDFLAGS=' + ' '.join(ldflags))
config_args.append('LIBS=' + ' '.join(libs))
return config_args

View file

@@ -107,9 +107,9 @@ def install(self, spec, prefix):
# TODO: query if blas/lapack/scalapack uses 64bit Ints
# A flag to distinguish between 32bit and 64bit integers in linear
# algebra (Blas, Lapack, Scalapack)
use32bitLinAlg = True
use_32_bit_lin_alg = True
if use32bitLinAlg:
if use_32_bit_lin_alg:
args.extend([
'USE_64TO32=y',
'BLAS_SIZE=4',
@@ -135,7 +135,7 @@ def install(self, spec, prefix):
with working_dir('src'):
make('nwchem_config', *args)
if use32bitLinAlg:
if use_32_bit_lin_alg:
make('64_to_32', *args)
make(*args)

View file

@@ -74,8 +74,8 @@
'write_environ',
'rewrite_environ_files',
'mplib_content',
'foamAddPath',
'foamAddLib',
'foam_add_path',
'foam_add_lib',
'OpenfoamArch',
]
@@ -204,12 +204,12 @@ def rewrite_environ_files(environ, **kwargs):
filter_file(regex, replace, rcfile, backup=False)
def foamAddPath(*args):
def foam_add_path(*args):
"""A string with args prepended to 'PATH'"""
return '"' + ':'.join(args) + ':${PATH}"'
def foamAddLib(*args):
def foam_add_lib(*args):
"""A string with args prepended to 'LD_LIBRARY_PATH'"""
return '"' + ':'.join(args) + ':${LD_LIBRARY_PATH}"'
@@ -553,21 +553,21 @@ def configure(self, spec, prefix):
('BOOST_ARCH_PATH', spec['boost'].prefix),
('CGAL_ARCH_PATH', spec['cgal'].prefix),
('LD_LIBRARY_PATH',
foamAddLib(
foam_add_lib(
pkglib(spec['boost'], '${BOOST_ARCH_PATH}'),
pkglib(spec['cgal'], '${CGAL_ARCH_PATH}'))),
],
'FFTW': [
('FFTW_ARCH_PATH', spec['fftw'].prefix), # Absolute
('LD_LIBRARY_PATH',
foamAddLib(
foam_add_lib(
pkglib(spec['fftw'], '${BOOST_ARCH_PATH}'))),
],
# User-defined MPI
'mpi-user': [
('MPI_ARCH_PATH', spec['mpi'].prefix), # Absolute
('LD_LIBRARY_PATH', foamAddLib(user_mpi['libdir'])),
('PATH', foamAddPath(user_mpi['bindir'])),
('LD_LIBRARY_PATH', foam_add_lib(user_mpi['libdir'])),
('PATH', foam_add_path(user_mpi['bindir'])),
],
'scotch': {},
'kahip': {},
@@ -596,12 +596,12 @@ def configure(self, spec, prefix):
}
if '+paraview' in spec:
pvMajor = 'paraview-{0}'.format(spec['paraview'].version.up_to(2))
pvmajor = 'paraview-{0}'.format(spec['paraview'].version.up_to(2))
self.etc_config['paraview'] = [
('ParaView_DIR', spec['paraview'].prefix),
('ParaView_INCLUDE_DIR', '${ParaView_DIR}/include/' + pvMajor),
('PV_PLUGIN_PATH', '$FOAM_LIBBIN/' + pvMajor),
('PATH', foamAddPath('${ParaView_DIR}/bin')),
('ParaView_INCLUDE_DIR', '${ParaView_DIR}/include/' + pvmajor),
('PV_PLUGIN_PATH', '$FOAM_LIBBIN/' + pvmajor),
('PATH', foam_add_path('${ParaView_DIR}/bin')),
]
if '+vtk' in spec:

View file

@@ -188,11 +188,11 @@ class OpenspeedshopUtils(CMakePackage):
build_directory = 'build_openspeedshop'
def set_CrayLoginNode_cmakeOptions(self, spec, cmakeOptions):
# Appends to cmakeOptions the options that will enable the appropriate
def set_cray_login_node_cmake_options(self, spec, cmake_options):
# Appends to cmake_options the options that will enable the appropriate
# Cray login node libraries
CrayLoginNodeOptions = []
cray_login_node_options = []
rt_platform = "cray"
# How do we get the compute node (CNL) cbtf package install
@@ -205,12 +205,12 @@ def set_CrayLoginNode_cmakeOptions(self, spec, cmakeOptions):
# Equivalent to install-tool cmake arg:
# '-DCBTF_KRELL_CN_RUNTIME_DIR=%s'
# % <base dir>/cbtf_v2.3.1.release/compute)
CrayLoginNodeOptions.append('-DCBTF_KRELL_CN_RUNTIME_DIR=%s'
% be_ck.prefix)
CrayLoginNodeOptions.append('-DRUNTIME_PLATFORM=%s'
% rt_platform)
cray_login_node_options.append('-DCBTF_KRELL_CN_RUNTIME_DIR=%s'
% be_ck.prefix)
cray_login_node_options.append('-DRUNTIME_PLATFORM=%s'
% rt_platform)
cmakeOptions.extend(CrayLoginNodeOptions)
cmake_options.extend(cray_login_node_options)
def cmake_args(self):
# Appends base options to cmake_args
@@ -224,7 +224,7 @@ def cmake_args(self):
instrumentor_setting = "cbtf"
if spec.satisfies('+runtime'):
self.set_defaultbase_cmakeOptions(spec, cmake_args)
self.set_defaultbase_cmake_options(spec, cmake_args)
cmake_args.extend(
['-DCMAKE_CXX_FLAGS=%s' % compile_flags,
@@ -237,7 +237,7 @@ def cmake_args(self):
else:
# Appends base options to cmake_args
self.set_defaultbase_cmakeOptions(spec, cmake_args)
self.set_defaultbase_cmake_options(spec, cmake_args)
cmake_args.extend(
['-DCMAKE_CXX_FLAGS=%s' % compile_flags,
'-DCMAKE_C_FLAGS=%s' % compile_flags,
@@ -252,63 +252,63 @@ def cmake_args(self):
# components/libraries first then pass
# those libraries to the openspeedshop
# login node build
self.set_CrayLoginNode_cmakeOptions(spec, cmake_args)
self.set_cray_login_node_cmake_options(spec, cmake_args)
cmake_args.extend(['-DBUILD_QT3_GUI=FALSE'])
return cmake_args
def set_defaultbase_cmakeOptions(self, spec, cmakeOptions):
# Appends to cmakeOptions the options that will enable
def set_defaultbase_cmake_options(self, spec, cmake_options):
# Appends to cmake_options the options that will enable
# the appropriate base level options to the openspeedshop
# cmake build.
python_exe = spec['python'].command.path
python_library = spec['python'].libs[0]
python_include = spec['python'].headers.directories[0]
BaseOptions = []
base_options = []
BaseOptions.append('-DBINUTILS_DIR=%s' % spec['binutils'].prefix)
BaseOptions.append('-DLIBELF_DIR=%s' % spec['elf'].prefix)
BaseOptions.append('-DLIBDWARF_DIR=%s' % spec['libdwarf'].prefix)
BaseOptions.append('-DPYTHON_EXECUTABLE=%s' % python_exe)
BaseOptions.append('-DPYTHON_INCLUDE_DIR=%s' % python_include)
BaseOptions.append('-DPYTHON_LIBRARY=%s' % python_library)
BaseOptions.append('-DBoost_NO_SYSTEM_PATHS=TRUE')
BaseOptions.append('-DBoost_NO_BOOST_CMAKE=TRUE')
BaseOptions.append('-DBOOST_ROOT=%s' % spec['boost'].prefix)
BaseOptions.append('-DBoost_DIR=%s' % spec['boost'].prefix)
BaseOptions.append('-DBOOST_LIBRARYDIR=%s' % spec['boost'].prefix.lib)
BaseOptions.append('-DDYNINST_DIR=%s' % spec['dyninst'].prefix)
base_options.append('-DBINUTILS_DIR=%s' % spec['binutils'].prefix)
base_options.append('-DLIBELF_DIR=%s' % spec['elf'].prefix)
base_options.append('-DLIBDWARF_DIR=%s' % spec['libdwarf'].prefix)
base_options.append('-DPYTHON_EXECUTABLE=%s' % python_exe)
base_options.append('-DPYTHON_INCLUDE_DIR=%s' % python_include)
base_options.append('-DPYTHON_LIBRARY=%s' % python_library)
base_options.append('-DBoost_NO_SYSTEM_PATHS=TRUE')
base_options.append('-DBoost_NO_BOOST_CMAKE=TRUE')
base_options.append('-DBOOST_ROOT=%s' % spec['boost'].prefix)
base_options.append('-DBoost_DIR=%s' % spec['boost'].prefix)
base_options.append('-DBOOST_LIBRARYDIR=%s' % spec['boost'].prefix.lib)
base_options.append('-DDYNINST_DIR=%s' % spec['dyninst'].prefix)
cmakeOptions.extend(BaseOptions)
cmake_options.extend(base_options)
def set_mpi_cmakeOptions(self, spec, cmakeOptions):
# Appends to cmakeOptions the options that will enable
def set_mpi_cmake_options(self, spec, cmake_options):
# Appends to cmake_options the options that will enable
# the appropriate MPI implementations
MPIOptions = []
mpi_options = []
# openmpi
if spec.satisfies('+openmpi'):
MPIOptions.append('-DOPENMPI_DIR=%s' % spec['openmpi'].prefix)
mpi_options.append('-DOPENMPI_DIR=%s' % spec['openmpi'].prefix)
# mpich
if spec.satisfies('+mpich'):
MPIOptions.append('-DMPICH_DIR=%s' % spec['mpich'].prefix)
mpi_options.append('-DMPICH_DIR=%s' % spec['mpich'].prefix)
# mpich2
if spec.satisfies('+mpich2'):
MPIOptions.append('-DMPICH2_DIR=%s' % spec['mpich2'].prefix)
mpi_options.append('-DMPICH2_DIR=%s' % spec['mpich2'].prefix)
# mvapich
if spec.satisfies('+mvapich'):
MPIOptions.append('-DMVAPICH_DIR=%s' % spec['mvapich'].prefix)
mpi_options.append('-DMVAPICH_DIR=%s' % spec['mvapich'].prefix)
# mvapich2
if spec.satisfies('+mvapich2'):
MPIOptions.append('-DMVAPICH2_DIR=%s' % spec['mvapich2'].prefix)
mpi_options.append('-DMVAPICH2_DIR=%s' % spec['mvapich2'].prefix)
# mpt
if spec.satisfies('+mpt'):
MPIOptions.append('-DMPT_DIR=%s' % spec['mpt'].prefix)
mpi_options.append('-DMPT_DIR=%s' % spec['mpt'].prefix)
cmakeOptions.extend(MPIOptions)
cmake_options.extend(mpi_options)
def setup_environment(self, spack_env, run_env):
"""Set up the compile and runtime environments for a package."""

View file

@@ -188,11 +188,11 @@ class Openspeedshop(CMakePackage):
build_directory = 'build_openspeedshop'
def set_CrayLoginNode_cmakeOptions(self, spec, cmakeOptions):
# Appends to cmakeOptions the options that will enable the appropriate
def set_cray_login_node_cmake_options(self, spec, cmake_options):
# Appends to cmake_options the options that will enable the appropriate
# Cray login node libraries
CrayLoginNodeOptions = []
cray_login_node_options = []
rt_platform = "cray"
# How do we get the compute node (CNL) cbtf package install
@@ -206,12 +206,12 @@ def set_CrayLoginNode_cmakeOptions(self, spec, cmakeOptions):
# Equivalent to install-tool cmake arg:
# '-DCBTF_KRELL_CN_RUNTIME_DIR=%s'
# % <base dir>/cbtf_v2.3.1.release/compute)
CrayLoginNodeOptions.append('-DCBTF_KRELL_CN_RUNTIME_DIR=%s'
% be_ck.prefix)
CrayLoginNodeOptions.append('-DRUNTIME_PLATFORM=%s'
% rt_platform)
cray_login_node_options.append('-DCBTF_KRELL_CN_RUNTIME_DIR=%s'
% be_ck.prefix)
cray_login_node_options.append('-DRUNTIME_PLATFORM=%s'
% rt_platform)
cmakeOptions.extend(CrayLoginNodeOptions)
cmake_options.extend(cray_login_node_options)
def cmake_args(self):
@@ -226,7 +226,7 @@ def cmake_args(self):
if spec.satisfies('+runtime'):
# Appends base options to cmake_args
self.set_defaultbase_cmakeOptions(spec, cmake_args)
self.set_defaultbase_cmake_options(spec, cmake_args)
cmake_args.extend(
['-DCMAKE_CXX_FLAGS=%s' % compile_flags,
'-DCMAKE_C_FLAGS=%s' % compile_flags,
@@ -238,7 +238,7 @@ def cmake_args(self):
else:
# Appends base options to cmake_args
self.set_defaultbase_cmakeOptions(spec, cmake_args)
self.set_defaultbase_cmake_options(spec, cmake_args)
guitype = self.spec.variants['gui'].value
cmake_args.extend(
['-DCMAKE_CXX_FLAGS=%s' % compile_flags,
@@ -265,61 +265,61 @@ def cmake_args(self):
# components/libraries first then pass
# those libraries to the openspeedshop
# login node build
self.set_CrayLoginNode_cmakeOptions(spec, cmake_args)
self.set_cray_login_node_cmake_options(spec, cmake_args)
return cmake_args
def set_defaultbase_cmakeOptions(self, spec, cmakeOptions):
# Appends to cmakeOptions the options that will enable
def set_defaultbase_cmake_options(self, spec, cmake_options):
# Appends to cmake_options the options that will enable
# the appropriate base level options to the openspeedshop
# cmake build.
python_exe = spec['python'].command.path
python_library = spec['python'].libs[0]
python_include = spec['python'].headers.directories[0]
BaseOptions = []
base_options = []
BaseOptions.append('-DBINUTILS_DIR=%s' % spec['binutils'].prefix)
BaseOptions.append('-DLIBELF_DIR=%s' % spec['elf'].prefix)
BaseOptions.append('-DLIBDWARF_DIR=%s' % spec['libdwarf'].prefix)
BaseOptions.append('-DPYTHON_EXECUTABLE=%s' % python_exe)
BaseOptions.append('-DPYTHON_INCLUDE_DIR=%s' % python_include)
BaseOptions.append('-DPYTHON_LIBRARY=%s' % python_library)
BaseOptions.append('-DBoost_NO_SYSTEM_PATHS=TRUE')
BaseOptions.append('-DBoost_NO_BOOST_CMAKE=TRUE')
BaseOptions.append('-DBOOST_ROOT=%s' % spec['boost'].prefix)
BaseOptions.append('-DBoost_DIR=%s' % spec['boost'].prefix)
BaseOptions.append('-DBOOST_LIBRARYDIR=%s' % spec['boost'].prefix.lib)
BaseOptions.append('-DDYNINST_DIR=%s' % spec['dyninst'].prefix)
base_options.append('-DBINUTILS_DIR=%s' % spec['binutils'].prefix)
base_options.append('-DLIBELF_DIR=%s' % spec['elf'].prefix)
base_options.append('-DLIBDWARF_DIR=%s' % spec['libdwarf'].prefix)
base_options.append('-DPYTHON_EXECUTABLE=%s' % python_exe)
base_options.append('-DPYTHON_INCLUDE_DIR=%s' % python_include)
base_options.append('-DPYTHON_LIBRARY=%s' % python_library)
base_options.append('-DBoost_NO_SYSTEM_PATHS=TRUE')
base_options.append('-DBoost_NO_BOOST_CMAKE=TRUE')
base_options.append('-DBOOST_ROOT=%s' % spec['boost'].prefix)
base_options.append('-DBoost_DIR=%s' % spec['boost'].prefix)
base_options.append('-DBOOST_LIBRARYDIR=%s' % spec['boost'].prefix.lib)
base_options.append('-DDYNINST_DIR=%s' % spec['dyninst'].prefix)
cmakeOptions.extend(BaseOptions)
cmake_options.extend(base_options)
def set_mpi_cmakeOptions(self, spec, cmakeOptions):
# Appends to cmakeOptions the options that will enable
def set_mpi_cmake_options(self, spec, cmake_options):
# Appends to cmake_options the options that will enable
# the appropriate MPI implementations
MPIOptions = []
mpi_options = []
# openmpi
if spec.satisfies('+openmpi'):
MPIOptions.append('-DOPENMPI_DIR=%s' % spec['openmpi'].prefix)
mpi_options.append('-DOPENMPI_DIR=%s' % spec['openmpi'].prefix)
# mpich
if spec.satisfies('+mpich'):
MPIOptions.append('-DMPICH_DIR=%s' % spec['mpich'].prefix)
mpi_options.append('-DMPICH_DIR=%s' % spec['mpich'].prefix)
# mpich2
if spec.satisfies('+mpich2'):
MPIOptions.append('-DMPICH2_DIR=%s' % spec['mpich2'].prefix)
mpi_options.append('-DMPICH2_DIR=%s' % spec['mpich2'].prefix)
# mvapich
if spec.satisfies('+mvapich'):
MPIOptions.append('-DMVAPICH_DIR=%s' % spec['mvapich'].prefix)
mpi_options.append('-DMVAPICH_DIR=%s' % spec['mvapich'].prefix)
# mvapich2
if spec.satisfies('+mvapich2'):
MPIOptions.append('-DMVAPICH2_DIR=%s' % spec['mvapich2'].prefix)
mpi_options.append('-DMVAPICH2_DIR=%s' % spec['mvapich2'].prefix)
# mpt
if spec.satisfies('+mpt'):
MPIOptions.append('-DMPT_DIR=%s' % spec['mpt'].prefix)
mpi_options.append('-DMPT_DIR=%s' % spec['mpt'].prefix)
cmakeOptions.extend(MPIOptions)
cmake_options.extend(mpi_options)
def setup_environment(self, spack_env, run_env):
"""Set up the compile and runtime environments for a package."""

View file

@@ -141,8 +141,8 @@ def setup_environment(self, spack_env, run_env):
@on_package_attributes(run_tests=True)
def check_install(self):
prefix = self.prefix
clik_example_C = Executable(join_path(prefix.bin, 'clik_example_C'))
clik_example_c = Executable(join_path(prefix.bin, 'clik_example_C'))
with working_dir('spack-check', create=True):
clik_example_C(join_path(prefix, 'share', 'clik',
clik_example_c(join_path(prefix, 'share', 'clik',
'plc_2.0', 'hi_l', 'plik',
'plik_dx11dr2_HM_v18_TT.clik'))

View file

@@ -38,6 +38,6 @@ class Platypus(Package):
depends_on('htslib')
def install(self, spec, prefix):
buildPlatypus = Executable('./buildPlatypus.sh')
buildPlatypus()
build_platypus = Executable('./buildPlatypus.sh')
build_platypus()
install_tree('.', prefix.bin)

View file

@@ -51,13 +51,13 @@ def configure_args(self):
# The type of MPI. Supported values are:
# OPENMPI, LAM, MPICH, MPICH2, or CRAY
if mpi_name == 'openmpi':
Rmpi_type = 'OPENMPI'
rmpi_type = 'OPENMPI'
elif mpi_name == 'mpich':
Rmpi_type = 'MPICH2'
rmpi_type = 'MPICH2'
else:
raise InstallError('Unsupported MPI type')
return [
'--with-Rmpi-type={0}'.format(Rmpi_type),
'--with-Rmpi-type={0}'.format(rmpi_type),
'--with-mpi={0}'.format(spec['mpi'].prefix),
]

View file

@@ -107,16 +107,16 @@ def configure_args(self):
spec = self.spec
prefix = self.prefix
tclConfig_path = join_path(spec['tcl'].prefix.lib, 'tclConfig.sh')
tkConfig_path = join_path(spec['tk'].prefix.lib, 'tkConfig.sh')
tcl_config_path = join_path(spec['tcl'].prefix.lib, 'tclConfig.sh')
tk_config_path = join_path(spec['tk'].prefix.lib, 'tkConfig.sh')
config_args = [
'--libdir={0}'.format(join_path(prefix, 'rlib')),
'--enable-R-shlib',
'--enable-BLAS-shlib',
'--enable-R-framework=no',
'--with-tcl-config={0}'.format(tclConfig_path),
'--with-tk-config={0}'.format(tkConfig_path),
'--with-tcl-config={0}'.format(tcl_config_path),
'--with-tk-config={0}'.format(tk_config_path),
]
if '+external-lapack' in spec:

View file

@@ -48,8 +48,8 @@ class Tcptrace(AutotoolsPackage):
@run_after('configure')
def patch_makefile(self):
# see https://github.com/blitz/tcptrace/blob/master/README.linux
Makefile = FileFilter('Makefile')
Makefile.filter(
makefile = FileFilter('Makefile')
makefile.filter(
"PCAP_LDLIBS = -lpcap",
"DEFINES += -D_BSD_SOURCE\nPCAP_LDLIBS = -lpcap")

View file

@@ -38,7 +38,7 @@ class Tcsh(AutotoolsPackage):
version('6.20.00', '59d40ef40a68e790d95e182069431834')
def fedora_patch(commit, file, **kwargs):
def fedora_patch(commit, file, **kwargs): # noqa
prefix = 'https://src.fedoraproject.org/rpms/tcsh/raw/{0}/f/'.format(commit)
patch('{0}{1}'.format(prefix, file), **kwargs)

View file

@@ -74,7 +74,7 @@ def install_include(self):
# we need to fix the CXX and LINK paths, as they point to the spack
# wrapper scripts which aren't usable without spack
@run_after('install')
def patch_CXX(self):
def patch_cxx(self):
filter_file(r'^CXX\s*=.*', 'CXX = {0}'.format(self.compiler.cxx),
join_path(self.prefix.include, 'verilated.mk'))
filter_file(r'^LINK\s*=.*', 'LINK = {0}'.format(self.compiler.cxx),

View file

@@ -154,31 +154,31 @@ def cmake_args(self):
if '+osmesa' in spec:
prefix = spec['mesa'].prefix
osmesaIncludeDir = prefix.include
osmesaLibrary = os.path.join(prefix.lib, 'libOSMesa.so')
osmesa_include_dir = prefix.include
osmesa_library = os.path.join(prefix.lib, 'libOSMesa.so')
useParam = 'VTK_USE_X'
use_param = 'VTK_USE_X'
if 'darwin' in spec.architecture:
useParam = 'VTK_USE_COCOA'
use_param = 'VTK_USE_COCOA'
cmake_args.extend([
'-D{0}:BOOL=OFF'.format(useParam),
'-D{0}:BOOL=OFF'.format(use_param),
'-DVTK_OPENGL_HAS_OSMESA:BOOL=ON',
'-DOSMESA_INCLUDE_DIR:PATH={0}'.format(osmesaIncludeDir),
'-DOSMESA_LIBRARY:FILEPATH={0}'.format(osmesaLibrary),
'-DOSMESA_INCLUDE_DIR:PATH={0}'.format(osmesa_include_dir),
'-DOSMESA_LIBRARY:FILEPATH={0}'.format(osmesa_library),
])
else:
prefix = spec['opengl'].prefix
openglIncludeDir = prefix.include
openglLibrary = os.path.join(prefix.lib, 'libGL.so')
opengl_include_dir = prefix.include
opengl_library = os.path.join(prefix.lib, 'libGL.so')
if 'darwin' in spec.architecture:
openglIncludeDir = prefix
openglLibrary = prefix
opengl_include_dir = prefix
opengl_library = prefix
cmake_args.extend([
'-DOPENGL_INCLUDE_DIR:PATH={0}'.format(openglIncludeDir),
'-DOPENGL_gl_LIBRARY:FILEPATH={0}'.format(openglLibrary)
'-DOPENGL_INCLUDE_DIR:PATH={0}'.format(opengl_include_dir),
'-DOPENGL_gl_LIBRARY:FILEPATH={0}'.format(opengl_library)
])
if spec.satisfies('@:6.1.0'):