Allow spack to build against external non-spack-installed packages.
commit 650c9d4e36 (parent 53d70fff01)
10 changed files with 387 additions and 43 deletions
@@ -77,6 +77,12 @@
from spack.preferred_packages import PreferredPackages
pkgsort = PreferredPackages()

#
# This tests ABI compatibility between packages
#
from spack.abi import ABI
abi = ABI()

#
# This controls how things are concretized in spack.
# Replace it with a subclass if you want different
lib/spack/spack/abi.py (new file, 128 lines)
@@ -0,0 +1,128 @@
##############################################################################
# Copyright (c) 2015, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################

import os
import spack
import spack.spec
from spack.spec import CompilerSpec
from spack.util.executable import Executable, ProcessError
from llnl.util.lang import memoized


class ABI(object):
    """This class provides methods to test ABI compatibility between specs.
       The current implementation is rather rough and could be improved."""

    def architecture_compatible(self, parent, child):
        """Returns true iff the parent and child specs have ABI compatible architectures."""
        return not parent.architecture or not child.architecture \
            or parent.architecture == child.architecture


    @memoized
    def _gcc_get_libstdcxx_version(self, version):
        """Returns gcc ABI compatibility info by getting the library version of
           a compiler's libstdc++.so or libgcc_s.so"""
        spec = CompilerSpec("gcc", version)
        compilers = spack.compilers.compilers_for_spec(spec)
        if not compilers:
            return None
        compiler = compilers[0]
        rungcc = None
        libname = None
        output = None
        if compiler.cxx:
            rungcc = Executable(compiler.cxx)
            libname = "libstdc++.so"
        elif compiler.cc:
            rungcc = Executable(compiler.cc)
            libname = "libgcc_s.so"
        else:
            return None
        try:
            output = rungcc("--print-file-name=%s" % libname, return_output=True)
        except ProcessError, e:
            return None
        if not output:
            return None
        libpath = os.readlink(output.strip())
        if not libpath:
            return None
        return os.path.basename(libpath)


    @memoized
    def _gcc_compiler_compare(self, pversion, cversion):
        """Returns true iff the gcc versions pversion and cversion
           are ABI compatible."""
        plib = self._gcc_get_libstdcxx_version(pversion)
        clib = self._gcc_get_libstdcxx_version(cversion)
        if not plib or not clib:
            return False
        return plib == clib


    def _intel_compiler_compare(self, pversion, cversion):
        """Returns true iff the intel versions pversion and cversion
           are ABI compatible."""
        # Test major and minor versions.  Ignore build version.
        if len(pversion.version) < 2 or len(cversion.version) < 2:
            return False
        return (pversion.version[0] == cversion.version[0]) and \
               (pversion.version[1] == cversion.version[1])


    def compiler_compatible(self, parent, child, **kwargs):
        """Returns true iff the compilers for parent and child specs are ABI compatible."""
        if not parent.compiler or not child.compiler:
            return True

        if parent.compiler.name != child.compiler.name:
            # Different compiler families are assumed ABI incompatible.
            return False

        if kwargs.get('loose', False):
            return True

        for pversion in parent.compiler.versions:
            for cversion in child.compiler.versions:
                # For a few compilers use specialized comparisons.
                # Otherwise match on exact version.
                if pversion.satisfies(cversion):
                    return True
                elif parent.compiler.name == "gcc" and \
                        self._gcc_compiler_compare(pversion, cversion):
                    return True
                elif parent.compiler.name == "intel" and \
                        self._intel_compiler_compare(pversion, cversion):
                    return True
        return False


    def compatible(self, parent, child, **kwargs):
        """Returns true iff a parent and child spec are ABI compatible."""
        loosematch = kwargs.get('loose', False)
        return self.architecture_compatible(parent, child) and \
               self.compiler_compatible(parent, child, loose=loosematch)
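For orientation, a minimal sketch of how this new helper gets used: the __init__.py hunk above creates a module-level instance (abi = ABI()), and the concretizer hunks below call spack.abi.compatible() on candidate specs. The spec strings here are hypothetical examples, not part of the commit.

import spack
import spack.spec

# Hypothetical parent/candidate pair; in the concretizer these come out of the DAG.
parent = spack.spec.Spec('mpileaks%gcc@4.8')
candidate = spack.spec.Spec('mvapich2%gcc@4.9')

# Strict check: compatible architectures, plus compiler versions that match
# exactly or pass the gcc/intel heuristics defined in ABI above.
print(spack.abi.compatible(parent, candidate))

# Loose check: the same compiler family is considered good enough.
print(spack.abi.compatible(parent, candidate, loose=True))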
@@ -33,13 +33,16 @@
TODO: make this customizable and allow users to configure
      concretization policies.
"""
import spack
import spack.spec
import spack.compilers
import spack.architecture
import spack.error
from spack.version import *
from functools import partial

from spec import DependencyMap
from itertools import chain
from spack.config import *


class DefaultConcretizer(object):
@@ -48,6 +51,107 @@ class DefaultConcretizer(object):
       default concretization strategies, or you can override all of them.
    """

    def _find_other_spec(self, spec, condition):
        """Searches the dag from spec in an intelligent order and looks
           for a spec that matches a condition"""
        dagiter = chain(spec.traverse(direction='parents'),
                        spec.traverse(direction='children'))
        found = next((x for x in dagiter if x is not spec and condition(x)), None)
        if found:
            return found
        dagiter = chain(spec.traverse(direction='parents'),
                        spec.traverse(direction='children'))
        searched = list(dagiter)
        found = next((x for x in spec.root.traverse()
                      if x not in searched and x is not spec and condition(x)), None)
        if found:
            return found
        if condition(spec):
            return spec
        return None


    def _valid_virtuals_and_externals(self, spec):
        """Returns a list of spec/external-path pairs for both virtuals and
           externals that can concretize this spec."""

        # Get a list of candidate packages that could satisfy this spec.
        packages = []
        if spec.virtual:
            providers = spack.db.providers_for(spec)
            if not providers:
                raise UnsatisfiableProviderSpecError(providers[0], spec)
            spec_w_preferred_providers = self._find_other_spec(spec, \
                lambda(x): spack.pkgsort.spec_has_preferred_provider(x.name, spec.name))
            if not spec_w_preferred_providers:
                spec_w_preferred_providers = spec
            provider_cmp = partial(spack.pkgsort.provider_compare,
                                   spec_w_preferred_providers.name, spec.name)
            packages = sorted(providers, cmp=provider_cmp)
        else:
            if not spec_externals(spec) or spec.external:
                return None
            packages = [spec]

        # For each candidate package, if it has externals, add those to the
        # candidates; if it is nobuild, then only add the externals.
        result = []
        all_compilers = spack.compilers.all_compilers()
        for pkg in packages:
            externals = spec_externals(pkg)
            buildable = not is_spec_nobuild(pkg)
            if buildable:
                result.append((pkg, None))
            if externals:
                sorted_externals = sorted(externals, cmp=lambda a,b: a[0].__cmp__(b[0]))
                for external in sorted_externals:
                    if external[0].satisfies(spec):
                        result.append(external)
        if not result:
            raise NoBuildError(spec)
        return result


    def concretize_virtual_and_external(self, spec):
        """From a list of candidate virtual and external packages, concretize
           to one that is ABI compatible with the rest of the DAG."""
        candidates = self._valid_virtuals_and_externals(spec)
        if not candidates:
            return False

        # Find another spec in the dag that has a compiler.  We'll use that
        # spec to test compiler compatibility.
        other_spec = self._find_other_spec(spec, lambda(x): x.compiler)
        if not other_spec:
            other_spec = spec.root

        # Choose an ABI-compatible candidate, or the first match otherwise.
        candidate = None
        if other_spec:
            candidate = next((c for c in candidates
                              if spack.abi.compatible(c[0], other_spec)), None)
            if not candidate:
                # Try a looser ABI matching.
                candidate = next((c for c in candidates
                                  if spack.abi.compatible(c[0], other_spec, loose=True)), None)
        if not candidate:
            # Pick the first choice.
            candidate = candidates[0]
        external = candidate[1]
        candidate_spec = candidate[0]

        # Refine this spec to the candidate.
        changed = False
        if spec.virtual:
            spec._replace_with(candidate_spec)
            changed = True
        if spec._dup(candidate_spec, deps=False, cleardeps=False):
            changed = True
        if not spec.external and external:
            spec.external = external
            changed = True
        # If we're external then trim the dependencies.
        if external and spec.dependencies:
            changed = True
            spec.dependencies = DependencyMap()

        return changed


    def concretize_version(self, spec):
        """If the spec is already concrete, return.  Otherwise take
           the preferred version from spackconfig, and default to the package's
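The candidate selection in concretize_virtual_and_external() above reduces to a three-step fallback. Here is a condensed, self-contained sketch of just that step; the helper name pick_candidate is mine, and unlike the real method it does not rewrite the spec in place.

def pick_candidate(candidates, other_spec, abi):
    """Prefer a strictly ABI-compatible candidate, then a loose match
       (same compiler family), then simply the first choice."""
    candidate = next((c for c in candidates
                      if abi.compatible(c[0], other_spec)), None)
    if not candidate:
        candidate = next((c for c in candidates
                          if abi.compatible(c[0], other_spec, loose=True)), None)
    if not candidate:
        candidate = candidates[0]
    return candidate   # a (spec, external_path_or_None) pair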
@@ -156,25 +260,26 @@ def concretize_compiler(self, spec):
                spec.compiler in all_compilers):
            return False

        # Find the parent spec that has a compiler, or the root if none do
        parent_spec = next(p for p in spec.traverse(direction='parents')
                           if p.compiler is not None or not p.dependents)
        parent_compiler = parent_spec.compiler
        assert(parent_spec)
        # Find another spec that has a compiler, or the root if none do
        other_spec = self._find_other_spec(spec, lambda(x): x.compiler)
        if not other_spec:
            other_spec = spec.root
        other_compiler = other_spec.compiler
        assert(other_spec)

        # Check if the compiler is already fully specified
        if parent_compiler in all_compilers:
            spec.compiler = parent_compiler.copy()
        if other_compiler in all_compilers:
            spec.compiler = other_compiler.copy()
            return True

        # Filter the compilers into a sorted list based on the compiler_order from spackconfig
        compiler_list = all_compilers if not parent_compiler else spack.compilers.find(parent_compiler)
        cmp_compilers = partial(spack.pkgsort.compiler_compare, parent_spec.name)
        compiler_list = all_compilers if not other_compiler else spack.compilers.find(other_compiler)
        cmp_compilers = partial(spack.pkgsort.compiler_compare, other_spec.name)
        matches = sorted(compiler_list, cmp=cmp_compilers)
        if not matches:
            raise UnavailableCompilerVersionError(parent_compiler)
            raise UnavailableCompilerVersionError(other_compiler)

        # copy concrete version into parent_compiler
        # copy concrete version into other_compiler
        spec.compiler = matches[0].copy()
        assert(spec.compiler.concrete)
        return True   # things changed.
@@ -210,3 +315,12 @@ class NoValidVersionError(spack.error.SpackError):
    def __init__(self, spec):
        super(NoValidVersionError, self).__init__(
            "There are no valid versions for %s that match '%s'" % (spec.name, spec.versions))


class NoBuildError(spack.error.SpackError):
    """Raised when a package is configured with the nobuild option, but
       no satisfactory external versions can be found"""
    def __init__(self, spec):
        super(NoBuildError, self).__init__(
            "The spec '%s' is configured as nobuild, and no matching external installs were found" % spec.name)
@@ -90,9 +90,12 @@
import exceptions
import sys
import copy

from llnl.util.lang import memoized
import inspect
import glob
import imp
import spack.spec
import spack.error
from llnl.util.lang import memoized

from external import yaml
from external.yaml.error import MarkedYAMLError
@@ -116,6 +119,9 @@ def __init__(self, n, f, m):
_ConfigCategory('compilers', 'compilers.yaml', True)
_ConfigCategory('mirrors', 'mirrors.yaml', True)
_ConfigCategory('preferred', 'preferred.yaml', True)
_ConfigCategory('view', 'views.yaml', True)
_ConfigCategory('preferred', 'preferred.yaml', True)
_ConfigCategory('packages', 'packages.yaml', True)

"""Names of scopes and their corresponding configuration files."""
config_scopes = [('site', os.path.join(spack.etc_path, 'spack')),
@@ -233,6 +239,55 @@ def get_preferred_config():
    return get_config('preferred')


@memoized
def get_packages_config():
    """Get the externals configuration from config files"""
    package_config = get_config('packages')
    if not package_config:
        return {}
    indexed_packages = {}
    for p in package_config:
        package_name = spack.spec.Spec(p.keys()[0]).name
        if package_name not in indexed_packages:
            indexed_packages[package_name] = []
        indexed_packages[package_name].append({spack.spec.Spec(key): val for key, val in p.iteritems()})
    return indexed_packages


def is_spec_nobuild(spec):
    """Return true if the spec is configured as nobuild"""
    allpkgs = get_packages_config()
    name = spec.name
    if not name in allpkgs:
        return False
    for itm in allpkgs[name]:
        for pkg, conf in itm.iteritems():
            if pkg.satisfies(spec):
                if conf.get('nobuild', False):
                    return True
    return False


def spec_externals(spec):
    """Return a list of (spec, directory) pairs for each external location configured for spec"""
    allpkgs = get_packages_config()
    name = spec.name
    spec_locations = []

    if not name in allpkgs:
        return []
    for itm in allpkgs[name]:
        for pkg, conf in itm.iteritems():
            if not pkg.satisfies(spec):
                continue
            path = conf.get('path', None)
            if not path:
                continue
            spec_locations.append((pkg, path))
    return spec_locations


def get_config_scope_dirname(scope):
    """For a scope, return the config directory"""
    global config_scopes
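A small, runnable illustration of the data shape get_packages_config() builds and how spec_externals()/is_spec_nobuild() walk it. Plain strings and dicts stand in for Spec objects here, and the package name, spec strings, and install path are hypothetical.

# Stand-in for the indexed structure: {package name: [ {spec: options}, ... ]}
indexed_packages = {
    'mvapich2': [
        {'mvapich2@1.9%gcc@4.4.7': {'path': '/usr/local/tools/mvapich2-1.9'}},
        {'mvapich2': {'nobuild': True}},
    ],
}

def externals_for(name):
    """Mimic spec_externals(): collect (spec, path) pairs, skipping entries
       that configure no path (e.g. pure nobuild entries)."""
    locations = []
    for item in indexed_packages.get(name, []):
        for spec_string, conf in item.items():
            path = conf.get('path', None)
            if path:
                locations.append((spec_string, path))
    return locations

def nobuild(name):
    """Mimic is_spec_nobuild(): true if any matching entry sets nobuild."""
    return any(conf.get('nobuild', False)
               for item in indexed_packages.get(name, [])
               for conf in item.values())

print(externals_for('mvapich2'))   # [('mvapich2@1.9%gcc@4.4.7', '/usr/local/tools/mvapich2-1.9')]
print(nobuild('mvapich2'))         # True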
@@ -187,6 +187,14 @@ def hidden_file_paths(self):

    def relative_path_for_spec(self, spec):
        _check_concrete(spec)

        if spec.external:
            return spec.external

        enabled_variants = (
            '-' + v.name for v in spec.variants.values()
            if v.enabled)

        dir_name = "%s-%s-%s" % (
            spec.name,
            spec.version,
@@ -752,6 +752,9 @@ def do_install(self,
        if not self.spec.concrete:
            raise ValueError("Can only install concrete packages.")

        if self.spec.external:
            return

        if os.path.exists(self.prefix):
            tty.msg("%s is already installed in %s." % (self.name, self.prefix))
            return
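Taken together, the directory-layout and do_install() hunks mean an external spec is never built: the path stored on spec.external stands in for its install prefix. A toy sketch of that rule, with a stand-in class instead of the real Spec:

class FakeSpec(object):
    """Tiny stand-in for Spec carrying just the attribute this commit adds."""
    def __init__(self, name, external=None):
        self.name = name
        self.external = external

def install_location(spec, spack_install_root):
    # Externals keep the path the concretizer recorded on the spec;
    # everything else lands under the normal Spack install tree.
    if spec.external:
        return spec.external
    return '%s/%s' % (spack_install_root, spec.name)

print(install_location(FakeSpec('zlib'), '/opt/spack'))
print(install_location(FakeSpec('mvapich2', external='/usr/local/tools/mvapich2-1.9'), '/opt/spack'))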
@@ -35,8 +35,9 @@ def __init__(self):

    # Given a package name, a sort component (e.g., version, compiler, ...), and
    # a second_key (used by providers), return the list
    def _order_for_package(self, pkgname, component, second_key):
    def _order_for_package(self, pkgname, component, second_key, test_all=True):
        pkglist = [pkgname]
        if test_all:
            pkglist.append('all')
        for pkg in pkglist:
            if not pkg in self.preferred:

@@ -143,6 +144,11 @@ def provider_compare(self, pkgname, provider_str, a, b):
        return self._spec_compare(pkgname, 'providers', a, b, False, provider_str)


    def spec_has_preferred_provider(self, pkgname, provider_str):
        """Return True iff the named package has a list of preferred providers"""
        return bool(self._order_for_package(pkgname, 'providers', provider_str, False))


    def version_compare(self, pkgname, a, b):
        """Return less-than-0, 0, or greater-than-0 if version a of pkgname is
           respectively less-than, equal-to, or greater-than version b of pkgname.
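The new spec_has_preferred_provider() is what the concretizer hunk above uses to decide whose preferences should drive provider choice for a virtual dependency. A hedged usage sketch; the package and virtual names are hypothetical and assume a preferred.yaml that lists mpi providers for them:

import spack

# True iff 'mpileaks' itself has an ordered provider list for the virtual
# 'mpi' (test_all is passed as False, so the 'all' fallback is skipped).
if spack.pkgsort.spec_has_preferred_provider('mpileaks', 'mpi'):
    # provider_compare() will then sort candidate providers by that list.
    print("mpileaks carries mpi provider preferences")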
@@ -419,6 +419,7 @@ def __init__(self, spec_like, *dep_like, **kwargs):
        # package.py files for.
        self._normal = kwargs.get('normal', False)
        self._concrete = kwargs.get('concrete', False)
        self.external = None

        # This allows users to construct a spec DAG with literals.
        # Note that given two specs a and b, Spec(a) copies a, but

@@ -751,7 +752,6 @@ def _concretize_helper(self, presets=None, visited=None):
        # Concretize virtual dependencies last.  Because they're added
        # to presets below, their constraints will all be merged, but we'll
        # still need to select a concrete package later.
        if not self.virtual:
            changed |= any(
                (spack.concretizer.concretize_architecture(self),
                 spack.concretizer.concretize_compiler(self),
@@ -789,21 +789,18 @@ def _expand_virtual_packages(self):
           a problem.
        """
        changed = False
        while True:
            virtuals = [v for v in self.traverse() if v.virtual]
            if not virtuals:
                return changed

            for spec in virtuals:
                providers = spack.db.providers_for(spec)
                concrete = spack.concretizer.choose_provider(self, spec, providers)
                concrete = concrete.copy()
                spec._replace_with(concrete)
        done = False
        while not done:
            done = True
            for spec in list(self.traverse()):
                if spack.concretizer.concretize_virtual_and_external(spec):
                    done = False
                    changed = True

            # If there are duplicate providers or duplicate provider deps, this
            # consolidates them and merges constraints.
            changed |= self.normalize(force=True)
        return changed


    def concretize(self):
@@ -830,7 +827,6 @@ def concretize(self):
                       self._concretize_helper())
            changed = any(changes)
            force=True

        self._concrete = True


@@ -1346,15 +1342,26 @@ def _dup(self, other, **kwargs):
               Whether deps should be copied too.  Set to false to copy a
               spec but not its dependencies.
        """

        # We don't count dependencies as changes here
        changed = True
        if hasattr(self, 'name'):
            changed = (self.name != other.name and self.versions != other.versions and \
                       self.architecture != other.architecture and self.compiler != other.compiler and \
                       self.variants != other.variants and self._normal != other._normal and \
                       self.concrete != other.concrete and self.external != other.external)

        # Local node attributes get copied first.
        self.name = other.name
        self.versions = other.versions.copy()
        self.architecture = other.architecture
        self.compiler = other.compiler.copy() if other.compiler else None
        if kwargs.get('cleardeps', True):
            self.dependents = DependencyMap()
            self.dependencies = DependencyMap()
        self.variants = other.variants.copy()
        self.variants.spec = self
        self.external = other.external

        # If we copy dependencies, preserve DAG structure in the new spec
        if kwargs.get('deps', True):

@@ -1372,6 +1379,8 @@ def _dup(self, other, **kwargs):
        # Since we preserved structure, we can copy _normal safely.
        self._normal = other._normal
        self._concrete = other._concrete
        self.external = other.external
        return changed


    def copy(self, **kwargs):

@@ -1796,6 +1805,7 @@ def spec(self):
        spec.variants = VariantMap(spec)
        spec.architecture = None
        spec.compiler = None
        spec.external = None
        spec.dependents = DependencyMap()
        spec.dependencies = DependencyMap()
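With the spec.py changes above, _expand_virtual_packages() now sweeps the whole DAG with concretize_virtual_and_external() until nothing changes, so a virtual dependency can land on a configured external. A hypothetical end-to-end use; the package name and the existence of a configured mpi external are assumptions, not part of the commit:

import spack.spec

spec = spack.spec.Spec('mpileaks ^mpi')   # hypothetical package with a virtual mpi dependency
spec.concretize()

for dep in spec.traverse():
    if dep.external:
        print("%s -> %s" % (dep.name, dep.external))   # e.g. mvapich2 -> /usr/local/tools/mvapich2-1.9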
@@ -45,6 +45,7 @@ def setup_dependent_environment(self, module, spec, dep_spec):
        os.environ['MPICH_F77'] = 'f77'
        os.environ['MPICH_F90'] = 'f90'

        module.mpicc = join_path(self.prefix.bin, 'mpicc')

    def install(self, spec, prefix):
        config_args = ["--prefix=" + prefix,
@@ -12,9 +12,16 @@ class Mvapich2(Package):
    version('2.0', '9fbb68a4111a8b6338e476dc657388b4',
            url='http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.0.tar.gz')

    version('2.1', '0095ceecb19bbb7fb262131cb9c2cdd6',
            url='http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.1.tar.gz')

    provides('mpi@:2.2', when='@1.9')  # MVAPICH2-1.9 supports MPI 2.2
    provides('mpi@:3.0', when='@2.0')  # MVAPICH2-2.0 supports MPI 3.0

    variant('psm', default=False, description="build with psm")

    variant('pmi', default=False, description="build with pmi")
    depends_on('pmgr_collective', when='+pmi')

    def install(self, spec, prefix):
        # we'll set different configure flags depending on our environment

@@ -80,7 +87,13 @@ def install(self, spec, prefix):
            configure_args.append("--with-device=ch3:psm")
        else:
            # throw this flag on IB systems
            configure_args.append("--with-device=ch3:mrail", "--with-rdma=gen2")
            configure_args.append("--with-device=ch3:mrail")
            configure_args.append("--with-rdma=gen2")

        if "+pmi" in spec:
            configure_args.append("--with-pmi=pmgr_collective" % spec['pmgr_collective'].prefix)
        else:
            configure_args.append("--with-pmi=slurm")

        # TODO: shared-memory build

@@ -93,7 +106,7 @@ def install(self, spec, prefix):
            "--enable-f77", "--enable-fc", "--enable-cxx",
            "--enable-shared", "--enable-sharedlibs=gcc",
            "--enable-debuginfo",
            "--with-pm=no", "--with-pmi=slurm",
            "--with-pm=no",
            "--enable-romio", "--with-file-system=lustre+nfs+ufs",
            "--disable-mpe", "--without-mpe",
            "--disable-silent-rules",