Merge branch 'develop' of https://github.com/LLNL/spack into features/install_with_phases

Conflicts:
	lib/spack/spack/__init__.py
	var/spack/repos/builtin/packages/gmp/package.py
	var/spack/repos/builtin/packages/openjpeg/package.py
This commit is contained in:
alalazo 2016-07-15 11:02:55 +02:00
commit 893a5565fe
230 changed files with 1162 additions and 530 deletions

View file

@ -1286,6 +1286,31 @@ command line to find installed packages or to install packages with
particular constraints, and package authors can use specs to describe
relationships between packages.
Additionally, dependencies may be specified for specific use cases:
.. code-block:: python
depends_on("cmake", type="build")
depends_on("libelf", type=("build", "link"))
depends_on("python", type="run")
The dependency types are:
* **"build"**: made available during the project's build. The package will
be added to ``PATH``, the compiler include paths, and ``PYTHONPATH``.
Other projects which depend on this one will not have these modified
(building project X doesn't need project Y's build dependencies).
* **"link"**: the dependency is linked into the project. The package will be
added to the current package's ``rpath``.
* **"run"**: the dependency is used by the project at runtime. The package
will be added to ``PATH`` and ``PYTHONPATH``.
If not specified, ``type`` is assumed to be ``("build", "link")``. This is the
common case for compiled language usage. Also available are the aliases
``alldeps`` for all dependency types and ``nolink`` (``("build", "run")``) for
use by dependencies which are not expressed via a linker (e.g., Python or Lua
module loading).
.. _setup-dependent-environment:
``setup_dependent_environment()``

View file

@ -182,10 +182,13 @@
'EditableMakefile',
'Version',
'when',
'ver']
'ver',
'alldeps',
'nolinks']
from spack.package import Package, ExtensionConflictError
from spack.package import CMakePackage, AutotoolsPackage, EditableMakefile
from spack.version import Version, ver
from spack.spec import DependencySpec, alldeps, nolink
from spack.multimethod import when
import llnl.util.filesystem

View file

@ -254,7 +254,8 @@ def set_build_environment_variables(pkg, env, dirty=False):
env.set_path(SPACK_ENV_PATH, env_paths)
# Prefixes of all of the package's dependencies go in SPACK_DEPENDENCIES
dep_prefixes = [d.prefix for d in pkg.spec.traverse(root=False)]
dep_prefixes = [d.prefix
for d in pkg.spec.traverse(root=False, deptype='build')]
env.set_path(SPACK_DEPENDENCIES, dep_prefixes)
# Add dependencies to CMAKE_PREFIX_PATH
env.set_path('CMAKE_PREFIX_PATH', dep_prefixes)
@ -337,10 +338,6 @@ def set_module_variables_for_package(pkg, module):
# Don't use which for this; we want to find it in the current dir.
m.configure = Executable('./configure')
# TODO: shouldn't really use "which" here. Consider adding notion
# TODO: of build dependencies, as opposed to link dependencies.
# TODO: Currently, everything is a link dependency, but tools like
# TODO: this shouldn't be.
m.cmake = Executable('cmake')
m.ctest = Executable('ctest')
@ -380,9 +377,10 @@ def set_module_variables_for_package(pkg, module):
def get_rpaths(pkg):
"""Get a list of all the rpaths for a package."""
rpaths = [pkg.prefix.lib, pkg.prefix.lib64]
rpaths.extend(d.prefix.lib for d in pkg.spec.dependencies.values()
deps = pkg.spec.dependencies(deptype='link')
rpaths.extend(d.prefix.lib for d in deps
if os.path.isdir(d.prefix.lib))
rpaths.extend(d.prefix.lib64 for d in pkg.spec.dependencies.values()
rpaths.extend(d.prefix.lib64 for d in deps
if os.path.isdir(d.prefix.lib64))
# Second module is our compiler mod name. We use that to get rpaths from
# module show output.
@ -441,7 +439,7 @@ def setup_package(pkg, dirty=False):
load_external_modules(pkg)
# traverse in postorder so package can use vars from its dependencies
spec = pkg.spec
for dspec in pkg.spec.traverse(order='post', root=False):
for dspec in pkg.spec.traverse(order='post', root=False, deptype='build'):
# If a user makes their own package repo, e.g.
# spack.repos.mystuff.libelf.Libelf, and they inherit from
# an existing class like spack.repos.original.libelf.Libelf,

View file

@ -51,7 +51,7 @@ def fetch(parser, args):
for spec in specs:
if args.missing or args.dependencies:
to_fetch = set()
for s in spec.traverse():
for s in spec.traverse(deptype_query=spack.alldeps):
package = spack.repo.get(s)
if args.missing and package.installed:
continue

View file

@ -81,12 +81,14 @@ def print_text_info(pkg):
print " " + fmt % (name, default, desc)
print
print "Dependencies:"
if pkg.dependencies:
colify(pkg.dependencies, indent=4)
else:
print " None"
for deptype in ('build', 'link', 'run'):
print
print "%s Dependencies:" % deptype.capitalize()
deps = pkg.dependencies(deptype)
if deps:
colify(deps, indent=4)
else:
print " None"
print
print "Virtual packages: "

View file

@ -179,7 +179,7 @@ def mirror_create(args):
new_specs = set()
for spec in specs:
spec.concretize()
for s in spec.traverse():
for s in spec.traverse(deptype_query=spack.alldeps):
new_specs.add(s)
specs = list(new_specs)

View file

@ -87,7 +87,7 @@ def _find_modules(spec, modules_list):
return
if flags.recurse_dependencies:
for dep in spec.dependencies.values():
for dep in spec.dependencies():
_find_modules(dep, modules_list)
mod = module_types[mtype](spec)

View file

@ -80,11 +80,15 @@ def print_rst_package_list():
if pkg.versions:
print "Versions:"
print " " + ", ".join(str(v) for v in reversed(sorted(pkg.versions)))
if pkg.dependencies:
print "Dependencies"
print " " + ", ".join("`%s`_" % d if d != "mpi" else d
for d in pkg.dependencies)
print
for deptype in ('build', 'link', 'run'):
deps = pkg.dependencies(deptype)
if deps:
print "%s Dependencies" % deptype.capitalize()
print " " + ", ".join("`%s`_" % d if d != "mpi" else d
for d in build_deps)
print
print "Description:"
print pkg.format_doc(indent=2)
print

View file

@ -44,7 +44,7 @@ def setup_parser(subparser):
help="Display verbose build output while installing.")
subparser.add_argument(
'spec', nargs=argparse.REMAINDER,
help="specs to use for install. Must contain package AND verison.")
help="specs to use for install. Must contain package AND version.")
def setup(self, args):

View file

@ -133,7 +133,12 @@ def fetch_log(path):
def failed_dependencies(spec):
return set(item for item in spec.dependencies.itervalues() if not spack.repo.get(item).installed)
def get_deps(deptype):
return set(item for item in spec.dependencies(deptype)
if not spack.repo.get(item).installed)
link_deps = get_deps('link')
run_deps = get_deps('run')
return link_deps.union(run_deps)
def get_top_spec_or_die(args):

View file

@ -103,7 +103,7 @@ def cmp_externals(a, b):
usable.sort(cmp=cmp_externals)
return usable
# XXX(deptypes): Look here.
def choose_virtual_or_external(self, spec):
"""Given a list of candidate virtual and external packages, try to
find one that is most ABI compatible.
@ -394,8 +394,10 @@ def find_spec(spec, condition):
"""Searches the dag from spec in an intelligent order and looks
for a spec that matches a condition"""
# First search parents, then search children
dagiter = chain(spec.traverse(direction='parents', root=False),
spec.traverse(direction='children', root=False))
deptype = ('build', 'link')
dagiter = chain(
spec.traverse(direction='parents', deptype=deptype, root=False),
spec.traverse(direction='children', deptype=deptype, root=False))
visited = set()
for relative in dagiter:
if condition(relative):
@ -403,7 +405,7 @@ def find_spec(spec, condition):
visited.add(id(relative))
# Then search all other relatives in the DAG *except* spec
for relative in spec.root.traverse():
for relative in spec.root.traverse(deptypes=spack.alldeps):
if relative is spec: continue
if id(relative) in visited: continue
if condition(relative):

View file

@ -215,9 +215,14 @@ def _read_spec_from_yaml(self, hash_key, installs, parent_key=None):
# Add dependencies from other records in the install DB to
# form a full spec.
if 'dependencies' in spec_dict[spec.name]:
for dep_hash in spec_dict[spec.name]['dependencies'].values():
for dep in spec_dict[spec.name]['dependencies'].values():
if type(dep) == tuple:
dep_hash, deptypes = dep
else:
dep_hash = dep
deptypes = spack.alldeps
child = self._read_spec_from_yaml(dep_hash, installs, hash_key)
spec._add_dependency(child)
spec._add_dependency(child, deptypes)
# Specs from the database need to be marked concrete because
# they represent actual installations.
@ -334,7 +339,10 @@ def _check_ref_counts(self):
counts = {}
for key, rec in self._data.items():
counts.setdefault(key, 0)
for dep in rec.spec.dependencies.values():
# XXX(deptype): This checks all dependencies, but build
# dependencies might be able to be dropped in the
# future.
for dep in rec.spec.dependencies():
dep_key = dep.dag_hash()
counts.setdefault(dep_key, 0)
counts[dep_key] += 1
@ -406,7 +414,7 @@ def _add(self, spec, path, directory_layout=None, explicit=False):
else:
self._data[key] = InstallRecord(spec, path, True,
explicit=explicit)
for dep in spec.dependencies.values():
for dep in spec.dependencies(('link', 'run')):
self._increment_ref_count(dep, directory_layout)
def _increment_ref_count(self, spec, directory_layout=None):
@ -421,7 +429,7 @@ def _increment_ref_count(self, spec, directory_layout=None):
self._data[key] = InstallRecord(spec.copy(), path, installed)
for dep in spec.dependencies.values():
for dep in spec.dependencies('link'):
self._increment_ref_count(dep)
self._data[key].ref_count += 1
@ -466,7 +474,7 @@ def _decrement_ref_count(self, spec):
if rec.ref_count == 0 and not rec.installed:
del self._data[key]
for dep in spec.dependencies.values():
for dep in spec.dependencies('link'):
self._decrement_ref_count(dep)
def _remove(self, spec):
@ -480,7 +488,7 @@ def _remove(self, spec):
return rec.spec
del self._data[key]
for dep in rec.spec.dependencies.values():
for dep in rec.spec.dependencies('link'):
self._decrement_ref_count(dep)
# Returns the concrete spec so we know it in the case where a

View file

@ -171,7 +171,7 @@ def version(pkg, ver, checksum=None, **kwargs):
pkg.versions[Version(ver)] = kwargs
def _depends_on(pkg, spec, when=None):
def _depends_on(pkg, spec, when=None, type=None):
# If when is False do nothing
if when is False:
return
@ -180,10 +180,29 @@ def _depends_on(pkg, spec, when=None):
when = pkg.name
when_spec = parse_anonymous_spec(when, pkg.name)
if type is None:
# The default deptype is build and link because the common case is to
# build against a library which then turns into a runtime dependency
# due to the linker.
# XXX(deptype): Add 'run' to this? It's an uncommon dependency type,
# but is most backwards-compatible.
type = ('build', 'link')
if isinstance(type, str):
type = (type,)
for deptype in type:
if deptype not in spack.spec.alldeps:
raise UnknownDependencyTypeError('depends_on', pkg.name, deptype)
dep_spec = Spec(spec)
if pkg.name == dep_spec.name:
raise CircularReferenceError('depends_on', pkg.name)
pkg_deptypes = pkg._deptypes.setdefault(dep_spec.name, set())
for deptype in type:
pkg_deptypes.add(deptype)
conditions = pkg.dependencies.setdefault(dep_spec.name, {})
if when_spec in conditions:
conditions[when_spec].constrain(dep_spec, deps=False)
@ -191,13 +210,13 @@ def _depends_on(pkg, spec, when=None):
conditions[when_spec] = dep_spec
@directive('dependencies')
def depends_on(pkg, spec, when=None):
@directive(('dependencies', '_deptypes'))
def depends_on(pkg, spec, when=None, type=None):
"""Creates a dict of deps with specs defining when they apply."""
_depends_on(pkg, spec, when=when)
_depends_on(pkg, spec, when=when, type=type)
@directive(('extendees', 'dependencies'))
@directive(('extendees', 'dependencies', '_deptypes'))
def extends(pkg, spec, **kwargs):
"""Same as depends_on, but dependency is symlinked into parent prefix.
@ -326,3 +345,13 @@ def __init__(self, directive, package):
directive,
"Package '%s' cannot pass itself to %s" % (package, directive))
self.package = package
class UnknownDependencyTypeError(DirectiveError):
    """This is raised when a dependency is of an unknown type.

    Raised by ``_depends_on`` when a ``type=`` value passed to
    ``depends_on`` is not one of the recognized dependency types
    (see ``spack.spec.alldeps``).
    """
    def __init__(self, directive, package, deptype):
        # directive: name of the misused directive (e.g. 'depends_on')
        # package:   name of the package declaring the dependency
        # deptype:   the unrecognized dependency type value
        super(UnknownDependencyTypeError, self).__init__(
            directive,
            "Package '%s' cannot depend on a package via %s." %
            (package, deptype))
        self.package = package

View file

@ -285,7 +285,7 @@ def check_installed(self, spec):
return path
if spec.dag_hash() == installed_spec.dag_hash():
raise SpecHashCollisionError(installed_hash, spec_hash)
raise SpecHashCollisionError(spec, installed_spec)
else:
raise InconsistentInstallDirectoryError(
'Spec file in %s does not match hash!' % spec_file_path)
@ -431,7 +431,7 @@ class SpecHashCollisionError(DirectoryLayoutError):
def __init__(self, installed_spec, new_spec):
super(SpecHashCollisionError, self).__init__(
'Specs %s and %s have the same SHA-1 prefix!'
% installed_spec, new_spec)
% (installed_spec, new_spec))
class RemoveFailedError(DirectoryLayoutError):

View file

@ -80,12 +80,14 @@ def topological_sort(spec, **kwargs):
"""
reverse = kwargs.get('reverse', False)
# XXX(deptype): iterate over a certain kind of dependency. Maybe color
# edges based on the type of dependency?
if not reverse:
parents = lambda s: s.dependents
children = lambda s: s.dependencies
parents = lambda s: s.dependents()
children = lambda s: s.dependencies()
else:
parents = lambda s: s.dependencies
children = lambda s: s.dependents
parents = lambda s: s.dependencies()
children = lambda s: s.dependents()
# Work on a copy so this is nondestructive.
spec = spec.copy()

View file

@ -120,7 +120,7 @@ def dependencies(spec, request='all'):
return []
if request == 'direct':
return [xx for _, xx in spec.dependencies.items()]
return spec.dependencies()
# FIXME : during module file creation nodes seem to be visited multiple
# FIXME : times even if cover='nodes' is given. This work around permits

View file

@ -730,7 +730,7 @@ def extendee_spec(self):
name = next(iter(self.extendees))
# If the extendee is in the spec's deps already, return that.
for dep in self.spec.traverse():
for dep in self.spec.traverse(deptypes=('link', 'run')):
if name == dep.name:
return dep
@ -795,12 +795,13 @@ def preorder_traversal(self, visited=None, **kwargs):
yield self
for name in sorted(self.dependencies.keys()):
spec = self.dependencies[name]
dep_spec = self.get_dependency(name)
spec = dep_spec.spec
# currently, we do not descend into virtual dependencies, as this
# Currently, we do not descend into virtual dependencies, as this
# makes doing a sensible traversal much harder. We just assume
# that ANY of the virtual deps will work, which might not be true
# (due to conflicts or unsatisfiable specs). For now this is ok
# (due to conflicts or unsatisfiable specs). For now this is ok,
# but we might want to reinvestigate if we start using a lot of
# complicated virtual dependencies
# TODO: reinvestigate this.
@ -838,7 +839,9 @@ def installed_dependents(self):
for spec in spack.installed_db.query():
if self.name == spec.name:
continue
for dep in spec.traverse():
# XXX(deptype): Should build dependencies not count here?
#for dep in spec.traverse(deptype=('run')):
for dep in spec.traverse(deptype=spack.alldeps):
if self.spec == dep:
dependents.append(spec)
return dependents
@ -1060,7 +1063,7 @@ def do_install(self,
run_tests -- Run tests within the package's install()
"""
if not self.spec.concrete:
raise ValueError("Can only install concrete packages.")
raise ValueError("Can only install concrete packages: %s." % self.spec.name)
# No installation needed if package is external
if self.spec.external:
@ -1252,7 +1255,7 @@ def check_paths(path_list, filetype, predicate):
def do_install_dependencies(self, **kwargs):
# Pass along paths of dependencies here
for dep in self.spec.dependencies.values():
for dep in self.spec.dependencies():
dep.package.do_install(**kwargs)
@property
@ -1426,7 +1429,7 @@ def _sanity_check_extension(self):
(self.name, self.extendee.name))
def do_activate(self, force=False):
"""Called on an etension to invoke the extendee's activate method.
"""Called on an extension to invoke the extendee's activate method.
Commands should call this routine, and should not call
activate() directly.
@ -1438,7 +1441,7 @@ def do_activate(self, force=False):
# Activate any package dependencies that are also extensions.
if not force:
for spec in self.spec.traverse(root=False):
for spec in self.spec.traverse(root=False, deptype='run'):
if spec.package.extends(self.extendee_spec):
if not spec.package.activated:
spec.package.do_activate(force=force)
@ -1484,7 +1487,7 @@ def do_deactivate(self, **kwargs):
for name, aspec in activated.items():
if aspec == self.spec:
continue
for dep in aspec.traverse():
for dep in aspec.traverse(deptype='run'):
if self.spec == dep:
raise ActivationError(
"Cannot deactivate %s because %s is activated and depends on it." # NOQA: ignore=E501
@ -1570,9 +1573,10 @@ def fetch_remote_versions(self):
def rpath(self):
"""Get the rpath this package links with, as a list of paths."""
rpaths = [self.prefix.lib, self.prefix.lib64]
rpaths.extend(d.prefix.lib for d in self.spec.traverse(root=False)
deps = self.spec.dependencies(deptype='link')
rpaths.extend(d.prefix.lib for d in deps
if os.path.isdir(d.prefix.lib))
rpaths.extend(d.prefix.lib64 for d in self.spec.traverse(root=False)
rpaths.extend(d.prefix.lib64 for d in deps
if os.path.isdir(d.prefix.lib64))
return rpaths

View file

@ -155,6 +155,9 @@
every time we call str()"""
_any_version = VersionList([':'])
alldeps = ('build', 'link', 'run')
nolink = ('build', 'run')
def index_specs(specs):
"""Take a list of specs and return a dict of lists. Dict is
@ -291,6 +294,27 @@ def __repr__(self):
return str(self)
@key_ordering
class DependencySpec(object):
    """A single dependency edge in the spec DAG.

    Dependencies have conditions in which they apply. This stores both
    what is depended on (``spec``) and why it is a dependency: the
    dependency types (``deptypes``), e.g. ``('build', 'link')``.
    """

    def __init__(self, spec, deptypes):
        self.spec = spec          # the Spec that is depended on
        self.deptypes = deptypes  # tuple of dependency type names

    def _cmp_key(self):
        # Equality/ordering consider only the target spec, not the
        # dependency types through which it is reached.
        return self.spec

    def copy(self):
        # BUG FIX: the original referenced the nonexistent attribute
        # ``self.deptype``; the attribute set in __init__ is
        # ``self.deptypes``, so every copy() raised AttributeError.
        return DependencySpec(self.spec.copy(), self.deptypes)

    def __str__(self):
        return str(self.spec)
@key_ordering
class VariantSpec(object):
@ -440,11 +464,11 @@ class DependencyMap(HashableMap):
The DependencyMap is keyed by name. """
@property
def concrete(self):
return all(d.concrete for d in self.values())
return all(d.spec.concrete for d in self.values())
def __str__(self):
return ''.join(
["^" + str(self[name]) for name in sorted(self.keys())])
["^" + str(self[name].spec) for name in sorted(self.keys())])
@key_ordering
@ -472,13 +496,13 @@ def __init__(self, spec_like, *dep_like, **kwargs):
# writes directly into this Spec object.
other = spec_list[0]
self.name = other.name
self.dependents = other.dependents
self.versions = other.versions
self.architecture = other.architecture
self.compiler = other.compiler
self.compiler_flags = other.compiler_flags
self.compiler_flags.spec = self
self.dependencies = other.dependencies
self._dependencies = other._dependencies
self._dependents = other._dependents
self.variants = other.variants
self.variants.spec = self
self.namespace = other.namespace
@ -500,7 +524,49 @@ def __init__(self, spec_like, *dep_like, **kwargs):
# Spec(a, b) will copy a but just add b as a dep.
for dep in dep_like:
spec = dep if isinstance(dep, Spec) else Spec(dep)
self._add_dependency(spec)
# XXX(deptype): default deptypes
self._add_dependency(spec, ('build', 'link'))
def get_dependency(self, name):
    """Return the DependencySpec for the dependency called ``name``.

    Raises InvalidDependencyException if this spec has no dependency
    recorded under that name.
    """
    dep_spec = self._dependencies.get(name)
    if dep_spec is None:
        raise InvalidDependencyException(
            self.name + " does not depend on " + comma_or(name))
    return dep_spec
def _deptype_norm(self, deptype):
    """Normalize a ``deptype`` argument to a tuple-like of type names.

    ``None`` means "all dependency types"; a bare string is wrapped in
    a 1-tuple so callers can always do membership tests on the result.
    """
    if deptype is None:
        return alldeps
    return (deptype,) if isinstance(deptype, str) else deptype
def _find_deps(self, where, deptype):
    """Collect the specs in ``where`` reachable through ``deptype``.

    ``where`` is a name -> DependencySpec mapping; a dependency is
    included when any of its deptypes matches the requested set.
    """
    deptype = self._deptype_norm(deptype)
    matched = []
    for dep_spec in where.values():
        if deptype and any(t in deptype for t in dep_spec.deptypes):
            matched.append(dep_spec.spec)
    return matched
def dependencies(self, deptype=None):
    """Return a list of direct dependency Specs matching ``deptype``."""
    return self._find_deps(self._dependencies, deptype)

def dependents(self, deptype=None):
    """Return a list of direct dependent Specs matching ``deptype``."""
    return self._find_deps(self._dependents, deptype)

def _find_deps_dict(self, where, deptype):
    # Like _find_deps, but keeps the DependencySpec wrapper and pairs
    # it with the dependency's name for dict construction.
    deptype = self._deptype_norm(deptype)
    return [(dep.spec.name, dep)
            for dep in where.values()
            if deptype and any(d in deptype for d in dep.deptypes)]

def dependencies_dict(self, deptype=None):
    """Return a name -> DependencySpec dict of direct dependencies."""
    return dict(self._find_deps_dict(self._dependencies, deptype))

def dependents_dict(self, deptype=None):
    """Return a name -> DependencySpec dict of direct dependents."""
    return dict(self._find_deps_dict(self._dependents, deptype))
#
# Private routines here are called by the parser when building a spec.
@ -609,13 +675,13 @@ def _set_target(self, value):
if self.architecture.platform:
self.architecture.target = self.architecture.platform.target(value)
def _add_dependency(self, spec):
def _add_dependency(self, spec, deptypes):
"""Called by the parser to add another spec as a dependency."""
if spec.name in self.dependencies:
if spec.name in self._dependencies:
raise DuplicateDependencyError(
"Cannot depend on '%s' twice" % spec)
self.dependencies[spec.name] = spec
spec.dependents[self.name] = self
self._dependencies[spec.name] = DependencySpec(spec, deptypes)
spec._dependents[self.name] = DependencySpec(self, deptypes)
#
# Public interface
@ -632,15 +698,15 @@ def root(self):
installed). This will throw an assertion error if that is not
the case.
"""
if not self.dependents:
if not self._dependents:
return self
# If the spec has multiple dependents, ensure that they all
# lead to the same place. Spack shouldn't deal with any DAGs
# with multiple roots, so something's wrong if we find one.
depiter = iter(self.dependents.values())
first_root = next(depiter).root
assert(all(first_root is d.root for d in depiter))
depiter = iter(self._dependents.values())
first_root = next(depiter).spec.root
assert(all(first_root is d.spec.root for d in depiter))
return first_root
@property
@ -687,10 +753,21 @@ def concrete(self):
and self.architecture.concrete
and self.compiler and self.compiler.concrete
and self.compiler_flags.concrete
and self.dependencies.concrete)
and self._dependencies.concrete)
return self._concrete
def traverse(self, visited=None, d=0, **kwargs):
def traverse(self, visited=None, deptype=None, **kwargs):
    """Traverse the DAG, yielding plain Specs.

    Thin wrapper over ``traverse_with_deptype`` that unwraps the
    DependencySpec objects it yields.  With ``depth=True`` in kwargs,
    returns (depth, Spec) pairs instead of bare Specs.
    """
    nodes = self.traverse_with_deptype(
        visited=visited, deptype=deptype, **kwargs)
    if kwargs.get('depth', False):
        return [(depth, dep_spec.spec) for depth, dep_spec in nodes]
    return [dep_spec.spec for dep_spec in nodes]
def traverse_with_deptype(self, visited=None, d=0, deptype=None,
deptype_query=None, _self_deptype=None,
**kwargs):
"""Generic traversal of the DAG represented by this spec.
This will yield each node in the spec. Options:
@ -742,6 +819,12 @@ def traverse(self, visited=None, d=0, **kwargs):
direction = kwargs.get('direction', 'children')
order = kwargs.get('order', 'pre')
if deptype is None:
deptype = alldeps
if deptype_query is None:
deptype_query = ('link', 'run')
# Make sure kwargs have legal values; raise ValueError if not.
def validate(name, val, allowed_values):
if val not in allowed_values:
@ -759,30 +842,37 @@ def validate(name, val, allowed_values):
if key in visited and cover == 'nodes':
return
# Determine whether and what to yield for this node.
def return_val(res):
return (d, res) if depth else res
yield_me = yield_root or d > 0
result = (d, self) if depth else self
# Preorder traversal yields before successors
if yield_me and order == 'pre':
yield result
yield return_val(DependencySpec(self, _self_deptype))
deps = self.dependencies_dict(deptype)
# Edge traversal yields but skips children of visited nodes
if not (key in visited and cover == 'edges'):
# This code determines direction and yields the children/parents
successors = self.dependencies
successors = deps
if direction == 'parents':
successors = self.dependents
successors = self.dependents_dict()
visited.add(key)
for name in sorted(successors):
child = successors[name]
for elt in child.traverse(visited, d + 1, **kwargs):
children = child.spec.traverse_with_deptype(
visited, d=d + 1, deptype=deptype_query,
deptype_query=deptype_query,
_self_deptype=child.deptypes, **kwargs)
for elt in children:
yield elt
# Postorder traversal yields after successors
if yield_me and order == 'post':
yield result
yield return_val(DependencySpec(self, _self_deptype))
@property
def short_spec(self):
@ -807,6 +897,7 @@ def dag_hash(self, length=None):
if self._hash:
return self._hash[:length]
else:
# XXX(deptype): ignore 'build' dependencies here
yaml_text = yaml.dump(
self.to_node_dict(), default_flow_style=True, width=sys.maxint)
sha = hashlib.sha1(yaml_text)
@ -819,11 +910,13 @@ def to_node_dict(self):
params = dict((name, v.value) for name, v in self.variants.items())
params.update(dict((name, value)
for name, value in self.compiler_flags.items()))
deps = self.dependencies_dict(deptype=('link', 'run'))
d = {
'parameters' : params,
'arch' : self.architecture,
'dependencies' : dict((d, self.dependencies[d].dag_hash())
for d in sorted(self.dependencies))
'parameters': params,
'arch': self.architecture,
'dependencies': dict((d, (deps[d].spec.dag_hash(),
deps[d].deptypes))
for d in sorted(deps.keys()))
}
# Older concrete specs do not have a namespace. Omit for
@ -848,7 +941,7 @@ def to_node_dict(self):
def to_yaml(self, stream=None):
node_list = []
for s in self.traverse(order='pre'):
for s in self.traverse(order='pre', deptype=('link', 'run')):
node = s.to_node_dict()
node[s.name]['hash'] = s.dag_hash()
node_list.append(node)
@ -889,6 +982,11 @@ def from_node_dict(node):
raise SpackRecordError(
"Did not find a valid format for variants in YAML file")
# XXX(deptypes): why are dependencies not meant to be read here?
#for name, dep_info in node['dependencies'].items():
# (dag_hash, deptypes) = dep_info
# spec._dependencies[name] = DependencySpec(dag_hash, deptypes)
return spec
@staticmethod
@ -919,8 +1017,10 @@ def from_yaml(stream):
for node in yfile['spec']:
name = next(iter(node))
for dep_name in node[name]['dependencies']:
deps[name].dependencies[dep_name] = deps[dep_name]
for dep_name, (dep, deptypes) in \
node[name]['dependencies'].items():
deps[name]._dependencies[dep_name] = \
DependencySpec(deps[dep_name], deptypes)
return spec
def _concretize_helper(self, presets=None, visited=None):
@ -940,8 +1040,9 @@ def _concretize_helper(self, presets=None, visited=None):
changed = False
# Concretize deps first -- this is a bottom-up process.
for name in sorted(self.dependencies.keys()):
changed |= self.dependencies[name]._concretize_helper(presets, visited)
for name in sorted(self._dependencies.keys()):
changed |= self._dependencies[
name].spec._concretize_helper(presets, visited)
if self.name in presets:
changed |= self.constrain(presets[self.name])
@ -965,13 +1066,16 @@ def _concretize_helper(self, presets=None, visited=None):
def _replace_with(self, concrete):
"""Replace this virtual spec with a concrete spec."""
assert(self.virtual)
for name, dependent in self.dependents.items():
for name, dep_spec in self._dependents.items():
dependent = dep_spec.spec
deptypes = dep_spec.deptypes
# remove self from all dependents.
del dependent.dependencies[self.name]
del dependent._dependencies[self.name]
# add the replacement, unless it is already a dep of dependent.
if concrete.name not in dependent.dependencies:
dependent._add_dependency(concrete)
if concrete.name not in dependent._dependencies:
dependent._add_dependency(concrete, deptypes)
def _replace_node(self, replacement):
"""Replace this spec with another.
@ -982,13 +1086,15 @@ def _replace_node(self, replacement):
to be normalized.
"""
for name, dependent in self.dependents.items():
del dependent.dependencies[self.name]
dependent._add_dependency(replacement)
for name, dep_spec in self._dependents.items():
dependent = dep_spec.spec
deptypes = dep_spec.deptypes
del dependent._dependencies[self.name]
dependent._add_dependency(replacement, deptypes)
for name, dep in self.dependencies.items():
del dep.dependents[self.name]
del self.dependencies[dep.name]
for name, dep_spec in self._dependencies.items():
del dep_spec.spec.dependents[self.name]
del self._dependencies[dep.name]
def _expand_virtual_packages(self):
"""Find virtual packages in this spec, replace them with providers,
@ -1008,13 +1114,14 @@ def _expand_virtual_packages(self):
a problem.
"""
# Make an index of stuff this spec already provides
# XXX(deptype): 'link' and 'run'?
self_index = ProviderIndex(self.traverse(), restrict=True)
changed = False
done = False
while not done:
done = True
# XXX(deptype): 'link' and 'run'?
for spec in list(self.traverse()):
replacement = None
if spec.virtual:
@ -1054,10 +1161,10 @@ def _expand_virtual_packages(self):
# If replacement is external then trim the dependencies
if replacement.external or replacement.external_module:
if (spec.dependencies):
if (spec._dependencies):
changed = True
spec.dependencies = DependencyMap()
replacement.dependencies = DependencyMap()
spec._dependencies = DependencyMap()
replacement._dependencies = DependencyMap()
replacement.architecture = self.architecture
# TODO: could this and the stuff in _dup be cleaned up?
@ -1068,7 +1175,7 @@ def feq(cfield, sfield):
feq(replacement.versions, spec.versions) and
feq(replacement.compiler, spec.compiler) and
feq(replacement.architecture, spec.architecture) and
feq(replacement.dependencies, spec.dependencies) and
feq(replacement._dependencies, spec._dependencies) and
feq(replacement.variants, spec.variants) and
feq(replacement.external, spec.external) and
feq(replacement.external_module, spec.external_module)):
@ -1116,7 +1223,7 @@ def concretize(self):
changed = any(changes)
force = True
for s in self.traverse():
for s in self.traverse(deptype_query=alldeps):
# After concretizing, assign namespaces to anything left.
# Note that this doesn't count as a "change". The repository
# configuration is constant throughout a spack run, and
@ -1146,7 +1253,7 @@ def _mark_concrete(self):
Only for internal use -- client code should use "concretize"
unless there is a need to force a spec to be concrete.
"""
for s in self.traverse():
for s in self.traverse(deptype_query=alldeps):
s._normal = True
s._concrete = True
@ -1159,6 +1266,13 @@ def concretized(self):
return clone
def flat_dependencies(self, **kwargs):
    """Return a DependencyMap of name -> Spec for all dependencies.

    Thin wrapper over ``flat_dependencies_with_deptype`` that strips
    the DependencySpec wrappers, keeping only the plain Specs.
    """
    with_types = self.flat_dependencies_with_deptype(**kwargs)
    flat = DependencyMap()
    for dep_name, dep_spec in with_types.items():
        flat[dep_name] = dep_spec.spec
    return flat
def flat_dependencies_with_deptype(self, **kwargs):
"""Return a DependencyMap containing all of this spec's
dependencies with their constraints merged.
@ -1169,23 +1283,31 @@ def flat_dependencies(self, **kwargs):
returns them.
"""
copy = kwargs.get('copy', True)
deptype_query = kwargs.get('deptype_query')
flat_deps = DependencyMap()
try:
for spec in self.traverse(root=False):
deptree = self.traverse_with_deptype(root=False,
deptype_query=deptype_query)
for depspec in deptree:
spec = depspec.spec
deptypes = depspec.deptypes
if spec.name not in flat_deps:
if copy:
flat_deps[spec.name] = spec.copy(deps=False)
dep_spec = DependencySpec(spec.copy(deps=False),
deptypes)
else:
flat_deps[spec.name] = spec
dep_spec = DependencySpec(spec, deptypes)
flat_deps[spec.name] = dep_spec
else:
flat_deps[spec.name].constrain(spec)
flat_deps[spec.name].spec.constrain(spec)
if not copy:
for dep in flat_deps.values():
dep.dependencies.clear()
dep.dependents.clear()
self.dependencies.clear()
for depspec in flat_deps.values():
depspec.spec._dependencies.clear()
depspec.spec._dependents.clear()
self._dependencies.clear()
return flat_deps
@ -1200,17 +1322,11 @@ def index(self):
"""Return DependencyMap that points to all the dependencies in this
spec."""
dm = DependencyMap()
# XXX(deptype): use a deptype kwarg.
for spec in self.traverse():
dm[spec.name] = spec
return dm
def flatten(self):
"""Pull all dependencies up to the root (this spec).
Merge constraints for dependencies with the same name, and if they
conflict, throw an exception. """
for dep in self.flat_dependencies(copy=False):
self._add_dependency(dep)
def _evaluate_dependency_conditions(self, name):
"""Evaluate all the conditions on a dependency with this name.
@ -1267,7 +1383,8 @@ def _find_provider(self, vdep, provider_index):
elif required:
raise UnsatisfiableProviderSpecError(required[0], vdep)
def _merge_dependency(self, dep, visited, spec_deps, provider_index):
def _merge_dependency(self, dep, deptypes, visited, spec_deps,
provider_index):
"""Merge the dependency into this spec.
This is the core of normalize(). There are some basic steps:
@ -1294,7 +1411,9 @@ def _merge_dependency(self, dep, visited, spec_deps, provider_index):
dep = provider
else:
index = ProviderIndex([dep], restrict=True)
for vspec in (v for v in spec_deps.values() if v.virtual):
for vspec in (v.spec
for v in spec_deps.values()
if v.spec.virtual):
if index.providers_for(vspec):
vspec._replace_with(dep)
del spec_deps[vspec.name]
@ -1307,25 +1426,25 @@ def _merge_dependency(self, dep, visited, spec_deps, provider_index):
# If the spec isn't already in the set of dependencies, clone
# it from the package description.
if dep.name not in spec_deps:
spec_deps[dep.name] = dep.copy()
spec_deps[dep.name] = DependencySpec(dep.copy(), deptypes)
changed = True
# Constrain package information with spec info
try:
changed |= spec_deps[dep.name].constrain(dep)
changed |= spec_deps[dep.name].spec.constrain(dep)
except UnsatisfiableSpecError, e:
e.message = "Invalid spec: '%s'. "
e.message += "Package %s requires %s %s, but spec asked for %s"
e.message %= (spec_deps[dep.name], dep.name, e.constraint_type,
e.required, e.provided)
e.message %= (spec_deps[dep.name].spec, dep.name,
e.constraint_type, e.required, e.provided)
raise e
# Add merged spec to my deps and recurse
dependency = spec_deps[dep.name]
if dep.name not in self.dependencies:
self._add_dependency(dependency)
if dep.name not in self._dependencies:
self._add_dependency(dependency.spec, dependency.deptypes)
changed |= dependency._normalize_helper(
changed |= dependency.spec._normalize_helper(
visited, spec_deps, provider_index)
return changed
@ -1351,10 +1470,11 @@ def _normalize_helper(self, visited, spec_deps, provider_index):
for dep_name in pkg.dependencies:
# Do we depend on dep_name? If so pkg_dep is not None.
pkg_dep = self._evaluate_dependency_conditions(dep_name)
deptypes = pkg._deptypes[dep_name]
# If pkg_dep is a dependency, merge it.
if pkg_dep:
changed |= self._merge_dependency(
pkg_dep, visited, spec_deps, provider_index)
pkg_dep, deptypes, visited, spec_deps, provider_index)
any_change |= changed
return any_change
@ -1385,11 +1505,13 @@ def normalize(self, force=False):
# Ensure first that all packages & compilers in the DAG exist.
self.validate_names()
# Get all the dependencies into one DependencyMap
spec_deps = self.flat_dependencies(copy=False)
spec_deps = self.flat_dependencies_with_deptype(copy=False,
deptype_query=alldeps)
# Initialize index of virtual dependency providers if
# concretize didn't pass us one already
provider_index = ProviderIndex(spec_deps.values(), restrict=True)
provider_index = ProviderIndex([s.spec for s in spec_deps.values()],
restrict=True)
# traverse the package DAG and fill out dependencies according
# to package files & their 'when' specs
@ -1510,7 +1632,7 @@ def _constrain_dependencies(self, other):
"""Apply constraints of other spec's dependencies to this spec."""
other = self._autospec(other)
if not self.dependencies or not other.dependencies:
if not self._dependencies or not other._dependencies:
return False
# TODO: might want more detail than this, e.g. specific deps
@ -1526,13 +1648,17 @@ def _constrain_dependencies(self, other):
# Update with additional constraints from other spec
for name in other.dep_difference(self):
self._add_dependency(other[name].copy())
dep_spec_copy = other.get_dependency(name)
dep_copy = dep_spec_copy.spec
deptypes = dep_spec_copy.deptypes
self._add_dependency(dep_copy.copy(), deptypes)
changed = True
return changed
def common_dependencies(self, other):
"""Return names of dependencies that self an other have in common."""
# XXX(deptype): handle deptypes via deptype kwarg.
common = set(
s.name for s in self.traverse(root=False))
common.intersection_update(
@ -1657,13 +1783,14 @@ def satisfies_dependencies(self, other, strict=False):
other = self._autospec(other)
if strict:
if other.dependencies and not self.dependencies:
if other._dependencies and not self._dependencies:
return False
if not all(dep in self.dependencies for dep in other.dependencies):
if not all(dep in self._dependencies
for dep in other._dependencies):
return False
elif not self.dependencies or not other.dependencies:
elif not self._dependencies or not other._dependencies:
# if either spec doesn't restrict dependencies then both are
# compatible.
return True
@ -1726,8 +1853,8 @@ def _dup(self, other, **kwargs):
self.architecture = other.architecture
self.compiler = other.compiler.copy() if other.compiler else None
if kwargs.get('cleardeps', True):
self.dependents = DependencyMap()
self.dependencies = DependencyMap()
self._dependents = DependencyMap()
self._dependencies = DependencyMap()
self.compiler_flags = other.compiler_flags.copy()
self.variants = other.variants.copy()
self.variants.spec = self
@ -1739,15 +1866,30 @@ def _dup(self, other, **kwargs):
# If we copy dependencies, preserve DAG structure in the new spec
if kwargs.get('deps', True):
# This copies the deps from other using _dup(deps=False)
new_nodes = other.flat_dependencies()
# XXX(deptype): We can keep different instances of specs here iff
# it is only a 'build' dependency (from its parent).
# All other instances must be shared (due to symbol
# and PATH contention). These should probably search
# for any existing installation which can satisfy the
# build and latch onto that because if 3 things need
# the same build dependency and it is *not*
# available, we only want to build it once.
new_nodes = other.flat_dependencies(deptype_query=alldeps)
new_nodes[self.name] = self
# Hook everything up properly here by traversing.
for spec in other.traverse(cover='nodes'):
parent = new_nodes[spec.name]
for child in spec.dependencies:
if child not in parent.dependencies:
parent._add_dependency(new_nodes[child])
stack = [other]
while stack:
cur_spec = stack.pop(0)
new_spec = new_nodes[cur_spec.name]
for depspec in cur_spec._dependencies.values():
stack.append(depspec.spec)
# XXX(deptype): add any new deptypes that may have appeared
# here.
if depspec.spec.name not in new_spec._dependencies:
new_spec._add_dependency(
new_nodes[depspec.spec.name], depspec.deptypes)
# Since we preserved structure, we can copy _normal safely.
self._normal = other._normal
@ -1790,7 +1932,7 @@ def __getitem__(self, name):
raise KeyError("No spec with name %s in %s" % (name, self))
def __contains__(self, spec):
"""True if this spec satisfis the provided spec, or if any dependency
"""True if this spec satisfies the provided spec, or if any dependency
does. If the spec has no name, then we parse this one first.
"""
spec = self._autospec(spec)
@ -1814,13 +1956,13 @@ def _eq_dag(self, other, vs, vo):
if self.ne_node(other):
return False
if len(self.dependencies) != len(other.dependencies):
if len(self._dependencies) != len(other._dependencies):
return False
ssorted = [self.dependencies[name]
for name in sorted(self.dependencies)]
osorted = [other.dependencies[name]
for name in sorted(other.dependencies)]
ssorted = [self._dependencies[name].spec
for name in sorted(self._dependencies)]
osorted = [other._dependencies[name].spec
for name in sorted(other._dependencies)]
for s, o in zip(ssorted, osorted):
visited_s = id(s) in vs
@ -1874,9 +2016,10 @@ def _cmp_key(self):
1. A tuple describing this node in the DAG.
2. The hash of each of this node's dependencies' cmp_keys.
"""
dep_dict = self.dependencies_dict(deptype=('link', 'run'))
return self._cmp_node() + (
tuple(hash(self.dependencies[name])
for name in sorted(self.dependencies)),)
tuple(hash(dep_dict[name])
for name in sorted(dep_dict)),)
def colorized(self):
return colorize_spec(self)
@ -2081,8 +2224,8 @@ def __cmp__(self, other):
self.architecture, other.architecture)
#Dependency is not configurable
if self.dependencies != other.dependencies:
return -1 if self.dependencies < other.dependencies else 1
if self._dependencies != other._dependencies:
return -1 if self._dependencies < other._dependencies else 1
#Equal specs
return 0
@ -2196,10 +2339,13 @@ def do_parse(self):
specs.append(self.spec(None))
self.previous = None
if self.accept(HASH):
specs[-1]._add_dependency(self.spec_by_hash())
dep = self.spec_by_hash()
else:
self.expect(ID)
specs[-1]._add_dependency(self.spec(self.token.value))
dep = self.spec(self.token.value)
# XXX(deptype): default deptypes
def_deptypes = ('build', 'link')
specs[-1]._add_dependency(dep, def_deptypes)
else:
# Attempt to construct an anonymous spec, but check that
@ -2263,8 +2409,8 @@ def spec(self, name, check_valid_token=False):
spec.external = None
spec.external_module = None
spec.compiler_flags = FlagMap(spec)
spec.dependents = DependencyMap()
spec.dependencies = DependencyMap()
spec._dependents = DependencyMap()
spec._dependencies = DependencyMap()
spec.namespace = spec_namespace
spec._hash = None

View file

@ -58,16 +58,39 @@ def mock_open(filename, mode):
class MockSpec(object):
def __init__(self, name, version, hashStr=None):
self.dependencies = {}
self._dependencies = {}
self.name = name
self.version = version
self.hash = hashStr if hashStr else hash((name, version))
def _deptype_norm(self, deptype):
if deptype is None:
return spack.alldeps
# Force deptype to be a tuple so that we can do set intersections.
if isinstance(deptype, str):
return (deptype,)
return deptype
def _find_deps(self, where, deptype):
deptype = self._deptype_norm(deptype)
return [dep.spec
for dep in where.values()
if deptype and any(d in deptype for d in dep.deptypes)]
def dependencies(self, deptype=None):
return self._find_deps(self._dependencies, deptype)
def dependents(self, deptype=None):
return self._find_deps(self._dependents, deptype)
def traverse(self, order=None):
for _, spec in self.dependencies.items():
yield spec
for _, spec in self._dependencies.items():
yield spec.spec
yield self
#allDeps = itertools.chain.from_iterable(i.traverse() for i in self.dependencies.itervalues())
#from_iterable = itertools.chain.from_iterable
#allDeps = from_iterable(i.traverse()
# for i in self.dependencies())
#return set(itertools.chain([self], allDeps))
def dag_hash(self):
@ -104,7 +127,7 @@ def mock_fetch_log(path):
specX = MockSpec('X', "1.2.0")
specY = MockSpec('Y', "2.3.8")
specX.dependencies['Y'] = specY
specX._dependencies['Y'] = spack.DependencySpec(specY, spack.alldeps)
pkgX = MockPackage(specX, 'logX')
pkgY = MockPackage(specY, 'logY')

View file

@ -197,32 +197,36 @@ def test_concretize_two_virtuals_with_dual_provider_and_a_conflict(self):
def test_virtual_is_fully_expanded_for_callpath(self):
# force dependence on fake "zmpi" by asking for MPI 10.0
spec = Spec('callpath ^mpi@10.0')
self.assertTrue('mpi' in spec.dependencies)
self.assertTrue('mpi' in spec._dependencies)
self.assertFalse('fake' in spec)
spec.concretize()
self.assertTrue('zmpi' in spec.dependencies)
self.assertTrue(all(not 'mpi' in d.dependencies for d in spec.traverse()))
self.assertTrue('zmpi' in spec._dependencies)
self.assertTrue(all('mpi' not in d._dependencies
for d in spec.traverse()))
self.assertTrue('zmpi' in spec)
self.assertTrue('mpi' in spec)
self.assertTrue('fake' in spec.dependencies['zmpi'])
self.assertTrue('fake' in spec._dependencies['zmpi'].spec)
def test_virtual_is_fully_expanded_for_mpileaks(self):
spec = Spec('mpileaks ^mpi@10.0')
self.assertTrue('mpi' in spec.dependencies)
self.assertTrue('mpi' in spec._dependencies)
self.assertFalse('fake' in spec)
spec.concretize()
self.assertTrue('zmpi' in spec.dependencies)
self.assertTrue('callpath' in spec.dependencies)
self.assertTrue('zmpi' in spec.dependencies['callpath'].dependencies)
self.assertTrue('fake' in spec.dependencies['callpath'].dependencies['zmpi'].dependencies)
self.assertTrue('zmpi' in spec._dependencies)
self.assertTrue('callpath' in spec._dependencies)
self.assertTrue('zmpi' in spec._dependencies['callpath'].
spec._dependencies)
self.assertTrue('fake' in spec._dependencies['callpath'].
spec._dependencies['zmpi'].
spec._dependencies)
self.assertTrue(all(not 'mpi' in d.dependencies for d in spec.traverse()))
self.assertTrue(all(not 'mpi' in d._dependencies for d in spec.traverse()))
self.assertTrue('zmpi' in spec)
self.assertTrue('mpi' in spec)

View file

@ -191,8 +191,7 @@ def initmock(self):
# restore later.
self.saved_deps = {}
def set_pkg_dep(self, pkg_name, spec):
def set_pkg_dep(self, pkg_name, spec, deptypes=spack.alldeps):
"""Alters dependence information for a package.
Adds a dependency on <spec> to pkg.
@ -206,7 +205,9 @@ def set_pkg_dep(self, pkg_name, spec):
self.saved_deps[pkg_name] = (pkg, pkg.dependencies.copy())
# Change dep spec
pkg.dependencies[spec.name] = { Spec(pkg_name) : spec }
# XXX(deptype): handle deptypes.
pkg.dependencies[spec.name] = {Spec(pkg_name): spec}
pkg._deptypes[spec.name] = set(deptypes)
def cleanmock(self):
@ -216,6 +217,7 @@ def cleanmock(self):
shutil.rmtree(self.temp_config, ignore_errors=True)
spack.config.clear_config_caches()
# XXX(deptype): handle deptypes.
# Restore dependency changes that happened during the test
for pkg_name, (pkg, deps) in self.saved_deps.items():
pkg.dependencies.clear()

View file

@ -103,6 +103,8 @@ class MockGitRepo(MockVCSRepo):
def __init__(self):
super(MockGitRepo, self).__init__('mock-git-stage', 'mock-git-repo')
self.url = 'file://' + self.path
with working_dir(self.path):
git('init')
@ -140,8 +142,6 @@ def __init__(self):
self.r1 = self.rev_hash(self.branch)
self.r1_file = self.branch_file
self.url = self.path
def rev_hash(self, rev):
return git('rev-parse', rev, output=str).strip()

View file

@ -148,10 +148,12 @@ def test_conflicting_spec_constraints(self):
# Normalize then add conflicting constraints to the DAG (this is an
# extremely unlikely scenario, but we test for it anyway)
mpileaks.normalize()
mpileaks.dependencies['mpich'] = Spec('mpich@1.0')
mpileaks.dependencies['callpath'].dependencies['mpich'] = Spec('mpich@2.0')
mpileaks._dependencies['mpich'].spec = Spec('mpich@1.0')
mpileaks._dependencies['callpath']. \
spec._dependencies['mpich'].spec = Spec('mpich@2.0')
self.assertRaises(spack.spec.InconsistentSpecError, mpileaks.flatten)
self.assertRaises(spack.spec.InconsistentSpecError,
lambda: mpileaks.flat_dependencies(copy=False))
def test_normalize_twice(self):
@ -197,15 +199,17 @@ def test_normalize_with_virtual_spec(self):
def check_links(self, spec_to_check):
for spec in spec_to_check.traverse():
for dependent in spec.dependents.values():
for dependent in spec.dependents():
self.assertTrue(
spec.name in dependent.dependencies,
"%s not in dependencies of %s" % (spec.name, dependent.name))
spec.name in dependent.dependencies_dict(),
"%s not in dependencies of %s" %
(spec.name, dependent.name))
for dependency in spec.dependencies.values():
for dependency in spec.dependencies():
self.assertTrue(
spec.name in dependency.dependents,
"%s not in dependents of %s" % (spec.name, dependency.name))
spec.name in dependency.dependents_dict(),
"%s not in dependents of %s" %
(spec.name, dependency.name))
def test_dependents_and_dependencies_are_correct(self):
@ -442,3 +446,69 @@ def test_copy_concretized(self):
orig_ids = set(id(s) for s in orig.traverse())
copy_ids = set(id(s) for s in copy.traverse())
self.assertFalse(orig_ids.intersection(copy_ids))
"""
Here is the graph with deptypes labeled (assume all packages have a 'dt'
prefix). Arrows are marked with the deptypes ('b' for 'build', 'l' for
'link', 'r' for 'run').
use -bl-> top
top -b-> build1
top -bl-> link1
top -r-> run1
build1 -b-> build2
build1 -bl-> link2
build1 -r-> run2
link1 -bl-> link3
run1 -bl-> link5
run1 -r-> run3
link3 -b-> build2
link3 -bl-> link4
run3 -b-> build3
"""
def test_deptype_traversal(self):
dag = Spec('dtuse')
dag.normalize()
names = ['dtuse', 'dttop', 'dtlink1', 'dtlink3', 'dtlink4',
'dtrun1', 'dtlink5', 'dtrun3']
traversal = dag.traverse()
self.assertEqual([x.name for x in traversal], names)
def test_deptype_traversal_with_builddeps(self):
dag = Spec('dttop')
dag.normalize()
names = ['dttop', 'dtbuild1', 'dtlink2', 'dtrun2', 'dtlink1',
'dtlink3', 'dtlink4', 'dtrun1', 'dtlink5', 'dtrun3']
traversal = dag.traverse()
self.assertEqual([x.name for x in traversal], names)
def test_deptype_traversal_full(self):
dag = Spec('dttop')
dag.normalize()
names = ['dttop', 'dtbuild1', 'dtbuild2', 'dtlink2', 'dtrun2',
'dtlink1', 'dtlink3', 'dtlink4', 'dtrun1', 'dtlink5',
'dtrun3', 'dtbuild3']
traversal = dag.traverse(deptype_query=spack.alldeps)
self.assertEqual([x.name for x in traversal], names)
def test_deptype_traversal_pythonpath(self):
dag = Spec('dttop')
dag.normalize()
names = ['dttop', 'dtbuild1', 'dtrun2', 'dtlink1', 'dtrun1',
'dtrun3']
traversal = dag.traverse(deptype=spack.nolink, deptype_query='run')
self.assertEqual([x.name for x in traversal], names)

View file

@ -35,8 +35,8 @@
from spack.stage import Stage
from spack.util.executable import which
test_files_dir = join_path(spack.stage_path, '.test')
test_tmp_path = join_path(test_files_dir, 'tmp')
test_files_dir = os.path.realpath(join_path(spack.stage_path, '.test'))
test_tmp_path = os.path.realpath(join_path(test_files_dir, 'tmp'))
archive_dir = 'test-files'
archive_name = archive_dir + '.tar.gz'

View file

@ -38,7 +38,7 @@ class CmakeClient(Package):
version('1.0', '4cb3ff35b2472aae70f542116d616e63')
depends_on('cmake')
depends_on('cmake', type='build')
def setup_environment(self, spack_env, run_env):

View file

@ -0,0 +1,18 @@
from spack import *
class Dtbuild1(Package):
    """Package for use as a build tool for deptypes testing which has its own
    deptree"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/dtbuild1-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    # Edges in the deptype test graph: build1 -b-> build2,
    # build1 -bl-> link2 (default type), build1 -r-> run2.
    depends_on('dtbuild2', type='build')
    depends_on('dtlink2')
    depends_on('dtrun2', type='run')

    def install(self, spec, prefix):
        # Mock package: nothing to build or install.
        pass

View file

@ -0,0 +1,13 @@
from spack import *
class Dtbuild2(Package):
    """Simple package which acts as a build dependency"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/dtbuild2-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    def install(self, spec, prefix):
        # Mock package: nothing to build or install.
        pass

View file

@ -0,0 +1,13 @@
from spack import *
class Dtbuild3(Package):
    """Simple package which acts as a build dependency"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/dtbuild3-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    def install(self, spec, prefix):
        # Mock package: nothing to build or install.
        pass

View file

@ -0,0 +1,15 @@
from spack import *
class Dtlink1(Package):
    """Simple package which acts as a link dependency"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/dtlink1-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    # Deptype test graph edge: link1 -bl-> link3 (default deptype).
    depends_on('dtlink3')

    def install(self, spec, prefix):
        # Mock package: nothing to build or install.
        pass

View file

@ -0,0 +1,13 @@
from spack import *
class Dtlink2(Package):
    """Simple package which acts as a link dependency"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/dtlink2-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    def install(self, spec, prefix):
        # Mock package: nothing to build or install.
        pass

View file

@ -0,0 +1,16 @@
from spack import *
class Dtlink3(Package):
    """Simple package which acts as a link dependency"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/dtlink3-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    # Deptype test graph edges: link3 -b-> build2, link3 -bl-> link4.
    depends_on('dtbuild2', type='build')
    depends_on('dtlink4')

    def install(self, spec, prefix):
        # Mock package: nothing to build or install.
        pass

View file

@ -0,0 +1,13 @@
from spack import *
class Dtlink4(Package):
    """Simple package which acts as a link dependency"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/dtlink4-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    def install(self, spec, prefix):
        # Mock package: nothing to build or install.
        pass

View file

@ -0,0 +1,13 @@
from spack import *
class Dtlink5(Package):
    """Simple package which acts as a link dependency"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/dtlink5-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    def install(self, spec, prefix):
        # Mock package: nothing to build or install.
        pass

View file

@ -0,0 +1,16 @@
from spack import *
class Dtrun1(Package):
    """Simple package which acts as a run dependency"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/dtrun1-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    # Deptype test graph edges: run1 -bl-> link5 (default), run1 -r-> run3.
    depends_on('dtlink5')
    depends_on('dtrun3', type='run')

    def install(self, spec, prefix):
        # Mock package: nothing to build or install.
        pass

View file

@ -0,0 +1,13 @@
from spack import *
class Dtrun2(Package):
    """Simple package which acts as a run dependency"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/dtrun2-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    def install(self, spec, prefix):
        # Mock package: nothing to build or install.
        pass

View file

@ -0,0 +1,15 @@
from spack import *
class Dtrun3(Package):
    """Simple package which acts as a run dependency"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/dtrun3-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    # Deptype test graph edge: run3 -b-> build3.
    depends_on('dtbuild3', type='build')

    def install(self, spec, prefix):
        # Mock package: nothing to build or install.
        pass

View file

@ -0,0 +1,17 @@
from spack import *
class Dttop(Package):
    """Package with a complicated dependency tree"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/dttop-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    # Root of the deptype test graph: top -b-> build1,
    # top -bl-> link1 (default), top -r-> run1.
    depends_on('dtbuild1', type='build')
    depends_on('dtlink1')
    depends_on('dtrun1', type='run')

    def install(self, spec, prefix):
        # Mock package: nothing to build or install.
        pass

View file

@ -0,0 +1,15 @@
from spack import *
class Dtuse(Package):
    """Simple package which uses dttop"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/dtuse-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    # Deptype test graph edge: use -bl-> top (default deptype).
    depends_on('dttop')

    def install(self, spec, prefix):
        # Mock package: nothing to build or install.
        pass

View file

@ -51,7 +51,7 @@ class Imagemagick(Package):
url="http://sourceforge.net/projects/imagemagick/files/old-sources/6.x/6.8/ImageMagick-6.8.9-10.tar.gz/download")
depends_on('jpeg')
depends_on('libtool')
depends_on('libtool', type='build')
depends_on('libpng')
depends_on('freetype')
depends_on('fontconfig')

View file

@ -41,6 +41,7 @@ class Mitos(Package):
depends_on('dyninst@8.2.1:')
depends_on('hwloc')
depends_on('mpi')
depends_on('cmake', type='build')
def install(self, spec, prefix):
with working_dir('spack-build', create=True):

View file

@ -123,7 +123,7 @@ def setup_dependent_environment(self, spack_env, run_env, extension_spec):
# Set R_LIBS to include the library dir for the
# extension and any other R extensions it depends on.
r_libs_path = []
for d in extension_spec.traverse():
for d in extension_spec.traverse(deptype=nolink, deptype_query='run'):
if d.package.extends(self.spec):
r_libs_path.append(os.path.join(d.prefix, self.r_lib_dir))

View file

@ -35,6 +35,7 @@ class AdeptUtils(Package):
depends_on("boost")
depends_on("mpi")
depends_on('cmake', type='build')
def install(self, spec, prefix):
cmake(*std_cmake_args)

View file

@ -33,7 +33,7 @@ class Apex(Package):
depends_on("binutils+libiberty")
depends_on("boost@1.54:")
depends_on("cmake@2.8.12:")
depends_on('cmake@2.8.12:', type='build')
depends_on("activeharmony@4.5:")
depends_on("ompt-openmp")

View file

@ -72,10 +72,10 @@ class ArpackNg(Package):
depends_on('blas')
depends_on('lapack')
depends_on('automake', when='@3.3.0')
depends_on('autoconf', when='@3.3.0')
depends_on('libtool@2.4.2:', when='@3.3.0')
depends_on('cmake@2.8.6:', when='@3.4.0:')
depends_on('automake', when='@3.3.0', type='build')
depends_on('autoconf', when='@3.3.0', type='build')
depends_on('libtool@2.4.2:', when='@3.3.0', type='build')
depends_on('cmake@2.8.6:', when='@3.4.0:', type='build')
depends_on('mpi', when='+mpi')

View file

@ -35,7 +35,7 @@ class Autoconf(Package):
version('2.69', '82d05e03b93e45f5a39b828dc9c6c29b')
version('2.62', '6c1f3b3734999035d77da5024aab4fbd')
depends_on('m4')
depends_on('m4', type='build')
def _make_executable(self, name):
return Executable(join_path(self.prefix.bin, name))

View file

@ -44,6 +44,7 @@ class Automaded(Package):
depends_on('mpi')
depends_on('boost')
depends_on('callpath')
depends_on('cmake', type='build')
def install(self, spec, prefix):
cmake("-DSTATE_TRACKER_WITH_CALLPATH=ON", *std_cmake_args)

View file

@ -31,7 +31,7 @@ class Bear(Package):
version('2.0.4', 'fd8afb5e8e18f8737ba06f90bd77d011')
depends_on("cmake")
depends_on('cmake', type='build')
depends_on("python")
def install(self, spec, prefix):

View file

@ -37,8 +37,8 @@ class Bertini(Package):
variant('mpi', default=True, description='Compile in parallel')
depends_on('flex')
depends_on('bison')
depends_on('flex', type='build')
depends_on('bison', type='build')
depends_on('gmp')
depends_on('mpfr')
depends_on('mpi', when='+mpi')

View file

@ -38,9 +38,9 @@ class Binutils(Package):
version('2.23.2', '4f8fa651e35ef262edc01d60fb45702e')
version('2.20.1', '2b9dc8f2b7dbd5ec5992c6e29de0b764')
depends_on('m4')
depends_on('flex')
depends_on('bison')
depends_on('m4', type='build')
depends_on('flex', type='build')
depends_on('bison', type='build')
# Add a patch that creates binutils libiberty_pic.a which is preferred by
# OpenSpeedShop and cbtf-krell

View file

@ -34,7 +34,7 @@ class Bison(Package):
version('3.0.4', 'a586e11cd4aff49c3ff6d3b6a4c9ccf8')
depends_on("m4")
depends_on("m4", type='build')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)

View file

@ -35,6 +35,7 @@ class Boxlib(Package):
version('master', git='https://ccse.lbl.gov/pub/Downloads/BoxLib.git')
depends_on('mpi')
depends_on('cmake', type='build')
def install(self, spec, prefix):
args = std_cmake_args

View file

@ -24,54 +24,59 @@
##############################################################################
from spack import *
class Bzip2(Package):
"""bzip2 is a freely available, patent free high-quality data
compressor. It typically compresses files to within 10% to 15%
of the best available techniques (the PPM family of statistical
compressors), whilst being around twice as fast at compression
and six times faster at decompression.
compressor. It typically compresses files to within 10% to 15%
of the best available techniques (the PPM family of statistical
compressors), whilst being around twice as fast at compression
and six times faster at decompression."""
"""
homepage = "http://www.bzip.org"
url = "http://www.bzip.org/1.0.6/bzip2-1.0.6.tar.gz"
version('1.0.6', '00b516f4704d4a7cb50a1d97e6e8e15b')
def patch(self):
mf = FileFilter('Makefile-libbz2_so')
mf.filter(r'^CC=gcc', 'CC=cc')
# bzip2 comes with two separate Makefiles for static and dynamic builds
# Tell both to use Spack's compiler wrapper instead of GCC
filter_file(r'^CC=gcc', 'CC=cc', 'Makefile')
filter_file(r'^CC=gcc', 'CC=cc', 'Makefile-libbz2_so')
# Below stuff patches the link line to use RPATHs on Mac OS X.
# Patch the link line to use RPATHs on macOS
if 'darwin' in self.spec.architecture:
v = self.spec.version
v1, v2, v3 = (v.up_to(i) for i in (1,2,3))
v1, v2, v3 = (v.up_to(i) for i in (1, 2, 3))
mf.filter('$(CC) -shared -Wl,-soname -Wl,libbz2.so.{0} -o libbz2.so.{1} $(OBJS)'.format(v2, v3),
'$(CC) -dynamiclib -Wl,-install_name -Wl,@rpath/libbz2.{0}.dylib -current_version {1} -compatibility_version {2} -o libbz2.{3}.dylib $(OBJS)'.format(v1, v2, v3, v3), string=True)
kwargs = {'ignore_absent': False, 'backup': False, 'string': True}
mf.filter('$(CC) $(CFLAGS) -o bzip2-shared bzip2.c libbz2.so.{0}'.format(v3),
'$(CC) $(CFLAGS) -o bzip2-shared bzip2.c libbz2.{0}.dylib'.format(v3), string=True)
mf = FileFilter('Makefile-libbz2_so')
mf.filter('$(CC) -shared -Wl,-soname -Wl,libbz2.so.{0} -o libbz2.so.{1} $(OBJS)'.format(v2, v3), # NOQA ignore=E501
'$(CC) -dynamiclib -Wl,-install_name -Wl,@rpath/libbz2.{0}.dylib -current_version {1} -compatibility_version {2} -o libbz2.{3}.dylib $(OBJS)'.format(v1, v2, v3, v3), **kwargs) # NOQA ignore=E501
mf.filter('$(CC) $(CFLAGS) -o bzip2-shared bzip2.c libbz2.so.{0}'.format(v3), # NOQA ignore=E501
'$(CC) $(CFLAGS) -o bzip2-shared bzip2.c libbz2.{0}.dylib'.format(v3), **kwargs) # NOQA ignore=E501
mf.filter('rm -f libbz2.so.{0}'.format(v2),
'rm -f libbz2.{0}.dylib'.format(v2), string=True)
'rm -f libbz2.{0}.dylib'.format(v2), **kwargs)
mf.filter('ln -s libbz2.so.{0} libbz2.so.{1}'.format(v3, v2),
'ln -s libbz2.{0}.dylib libbz2.{1}.dylib'.format(v3, v2), string=True)
'ln -s libbz2.{0}.dylib libbz2.{1}.dylib'.format(v3, v2), **kwargs) # NOQA ignore=E501
def install(self, spec, prefix):
# Build the dynamic library first
make('-f', 'Makefile-libbz2_so')
make('clean')
make("install", "PREFIX=%s" % prefix)
# Build the static library and everything else
make()
make('install', 'PREFIX={0}'.format(prefix))
install('bzip2-shared', join_path(prefix.bin, 'bzip2'))
v1, v2, v3 = (self.spec.version.up_to(i) for i in (1,2,3))
v1, v2, v3 = (self.spec.version.up_to(i) for i in (1, 2, 3))
if 'darwin' in self.spec.architecture:
lib = 'libbz2.dylib'
lib1, lib2, lib3 = ('libbz2.{0}.dylib'.format(v) for v in (v1, v2, v3))
lib1, lib2, lib3 = ('libbz2.{0}.dylib'.format(v) for v in (v1, v2, v3)) # NOQA ignore=E501
else:
lib = 'libbz2.so'
lib1, lib2, lib3 = ('libbz2.so.{0}'.format(v) for v in (v1, v2, v3))
lib1, lib2, lib3 = ('libbz2.so.{0}'.format(v) for v in (v1, v2, v3)) # NOQA ignore=E501
install(lib3, join_path(prefix.lib, lib3))
with working_dir(prefix.lib):

View file

@ -38,7 +38,7 @@ class CBlosc(Package):
version('1.8.1', 'd73d5be01359cf271e9386c90dcf5b05')
version('1.8.0', '5b92ecb287695ba20cc33d30bf221c4f')
depends_on("cmake")
depends_on("cmake", type='build')
depends_on("snappy")
depends_on("zlib")

View file

@ -41,6 +41,7 @@ class Caliper(Package):
depends_on('libunwind')
depends_on('papi')
depends_on('mpi', when='+mpi')
depends_on('cmake', type='build')
def install(self, spec, prefix):
with working_dir('build', create=True):

View file

@ -39,6 +39,7 @@ class Callpath(Package):
depends_on("dyninst")
depends_on("adept-utils")
depends_on("mpi")
depends_on('cmake', type='build')
def install(self, spec, prefix):
# TODO: offer options for the walker used.

View file

@ -42,7 +42,7 @@ class Cantera(Package):
variant('matlab', default=False, description='Build the Cantera Matlab toolbox')
# Required dependencies
depends_on('scons')
depends_on('scons', type='build')
# Recommended dependencies
depends_on('blas', when='+lapack')
@ -52,10 +52,10 @@ class Cantera(Package):
# Python module dependencies
extends('python', when='+python')
depends_on('py-numpy', when='+python')
depends_on('py-scipy', when='+python')
depends_on('py-cython', when='+python')
depends_on('py-3to2', when='+python')
depends_on('py-numpy', when='+python', type=nolink)
depends_on('py-scipy', when='+python', type=nolink)
depends_on('py-cython', when='+python', type=nolink)
depends_on('py-3to2', when='+python', type=nolink)
# TODO: these "when" specs don't actually work
# depends_on('py-unittest2', when='+python^python@2.6')
# depends_on('py-unittest2py3k', when='+python^python@3.1')

View file

@ -53,7 +53,7 @@ class CbtfArgonavis(Package):
version('1.6', branch='master', git='https://github.com/OpenSpeedShop/cbtf-argonavis.git')
depends_on("cmake@3.0.2")
depends_on("cmake@3.0.2", type='build')
depends_on("boost@1.50.0:")
depends_on("papi")
depends_on("mrnet@5.0.1:+lwthreads+krellpatch")

View file

@ -64,7 +64,7 @@ class CbtfKrell(Package):
variant('mpich', default=False, description="Build mpi experiment collector for mpich MPI when this variant is enabled.")
# Dependencies for cbtf-krell
depends_on("cmake@3.0.2")
depends_on("cmake@3.0.2", type='build')
# For binutils service
depends_on("binutils@2.24+krellpatch")

View file

@ -53,7 +53,7 @@ class CbtfLanl(Package):
version('1.6', branch='master', git='http://git.code.sf.net/p/cbtf-lanl/cbtf-lanl')
depends_on("cmake@3.0.2")
depends_on("cmake@3.0.2", type='build')
# Dependencies for cbtf-krell
depends_on("mrnet@5.0.1:+lwthreads+krellpatch")
depends_on("xerces-c@3.1.1:")

View file

@ -57,7 +57,7 @@ class Cbtf(Package):
variant('runtime', default=False, description="build only the runtime libraries and collectors.")
depends_on("cmake@3.0.2")
depends_on("cmake@3.0.2", type='build')
depends_on("boost@1.50.0:")
depends_on("mrnet@5.0.1:+lwthreads+krellpatch")
depends_on("xerces-c@3.1.1:")

View file

@ -40,7 +40,7 @@ class Cereal(Package):
patch("Werror.patch")
depends_on("cmake @2.6.2:")
depends_on('cmake@2.6.2:', type='build')
def install(self, spec, prefix):
# Don't use -Werror

View file

@ -46,7 +46,7 @@ class Cgal(Package):
depends_on('mpfr')
depends_on('gmp')
depends_on('zlib')
depends_on('cmake')
depends_on('cmake', type='build')
# FIXME : Qt5 dependency missing (needs Qt5 and OpenGL)
# FIXME : Optional third party libraries missing

View file

@ -40,6 +40,7 @@ class Cleverleaf(Package):
depends_on("SAMRAI@3.8.0:")
depends_on("hdf5+mpi")
depends_on("boost")
depends_on('cmake', type='build')
def install(self, spec, prefix):
cmake(*std_cmake_args)

View file

@ -47,8 +47,8 @@ class Cmake(Package):
depends_on('ncurses', when='+ncurses')
depends_on('openssl', when='+openssl')
depends_on('qt', when='+qt')
depends_on('python@2.7.11:', when='+doc')
depends_on('py-sphinx', when='+doc')
depends_on('python@2.7.11:', when='+doc', type='build')
depends_on('py-sphinx', when='+doc', type='build')
def url_for_version(self, version):
"""Handle CMake's version-based custom URLs."""

View file

@ -32,6 +32,8 @@ class Cmocka(Package):
version('1.0.1', 'ed861e501a21a92b2af63e466df2015e')
parallel = False
depends_on('cmake', type='build')
def install(self, spec, prefix):
with working_dir('spack-build', create=True):
cmake('..', *std_cmake_args)

View file

@ -33,6 +33,7 @@ class Cram(Package):
extends('python')
depends_on("mpi")
depends_on('cmake', type='build')
def install(self, spec, prefix):
cmake(".", *std_cmake_args)

View file

@ -32,10 +32,10 @@ class Czmq(Package):
version('3.0.2', '23e9885f7ee3ce88d99d0425f52e9be1', url='https://github.com/zeromq/czmq/archive/v3.0.2.tar.gz')
depends_on('libtool')
depends_on('automake')
depends_on('autoconf')
depends_on('pkg-config')
depends_on('libtool', type='build')
depends_on('automake', type='build')
depends_on('autoconf', type='build')
depends_on('pkg-config', type='build')
depends_on('zeromq')
def install(self, spec, prefix):

View file

@ -55,6 +55,7 @@ class Dakota(Package):
depends_on('python')
depends_on('boost')
depends_on('cmake', type='build')
def url_for_version(self, version):
return Dakota._url_str.format(version=version)

View file

@ -31,6 +31,8 @@ class Damselfly(Package):
version('1.0', '05cf7e2d8ece4408c0f2abb7ab63fd74c0d62895', git='https://github.com/llnl/damselfly.git', tag='v1.0')
depends_on('cmake', type='build')
def install(self, spec, prefix):
with working_dir('spack-build', create=True):
cmake('-DCMAKE_BUILD_TYPE=release', '..', *std_cmake_args)

View file

@ -60,7 +60,7 @@ class Dealii(Package):
depends_on("boost@1.59.0:+thread+system+serialization+iostreams", when='~mpi') # NOQA: ignore=E501
depends_on("boost@1.59.0:+mpi+thread+system+serialization+iostreams", when='+mpi') # NOQA: ignore=E501
depends_on("bzip2")
depends_on("cmake")
depends_on("cmake", type='build')
depends_on("lapack")
depends_on("muparser")
depends_on("suite-sparse")

View file

@ -31,7 +31,7 @@ class Dia(Package):
version('0.97.3', '0e744a0f6a6c4cb6a089e4d955392c3c')
depends_on('intltool')
depends_on('intltool', type='build')
depends_on('gtkplus@2.6.0:')
depends_on('cairo')
#depends_on('libart') # optional dependency, not yet supported by spack.

View file

@ -24,6 +24,8 @@
##############################################################################
from spack import *
import sys
class Doxygen(Package):
"""Doxygen is the de facto standard tool for generating documentation
@ -41,12 +43,12 @@ class Doxygen(Package):
# graphviz appears to be a run-time optional dependency
variant('graphviz', default=True, description='Build with dot command support from Graphviz.') # NOQA: ignore=E501
depends_on("cmake@2.8.12:")
depends_on("flex")
depends_on("bison")
depends_on("cmake@2.8.12:", type='build')
depends_on("flex", type='build')
depends_on("bison", type='build')
# optional dependencies
depends_on("graphviz", when="+graphviz")
depends_on("graphviz", when="+graphviz", type='run')
def install(self, spec, prefix):
cmake('.', *std_cmake_args)

View file

@ -43,6 +43,7 @@ class Dyninst(Package):
depends_on("libelf")
depends_on("libdwarf")
depends_on("boost@1.42:")
depends_on('cmake', type='build')
# new version uses cmake
def install(self, spec, prefix):

View file

@ -47,7 +47,7 @@ class Eigen(Package):
variant('mpfr', default=True, description='Enables support for multi-precisions floating points via mpfr')
# TODO : dependency on googlehash, superlu, adolc missing
depends_on('cmake')
depends_on('cmake', type='build')
depends_on('metis@5:', when='+metis')
depends_on('scotch', when='+scotch')
depends_on('fftw', when='+fftw')

View file

@ -35,7 +35,7 @@ class EnvironmentModules(Package):
version('3.2.10', '8b097fdcb90c514d7540bb55a3cb90fb')
# Dependencies:
depends_on('tcl')
depends_on('tcl', type=alldeps)
def install(self, spec, prefix):
tcl_spec = spec['tcl']

View file

@ -44,9 +44,7 @@ class Exodusii(Package):
version('2016-02-08', git='https://github.com/gsjaardema/seacas.git', commit='dcf3529')
# TODO: Make this a build dependency once build dependencies are supported
# (see: https://github.com/LLNL/spack/pull/378).
depends_on('cmake@2.8.7:')
depends_on('cmake@2.8.7:', type='build')
depends_on('hdf5~shared~mpi')
depends_on('netcdf~mpi')

View file

@ -31,7 +31,7 @@ class Expat(Package):
version('2.1.0', 'dd7dab7a5fea97d2a6a43f511449b7cd')
depends_on('cmake')
depends_on('cmake', type='build')
def install(self, spec, prefix):

View file

@ -34,7 +34,7 @@ class Flex(Package):
version('2.6.0', '5724bcffed4ebe39e9b55a9be80859ec')
version('2.5.39', 'e133e9ead8ec0a58d81166b461244fde')
depends_on("bison")
depends_on("bison", type='build')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)

View file

@ -45,8 +45,8 @@ class Flux(Package):
depends_on("py-cffi")
# TODO: This provides a catalog, hacked with environment below for now
depends_on("docbook-xml")
depends_on("asciidoc")
depends_on("docbook-xml", type='build')
depends_on("asciidoc", type='build')
def install(self, spec, prefix):
# Bootstrap with autotools

View file

@ -29,6 +29,7 @@ class FoamExtend(Package):
depends_on('python')
depends_on('flex@:2.5.99')
depends_on('zlib')
depends_on('cmake', type='build')
depends_on('scotch ~ metis', when='~ptscotch+scotch')
depends_on('scotch ~ metis + mpi', when='+ptscotch')

View file

@ -62,7 +62,7 @@ class Gdal(Package):
depends_on("libpng")
depends_on("zlib")
depends_on("proj")
depends_on("py-numpy")
depends_on("py-numpy", type=nolink)
parallel = False

View file

@ -41,7 +41,7 @@ class Gdb(Package):
version('7.9', '8f8ced422fe462a00e0135a643544f17')
version('7.8.2', '8b0ea8b3559d3d90b3ff4952f0aeafbc')
depends_on('texinfo')
depends_on('texinfo', type='build')
def install(self, spec, prefix):
configure('--prefix=%s' % prefix)

View file

@ -37,6 +37,8 @@ class Gflags(Package):
version('2.1.2', 'ac432de923f9de1e9780b5254884599f')
depends_on('cmake', type='build')
def install(self, spec, prefix):
cmake("-DCMAKE_INSTALL_PREFIX=" + prefix,
"-DBUILD_SHARED_LIBS=ON")

View file

@ -49,7 +49,7 @@ class Git(Package):
depends_on("openssl")
depends_on("autoconf")
depends_on("autoconf", type='build')
depends_on("curl")
depends_on("expat")

View file

@ -38,8 +38,8 @@ class Glib(Package):
depends_on("libffi")
depends_on("zlib")
depends_on("pkg-config")
depends_on('gettext', sys.platform == 'darwin')
depends_on("pkg-config", type='build')
depends_on('gettext', when=sys.platform == 'darwin')
# The following patch is needed for gcc-6.1
patch('g_date_strftime.patch')

View file

@ -35,8 +35,8 @@ class Glm(Package):
url = "https://github.com/g-truc/glm/archive/0.9.7.1.tar.gz"
version('0.9.7.1', '61af6639cdf652d1cdd7117190afced8')
depends_on ("cmake")
depends_on('cmake', type='build')
def install(self, spec, prefix):
with working_dir('spack-build', create=True):

View file

@ -34,7 +34,7 @@ class Global(Package):
version('6.5', 'dfec818b4f53d91721e247cf7b218078')
depends_on('exuberant-ctags')
depends_on('exuberant-ctags', type=nolink)
depends_on('ncurses')
def install(self, spec, prefix):

View file

@ -36,4 +36,4 @@ class Gmp(AutotoolsPackage):
version('6.0.0a', 'b7ff2d88cae7f8085bd5006096eed470')
version('6.0.0' , '6ef5869ae735db9995619135bd856b84')
depends_on("m4")
depends_on("m4", type='build')

View file

@ -53,7 +53,7 @@ class Gmsh(Package):
depends_on('blas')
depends_on('lapack')
depends_on('cmake@2.8:')
depends_on('cmake@2.8:', type='build')
depends_on('gmp')
depends_on('mpi', when='+mpi')
# Assumes OpenGL with GLU is already provided by the system:

View file

@ -0,0 +1,41 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class GnuProlog(Package):
    """A free Prolog compiler with constraint solving over finite domains."""

    homepage = "http://www.gprolog.org/"
    url = "http://www.gprolog.org/gprolog-1.4.4.tar.gz"

    version('1.4.4', '37009da471e5217ff637ad1c516448c8')

    # The gprolog build is not safe to run with parallel make.
    parallel = False

    def install(self, spec, prefix):
        # Configure/build must be driven from the 'src' subdirectory.
        config_args = [
            '--with-install-dir=%s' % prefix,
            '--without-links-dir',
        ]
        with working_dir('src'):
            configure(*config_args)
            make()
            make('install')

View file

@ -24,7 +24,7 @@ class Go(Package):
# to-do, make non-c self-hosting compilers feasible without backflips
# should be a dep on external go compiler
depends_on('go-bootstrap')
depends_on('go-bootstrap', type='build')
depends_on('git')
def install(self, spec, prefix):

View file

@ -31,7 +31,7 @@ class Googletest(Package):
version('1.7.0', '5eaf03ed925a47b37c8e1d559eb19bc4')
depends_on("cmake")
depends_on("cmake", type='build')
def install(self, spec, prefix):
which('cmake')('.', *std_cmake_args)

View file

@ -31,6 +31,8 @@ class Graphlib(Package):
version('2.0.0', '43c6df84f1d38ba5a5dce0ae19371a70')
depends_on('cmake', type='build')
def install(self, spec, prefix):
cmake(".", *std_cmake_args)

View file

@ -44,7 +44,7 @@ class Graphviz(Package):
depends_on("swig")
depends_on("python")
depends_on("ghostscript")
depends_on("pkg-config")
depends_on("pkg-config", type='build')
def install(self, spec, prefix):
options = ['--prefix=%s' % prefix]

View file

@ -50,6 +50,7 @@ class Gromacs(Package):
depends_on('mpi', when='+mpi')
depends_on('fftw')
depends_on('cmake', type='build')
# TODO : add GPU support

View file

@ -53,7 +53,7 @@ class Hdf5Blosc(Package):
depends_on("c-blosc")
depends_on("hdf5")
depends_on("libtool")
depends_on("libtool", type='build')
parallel = False

View file

@ -45,12 +45,12 @@ class HoomdBlue(Package):
variant('doc', default=True, description='Generate documentation')
extends('python')
depends_on('py-numpy')
depends_on('py-numpy', type=nolink)
depends_on('boost+python')
depends_on('cmake')
depends_on('cmake', type='build')
depends_on('mpi', when='+mpi')
depends_on('cuda', when='+cuda')
depends_on('doxygen', when='+doc')
depends_on('doxygen', when='+doc', type='build')
def install(self, spec, prefix):

View file

@ -26,13 +26,13 @@ class Ibmisc(CMakePackage):
depends_on('netcdf-cxx4', when='+netcdf')
depends_on('udunits2', when='+udunits2')
depends_on('googletest', when='+googletest')
depends_on('py-cython', when='+python')
depends_on('py-numpy', when='+python')
depends_on('py-cython', when='+python', type=nolink)
depends_on('py-numpy', when='+python', type=nolink)
depends_on('boost', when='+boost')
# Build dependencies
depends_on('cmake')
depends_on('doxygen')
depends_on('cmake', type='build')
depends_on('doxygen', type='build')
def cmake_args(self):
spec = self.spec

View file

@ -38,7 +38,7 @@ class Ipopt(Package):
depends_on("blas")
depends_on("lapack")
depends_on("pkg-config")
depends_on("pkg-config", type='build')
depends_on("mumps+double~mpi")
def install(self, spec, prefix):

View file

@ -44,16 +44,16 @@ class Julia(Package):
patch('openblas.patch', when='@0.4:0.4.5')
# Build-time dependencies:
# depends_on("awk")
# depends_on("m4")
# depends_on("pkg-config")
# depends_on("awk", type='build')
# depends_on("m4", type='build')
# depends_on("pkg-config", type='build')
# Combined build-time and run-time dependencies:
depends_on("binutils")
depends_on("cmake @2.8:")
depends_on("git")
depends_on("openssl")
depends_on("python @2.7:2.999")
depends_on("binutils", type=nolink)
depends_on("cmake @2.8:", type=nolink)
depends_on("git", type=nolink)
depends_on("openssl", type=nolink)
depends_on("python @2.7:2.999", type=nolink)
# I think that Julia requires the dependencies above, but it
# builds fine (on my system) without these. We should enable them
@ -93,8 +93,8 @@ class Julia(Package):
# USE_SYSTEM_LIBGIT2=0
# Run-time dependencies for Julia packages:
depends_on("hdf5")
depends_on("mpi")
depends_on("hdf5", type='run')
depends_on("mpi", type='run')
def install(self, spec, prefix):
# Explicitly setting CC, CXX, or FC breaks building libuv, one

Some files were not shown because too many files have changed in this diff Show more