spack: introduce dependency types

deptypes: allow for different queries

For things like Python modules, they are required for the build and
runtime, but rather than adding a new parameter for what to query across
the dependency DAG, just expose the recursive query parameter.
This commit is contained in:
Ben Boeckel 2016-01-25 10:26:35 -05:00
parent bae97d17d0
commit 45c675fe7f
34 changed files with 697 additions and 182 deletions

View file

@ -1286,6 +1286,31 @@ command line to find installed packages or to install packages with
particular constraints, and package authors can use specs to describe
relationships between packages.
Additionally, dependencies may be specified for specific use cases:
.. code-block:: python
depends_on("cmake", type="build")
depends_on("libelf", type=("build", "link"))
depends_on("python", type="run")
The dependency types are:
* **"build"**: made available during the project's build. The package will
be added to ``PATH``, the compiler include paths, and ``PYTHONPATH``.
Other projects which depend on this one will not have these modified
(building project X doesn't need project Y's build dependencies).
* **"link"**: the dependency is linked against by the dependent project. The
  package will be added to the current package's ``rpath``.
* **"run"**: the dependency is used by the dependent project at runtime. The
  package will be added to ``PATH`` and ``PYTHONPATH``.
If not specified, ``type`` is assumed to be ``("build", "link")``. This is the
common case for compiled language usage. Also available are the aliases
``alldeps`` for all dependency types and ``nolink`` (``("build", "run")``) for
use by dependencies which are not expressed via a linker (e.g., Python or Lua
module loading).
.. _setup-dependent-environment:
``setup_dependent_environment()``

View file

@ -177,10 +177,11 @@
# should live. This file is overloaded for spack core vs. for packages.
#
__all__ = ['Package', 'StagedPackage', 'CMakePackage', \
'Version', 'when', 'ver']
'Version', 'when', 'ver', 'alldeps', 'nolink']
from spack.package import Package, ExtensionConflictError
from spack.package import StagedPackage, CMakePackage
from spack.version import Version, ver
from spack.spec import DependencySpec, alldeps, nolink
from spack.multimethod import when
import llnl.util.filesystem

View file

@ -254,7 +254,8 @@ def set_build_environment_variables(pkg, env, dirty=False):
env.set_path(SPACK_ENV_PATH, env_paths)
# Prefixes of all of the package's dependencies go in SPACK_DEPENDENCIES
dep_prefixes = [d.prefix for d in pkg.spec.traverse(root=False)]
dep_prefixes = [d.prefix
for d in pkg.spec.traverse(root=False, deptype='build')]
env.set_path(SPACK_DEPENDENCIES, dep_prefixes)
# Add dependencies to CMAKE_PREFIX_PATH
env.set_path('CMAKE_PREFIX_PATH', dep_prefixes)
@ -337,10 +338,6 @@ def set_module_variables_for_package(pkg, module):
# Don't use which for this; we want to find it in the current dir.
m.configure = Executable('./configure')
# TODO: shouldn't really use "which" here. Consider adding notion
# TODO: of build dependencies, as opposed to link dependencies.
# TODO: Currently, everything is a link dependency, but tools like
# TODO: this shouldn't be.
m.cmake = Executable('cmake')
m.ctest = Executable('ctest')
@ -388,9 +385,10 @@ def set_module_variables_for_package(pkg, module):
def get_rpaths(pkg):
"""Get a list of all the rpaths for a package."""
rpaths = [pkg.prefix.lib, pkg.prefix.lib64]
rpaths.extend(d.prefix.lib for d in pkg.spec.dependencies.values()
deps = pkg.spec.dependencies(deptype='link')
rpaths.extend(d.prefix.lib for d in deps
if os.path.isdir(d.prefix.lib))
rpaths.extend(d.prefix.lib64 for d in pkg.spec.dependencies.values()
rpaths.extend(d.prefix.lib64 for d in deps
if os.path.isdir(d.prefix.lib64))
# Second module is our compiler mod name. We use that to get rpaths from
# module show output.
@ -449,7 +447,7 @@ def setup_package(pkg, dirty=False):
load_external_modules(pkg)
# traverse in postorder so package can use vars from its dependencies
spec = pkg.spec
for dspec in pkg.spec.traverse(order='post', root=False):
for dspec in pkg.spec.traverse(order='post', root=False, deptype='build'):
# If a user makes their own package repo, e.g.
# spack.repos.mystuff.libelf.Libelf, and they inherit from
# an existing class like spack.repos.original.libelf.Libelf,

View file

@ -51,7 +51,7 @@ def fetch(parser, args):
for spec in specs:
if args.missing or args.dependencies:
to_fetch = set()
for s in spec.traverse():
for s in spec.traverse(deptype_query=spack.alldeps):
package = spack.repo.get(s)
if args.missing and package.installed:
continue

View file

@ -81,10 +81,12 @@ def print_text_info(pkg):
print " " + fmt % (name, default, desc)
for deptype in ('build', 'link', 'run'):
print
print "Dependencies:"
if pkg.dependencies:
colify(pkg.dependencies, indent=4)
print "%s Dependencies:" % deptype.capitalize()
deps = pkg.dependencies(deptype)
if deps:
colify(deps, indent=4)
else:
print " None"

View file

@ -179,7 +179,7 @@ def mirror_create(args):
new_specs = set()
for spec in specs:
spec.concretize()
for s in spec.traverse():
for s in spec.traverse(deptype_query=spack.alldeps):
new_specs.add(s)
specs = list(new_specs)

View file

@ -87,7 +87,7 @@ def _find_modules(spec, modules_list):
return
if flags.recurse_dependencies:
for dep in spec.dependencies.values():
for dep in spec.dependencies():
_find_modules(dep, modules_list)
mod = module_types[mtype](spec)

View file

@ -80,11 +80,15 @@ def print_rst_package_list():
if pkg.versions:
print "Versions:"
print " " + ", ".join(str(v) for v in reversed(sorted(pkg.versions)))
if pkg.dependencies:
print "Dependencies"
for deptype in ('build', 'link', 'run'):
deps = pkg.dependencies(deptype)
if deps:
print "%s Dependencies" % deptype.capitalize()
print " " + ", ".join("`%s`_" % d if d != "mpi" else d
for d in pkg.dependencies)
for d in build_deps)
print
print "Description:"
print pkg.format_doc(indent=2)
print

View file

@ -133,7 +133,12 @@ def fetch_log(path):
def failed_dependencies(spec):
    """Return the set of direct dependencies of ``spec`` that are not
    installed.

    Only 'link' and 'run' dependencies are considered; build-only
    dependencies are not needed once a package is installed.
    """
    def get_deps(deptype):
        # Direct deps of the requested type whose package is not installed.
        return set(item for item in spec.dependencies(deptype)
                   if not spack.repo.get(item).installed)

    return get_deps('link').union(get_deps('run'))
def get_top_spec_or_die(args):

View file

@ -103,7 +103,7 @@ def cmp_externals(a, b):
usable.sort(cmp=cmp_externals)
return usable
# XXX(deptypes): Look here.
def choose_virtual_or_external(self, spec):
"""Given a list of candidate virtual and external packages, try to
find one that is most ABI compatible.
@ -394,8 +394,10 @@ def find_spec(spec, condition):
"""Searches the dag from spec in an intelligent order and looks
for a spec that matches a condition"""
# First search parents, then search children
dagiter = chain(spec.traverse(direction='parents', root=False),
spec.traverse(direction='children', root=False))
deptype = ('build', 'link')
dagiter = chain(
spec.traverse(direction='parents', deptype=deptype, root=False),
spec.traverse(direction='children', deptype=deptype, root=False))
visited = set()
for relative in dagiter:
if condition(relative):
@ -403,7 +405,7 @@ def find_spec(spec, condition):
visited.add(id(relative))
# Then search all other relatives in the DAG *except* spec
for relative in spec.root.traverse():
for relative in spec.root.traverse(deptypes=spack.alldeps):
if relative is spec: continue
if id(relative) in visited: continue
if condition(relative):

View file

@ -215,9 +215,14 @@ def _read_spec_from_yaml(self, hash_key, installs, parent_key=None):
# Add dependencies from other records in the install DB to
# form a full spec.
if 'dependencies' in spec_dict[spec.name]:
for dep_hash in spec_dict[spec.name]['dependencies'].values():
for dep in spec_dict[spec.name]['dependencies'].values():
if type(dep) == tuple:
dep_hash, deptypes = dep
else:
dep_hash = dep
deptypes = spack.alldeps
child = self._read_spec_from_yaml(dep_hash, installs, hash_key)
spec._add_dependency(child)
spec._add_dependency(child, deptypes)
# Specs from the database need to be marked concrete because
# they represent actual installations.
@ -334,7 +339,10 @@ def _check_ref_counts(self):
counts = {}
for key, rec in self._data.items():
counts.setdefault(key, 0)
for dep in rec.spec.dependencies.values():
# XXX(deptype): This checks all dependencies, but build
# dependencies might be able to be dropped in the
# future.
for dep in rec.spec.dependencies():
dep_key = dep.dag_hash()
counts.setdefault(dep_key, 0)
counts[dep_key] += 1
@ -406,7 +414,7 @@ def _add(self, spec, path, directory_layout=None, explicit=False):
else:
self._data[key] = InstallRecord(spec, path, True,
explicit=explicit)
for dep in spec.dependencies.values():
for dep in spec.dependencies(('link', 'run')):
self._increment_ref_count(dep, directory_layout)
def _increment_ref_count(self, spec, directory_layout=None):
@ -421,7 +429,7 @@ def _increment_ref_count(self, spec, directory_layout=None):
self._data[key] = InstallRecord(spec.copy(), path, installed)
for dep in spec.dependencies.values():
for dep in spec.dependencies('link'):
self._increment_ref_count(dep)
self._data[key].ref_count += 1
@ -466,7 +474,7 @@ def _decrement_ref_count(self, spec):
if rec.ref_count == 0 and not rec.installed:
del self._data[key]
for dep in spec.dependencies.values():
for dep in spec.dependencies('link'):
self._decrement_ref_count(dep)
def _remove(self, spec):
@ -480,7 +488,7 @@ def _remove(self, spec):
return rec.spec
del self._data[key]
for dep in rec.spec.dependencies.values():
for dep in rec.spec.dependencies('link'):
self._decrement_ref_count(dep)
# Returns the concrete spec so we know it in the case where a

View file

@ -171,7 +171,7 @@ def version(pkg, ver, checksum=None, **kwargs):
pkg.versions[Version(ver)] = kwargs
def _depends_on(pkg, spec, when=None):
def _depends_on(pkg, spec, when=None, type=None):
# If when is False do nothing
if when is False:
return
@ -180,10 +180,29 @@ def _depends_on(pkg, spec, when=None):
when = pkg.name
when_spec = parse_anonymous_spec(when, pkg.name)
if type is None:
# The default deptype is build and link because the common case is to
# build against a library which then turns into a runtime dependency
# due to the linker.
# XXX(deptype): Add 'run' to this? It's an uncommon dependency type,
# but is most backwards-compatible.
type = ('build', 'link')
if isinstance(type, str):
type = (type,)
for deptype in type:
if deptype not in spack.spec.alldeps:
raise UnknownDependencyTypeError('depends_on', pkg.name, deptype)
dep_spec = Spec(spec)
if pkg.name == dep_spec.name:
raise CircularReferenceError('depends_on', pkg.name)
pkg_deptypes = pkg._deptypes.setdefault(dep_spec.name, set())
for deptype in type:
pkg_deptypes.add(deptype)
conditions = pkg.dependencies.setdefault(dep_spec.name, {})
if when_spec in conditions:
conditions[when_spec].constrain(dep_spec, deps=False)
@ -191,13 +210,13 @@ def _depends_on(pkg, spec, when=None):
conditions[when_spec] = dep_spec
@directive(('dependencies', '_deptypes'))
def depends_on(pkg, spec, when=None, type=None):
    """Creates a dict of deps with specs defining when they apply.

    ``type`` is a dependency type string or tuple of strings drawn from
    ('build', 'link', 'run'); None selects the default ('build', 'link').
    """
    _depends_on(pkg, spec, when=when, type=type)
@directive(('extendees', 'dependencies'))
@directive(('extendees', 'dependencies', '_deptypes'))
def extends(pkg, spec, **kwargs):
"""Same as depends_on, but dependency is symlinked into parent prefix.
@ -326,3 +345,13 @@ def __init__(self, directive, package):
directive,
"Package '%s' cannot pass itself to %s" % (package, directive))
self.package = package
class UnknownDependencyTypeError(DirectiveError):
    """This is raised when a dependency is of an unknown type."""

    def __init__(self, directive, package, deptype):
        # Build the message first, then delegate to the base directive error.
        msg = ("Package '%s' cannot depend on a package via %s."
               % (package, deptype))
        super(UnknownDependencyTypeError, self).__init__(directive, msg)
        self.package = package

View file

@ -80,12 +80,14 @@ def topological_sort(spec, **kwargs):
"""
reverse = kwargs.get('reverse', False)
# XXX(deptype): iterate over a certain kind of dependency. Maybe color
# edges based on the type of dependency?
if not reverse:
parents = lambda s: s.dependents
children = lambda s: s.dependencies
parents = lambda s: s.dependents()
children = lambda s: s.dependencies()
else:
parents = lambda s: s.dependencies
children = lambda s: s.dependents
parents = lambda s: s.dependencies()
children = lambda s: s.dependents()
# Work on a copy so this is nondestructive.
spec = spec.copy()

View file

@ -120,7 +120,7 @@ def dependencies(spec, request='all'):
return []
if request == 'direct':
return [xx for _, xx in spec.dependencies.items()]
return spec.dependencies()
# FIXME : during module file creation nodes seem to be visited multiple
# FIXME : times even if cover='nodes' is given. This work around permits

View file

@ -577,7 +577,7 @@ def extendee_spec(self):
name = next(iter(self.extendees))
# If the extendee is in the spec's deps already, return that.
for dep in self.spec.traverse():
for dep in self.spec.traverse(deptypes=('link', 'run')):
if name == dep.name:
return dep
@ -642,7 +642,8 @@ def preorder_traversal(self, visited=None, **kwargs):
yield self
for name in sorted(self.dependencies.keys()):
spec = self.dependencies[name]
dep_spec = self.get_dependency(name)
spec = dep_spec.spec
# Currently, we do not descend into virtual dependencies, as this
# makes doing a sensible traversal much harder. We just assume
@ -685,7 +686,9 @@ def installed_dependents(self):
for spec in spack.installed_db.query():
if self.name == spec.name:
continue
for dep in spec.traverse():
# XXX(deptype): Should build dependencies not count here?
#for dep in spec.traverse(deptype=('run')):
for dep in spec.traverse(deptype=spack.alldeps):
if self.spec == dep:
dependents.append(spec)
return dependents
@ -1089,7 +1092,7 @@ def check_paths(path_list, filetype, predicate):
def do_install_dependencies(self, **kwargs):
    """Install every direct dependency of this package, forwarding all
    install options to each dependency's ``do_install``."""
    # Pass along paths of dependencies here
    for dep in self.spec.dependencies():
        dep.package.do_install(**kwargs)
@property
@ -1282,7 +1285,7 @@ def do_activate(self, force=False):
# Activate any package dependencies that are also extensions.
if not force:
for spec in self.spec.traverse(root=False):
for spec in self.spec.traverse(root=False, deptype='run'):
if spec.package.extends(self.extendee_spec):
if not spec.package.activated:
spec.package.do_activate(force=force)
@ -1328,7 +1331,7 @@ def do_deactivate(self, **kwargs):
for name, aspec in activated.items():
if aspec == self.spec:
continue
for dep in aspec.traverse():
for dep in aspec.traverse(deptype='run'):
if self.spec == dep:
raise ActivationError(
"Cannot deactivate %s because %s is activated and depends on it." # NOQA: ignore=E501
@ -1414,9 +1417,10 @@ def fetch_remote_versions(self):
def rpath(self):
    """Get the rpath this package links with, as a list of paths.

    Starts with this package's own lib/lib64 and appends the existing
    lib/lib64 directories of link-type dependencies only (build/run
    deps do not contribute to the link line).
    """
    rpaths = [self.prefix.lib, self.prefix.lib64]
    deps = self.spec.dependencies(deptype='link')
    rpaths.extend(d.prefix.lib for d in deps
                  if os.path.isdir(d.prefix.lib))
    rpaths.extend(d.prefix.lib64 for d in deps
                  if os.path.isdir(d.prefix.lib64))
    return rpaths

View file

@ -155,6 +155,9 @@
every time we call str()"""
_any_version = VersionList([':'])
alldeps = ('build', 'link', 'run')
nolink = ('build', 'run')
def index_specs(specs):
"""Take a list of specs and return a dict of lists. Dict is
@ -291,6 +294,27 @@ def __repr__(self):
return str(self)
@key_ordering
class DependencySpec(object):
    """
    Dependencies have conditions in which they apply.

    This stores both what is depended on (``spec``) and why it is a
    dependency (``deptypes``, a tuple of type names such as 'build',
    'link', 'run').
    """
    def __init__(self, spec, deptypes):
        self.spec = spec
        self.deptypes = deptypes

    def _cmp_key(self):
        # NOTE: deptypes are deliberately excluded -- two edges to the
        # same spec compare equal regardless of type.
        return self.spec

    def copy(self):
        # BUG FIX: the attribute is ``deptypes``; ``self.deptype`` raised
        # AttributeError on every copy.
        return DependencySpec(self.spec.copy(), self.deptypes)

    def __str__(self):
        return str(self.spec)
@key_ordering
class VariantSpec(object):
@ -440,11 +464,11 @@ class DependencyMap(HashableMap):
The DependencyMap is keyed by name. """
@property
def concrete(self):
    """True iff every dependency edge's spec in this map is concrete."""
    return all(d.spec.concrete for d in self.values())
def __str__(self):
return ''.join(
["^" + str(self[name]) for name in sorted(self.keys())])
["^" + str(self[name].spec) for name in sorted(self.keys())])
@key_ordering
@ -472,13 +496,13 @@ def __init__(self, spec_like, *dep_like, **kwargs):
# writes directly into this Spec object.
other = spec_list[0]
self.name = other.name
self.dependents = other.dependents
self.versions = other.versions
self.architecture = other.architecture
self.compiler = other.compiler
self.compiler_flags = other.compiler_flags
self.compiler_flags.spec = self
self.dependencies = other.dependencies
self._dependencies = other._dependencies
self._dependents = other._dependents
self.variants = other.variants
self.variants.spec = self
self.namespace = other.namespace
@ -500,7 +524,49 @@ def __init__(self, spec_like, *dep_like, **kwargs):
# Spec(a, b) will copy a but just add b as a dep.
for dep in dep_like:
spec = dep if isinstance(dep, Spec) else Spec(dep)
self._add_dependency(spec)
# XXX(deptype): default deptypes
self._add_dependency(spec, ('build', 'link'))
def get_dependency(self, name):
    """Look up the DependencySpec edge for the direct dependency ``name``.

    Raises InvalidDependencyException if this spec has no such
    dependency recorded.
    """
    dep = self._dependencies.get(name)
    if dep is None:
        raise InvalidDependencyException(
            self.name + " does not depend on " + comma_or(name))
    return dep
def _deptype_norm(self, deptype):
if deptype is None:
return alldeps
# Force deptype to be a set object so that we can do set intersections.
if isinstance(deptype, str):
return (deptype,)
return deptype
def _find_deps(self, where, deptype):
deptype = self._deptype_norm(deptype)
return [dep.spec
for dep in where.values()
if deptype and any(d in deptype for d in dep.deptypes)]
def dependencies(self, deptype=None):
    """Return a list of direct dependency Specs, optionally filtered
    by dependency type (None means all types)."""
    return self._find_deps(self._dependencies, deptype)
def dependents(self, deptype=None):
    """Return a list of Specs that directly depend on this one,
    optionally filtered by dependency type (None means all types)."""
    return self._find_deps(self._dependents, deptype)
def _find_deps_dict(self, where, deptype):
deptype = self._deptype_norm(deptype)
return [(dep.spec.name, dep)
for dep in where.values()
if deptype and any(d in deptype for d in dep.deptypes)]
def dependencies_dict(self, deptype=None):
    """Return a dict of name -> DependencySpec for direct dependencies,
    optionally filtered by dependency type."""
    return dict(self._find_deps_dict(self._dependencies, deptype))
def dependents_dict(self, deptype=None):
    """Return a dict of name -> DependencySpec for direct dependents,
    optionally filtered by dependency type."""
    return dict(self._find_deps_dict(self._dependents, deptype))
#
# Private routines here are called by the parser when building a spec.
@ -609,13 +675,13 @@ def _set_target(self, value):
if self.architecture.platform:
self.architecture.target = self.architecture.platform.target(value)
def _add_dependency(self, spec, deptypes):
    """Called by the parser to add another spec as a dependency.

    Records the edge in both directions: a DependencySpec in our
    ``_dependencies`` and a mirror entry in the child's ``_dependents``.
    """
    if spec.name in self._dependencies:
        raise DuplicateDependencyError(
            "Cannot depend on '%s' twice" % spec)
    self._dependencies[spec.name] = DependencySpec(spec, deptypes)
    spec._dependents[self.name] = DependencySpec(self, deptypes)
#
# Public interface
@ -632,15 +698,15 @@ def root(self):
installed). This will throw an assertion error if that is not
the case.
"""
if not self.dependents:
if not self._dependents:
return self
# If the spec has multiple dependents, ensure that they all
# lead to the same place. Spack shouldn't deal with any DAGs
# with multiple roots, so something's wrong if we find one.
depiter = iter(self.dependents.values())
first_root = next(depiter).root
assert(all(first_root is d.root for d in depiter))
depiter = iter(self._dependents.values())
first_root = next(depiter).spec.root
assert(all(first_root is d.spec.root for d in depiter))
return first_root
@property
@ -687,10 +753,21 @@ def concrete(self):
and self.architecture.concrete
and self.compiler and self.compiler.concrete
and self.compiler_flags.concrete
and self.dependencies.concrete)
and self._dependencies.concrete)
return self._concrete
def traverse(self, visited=None, deptype=None, **kwargs):
    """Traverse the DAG, returning plain Specs.

    Wraps traverse_with_deptype and strips the DependencySpec wrappers;
    when depth=True is requested, (depth, spec) tuples are preserved.
    """
    traversal = self.traverse_with_deptype(visited=visited,
                                           deptype=deptype,
                                           **kwargs)
    if kwargs.get('depth', False):
        return [(s[0], s[1].spec) for s in traversal]
    else:
        return [s.spec for s in traversal]
def traverse_with_deptype(self, visited=None, d=0, deptype=None,
deptype_query=None, _self_deptype=None,
**kwargs):
"""Generic traversal of the DAG represented by this spec.
This will yield each node in the spec. Options:
@ -742,6 +819,12 @@ def traverse(self, visited=None, d=0, **kwargs):
direction = kwargs.get('direction', 'children')
order = kwargs.get('order', 'pre')
if deptype is None:
deptype = alldeps
if deptype_query is None:
deptype_query = ('link', 'run')
# Make sure kwargs have legal values; raise ValueError if not.
def validate(name, val, allowed_values):
if val not in allowed_values:
@ -759,30 +842,37 @@ def validate(name, val, allowed_values):
if key in visited and cover == 'nodes':
return
# Determine whether and what to yield for this node.
def return_val(res):
return (d, res) if depth else res
yield_me = yield_root or d > 0
result = (d, self) if depth else self
# Preorder traversal yields before successors
if yield_me and order == 'pre':
yield result
yield return_val(DependencySpec(self, _self_deptype))
deps = self.dependencies_dict(deptype)
# Edge traversal yields but skips children of visited nodes
if not (key in visited and cover == 'edges'):
# This code determines direction and yields the children/parents
successors = self.dependencies
successors = deps
if direction == 'parents':
successors = self.dependents
successors = self.dependents_dict()
visited.add(key)
for name in sorted(successors):
child = successors[name]
for elt in child.traverse(visited, d + 1, **kwargs):
children = child.spec.traverse_with_deptype(
visited, d=d + 1, deptype=deptype_query,
deptype_query=deptype_query,
_self_deptype=child.deptypes, **kwargs)
for elt in children:
yield elt
# Postorder traversal yields after successors
if yield_me and order == 'post':
yield result
yield return_val(DependencySpec(self, _self_deptype))
@property
def short_spec(self):
@ -807,6 +897,7 @@ def dag_hash(self, length=None):
if self._hash:
return self._hash[:length]
else:
# XXX(deptype): ignore 'build' dependencies here
yaml_text = yaml.dump(
self.to_node_dict(), default_flow_style=True, width=sys.maxint)
sha = hashlib.sha1(yaml_text)
@ -819,11 +910,13 @@ def to_node_dict(self):
params = dict((name, v.value) for name, v in self.variants.items())
params.update(dict((name, value)
for name, value in self.compiler_flags.items()))
deps = self.dependencies_dict(deptype=('link', 'run'))
d = {
'parameters': params,
'arch': self.architecture,
'dependencies' : dict((d, self.dependencies[d].dag_hash())
for d in sorted(self.dependencies))
'dependencies': dict((d, (deps[d].spec.dag_hash(),
deps[d].deptypes))
for d in sorted(deps.keys()))
}
# Older concrete specs do not have a namespace. Omit for
@ -848,7 +941,7 @@ def to_node_dict(self):
def to_yaml(self, stream=None):
node_list = []
for s in self.traverse(order='pre'):
for s in self.traverse(order='pre', deptype=('link', 'run')):
node = s.to_node_dict()
node[s.name]['hash'] = s.dag_hash()
node_list.append(node)
@ -889,6 +982,11 @@ def from_node_dict(node):
raise SpackRecordError(
"Did not find a valid format for variants in YAML file")
# XXX(deptypes): why are dependencies not meant to be read here?
#for name, dep_info in node['dependencies'].items():
# (dag_hash, deptypes) = dep_info
# spec._dependencies[name] = DependencySpec(dag_hash, deptypes)
return spec
@staticmethod
@ -919,8 +1017,10 @@ def from_yaml(stream):
for node in yfile['spec']:
name = next(iter(node))
for dep_name in node[name]['dependencies']:
deps[name].dependencies[dep_name] = deps[dep_name]
for dep_name, (dep, deptypes) in \
node[name]['dependencies'].items():
deps[name]._dependencies[dep_name] = \
DependencySpec(deps[dep_name], deptypes)
return spec
def _concretize_helper(self, presets=None, visited=None):
@ -940,8 +1040,9 @@ def _concretize_helper(self, presets=None, visited=None):
changed = False
# Concretize deps first -- this is a bottom-up process.
for name in sorted(self.dependencies.keys()):
changed |= self.dependencies[name]._concretize_helper(presets, visited)
for name in sorted(self._dependencies.keys()):
changed |= self._dependencies[
name].spec._concretize_helper(presets, visited)
if self.name in presets:
changed |= self.constrain(presets[self.name])
@ -965,13 +1066,16 @@ def _concretize_helper(self, presets=None, visited=None):
def _replace_with(self, concrete):
"""Replace this virtual spec with a concrete spec."""
assert(self.virtual)
for name, dependent in self.dependents.items():
for name, dep_spec in self._dependents.items():
dependent = dep_spec.spec
deptypes = dep_spec.deptypes
# remove self from all dependents.
del dependent.dependencies[self.name]
del dependent._dependencies[self.name]
# add the replacement, unless it is already a dep of dependent.
if concrete.name not in dependent.dependencies:
dependent._add_dependency(concrete)
if concrete.name not in dependent._dependencies:
dependent._add_dependency(concrete, deptypes)
def _replace_node(self, replacement):
"""Replace this spec with another.
@ -982,13 +1086,15 @@ def _replace_node(self, replacement):
to be normalized.
"""
for name, dependent in self.dependents.items():
del dependent.dependencies[self.name]
dependent._add_dependency(replacement)
for name, dep_spec in self._dependents.items():
dependent = dep_spec.spec
deptypes = dep_spec.deptypes
del dependent._dependencies[self.name]
dependent._add_dependency(replacement, deptypes)
for name, dep in self.dependencies.items():
del dep.dependents[self.name]
del self.dependencies[dep.name]
for name, dep_spec in self._dependencies.items():
del dep_spec.spec.dependents[self.name]
del self._dependencies[dep.name]
def _expand_virtual_packages(self):
"""Find virtual packages in this spec, replace them with providers,
@ -1008,13 +1114,14 @@ def _expand_virtual_packages(self):
a problem.
"""
# Make an index of stuff this spec already provides
# XXX(deptype): 'link' and 'run'?
self_index = ProviderIndex(self.traverse(), restrict=True)
changed = False
done = False
while not done:
done = True
# XXX(deptype): 'link' and 'run'?
for spec in list(self.traverse()):
replacement = None
if spec.virtual:
@ -1054,10 +1161,10 @@ def _expand_virtual_packages(self):
# If replacement is external then trim the dependencies
if replacement.external or replacement.external_module:
if (spec.dependencies):
if (spec._dependencies):
changed = True
spec.dependencies = DependencyMap()
replacement.dependencies = DependencyMap()
spec._dependencies = DependencyMap()
replacement._dependencies = DependencyMap()
replacement.architecture = self.architecture
# TODO: could this and the stuff in _dup be cleaned up?
@ -1068,7 +1175,7 @@ def feq(cfield, sfield):
feq(replacement.versions, spec.versions) and
feq(replacement.compiler, spec.compiler) and
feq(replacement.architecture, spec.architecture) and
feq(replacement.dependencies, spec.dependencies) and
feq(replacement._dependencies, spec._dependencies) and
feq(replacement.variants, spec.variants) and
feq(replacement.external, spec.external) and
feq(replacement.external_module, spec.external_module)):
@ -1146,7 +1253,7 @@ def _mark_concrete(self):
Only for internal use -- client code should use "concretize"
unless there is a need to force a spec to be concrete.
"""
for s in self.traverse():
for s in self.traverse(deptype_query=alldeps):
s._normal = True
s._concrete = True
@ -1159,6 +1266,13 @@ def concretized(self):
return clone
def flat_dependencies(self, **kwargs):
    """Return a DependencyMap of name -> Spec for this spec's
    dependencies, discarding the deptype information kept by
    flat_dependencies_with_deptype."""
    flat_deps = DependencyMap()
    with_types = self.flat_dependencies_with_deptype(**kwargs)
    for name, depspec in with_types.items():
        flat_deps[name] = depspec.spec
    return flat_deps
def flat_dependencies_with_deptype(self, **kwargs):
"""Return a DependencyMap containing all of this spec's
dependencies with their constraints merged.
@ -1169,23 +1283,31 @@ def flat_dependencies(self, **kwargs):
returns them.
"""
copy = kwargs.get('copy', True)
deptype_query = kwargs.get('deptype_query')
flat_deps = DependencyMap()
try:
for spec in self.traverse(root=False):
deptree = self.traverse_with_deptype(root=False,
deptype_query=deptype_query)
for depspec in deptree:
spec = depspec.spec
deptypes = depspec.deptypes
if spec.name not in flat_deps:
if copy:
flat_deps[spec.name] = spec.copy(deps=False)
dep_spec = DependencySpec(spec.copy(deps=False),
deptypes)
else:
flat_deps[spec.name] = spec
dep_spec = DependencySpec(spec, deptypes)
flat_deps[spec.name] = dep_spec
else:
flat_deps[spec.name].constrain(spec)
flat_deps[spec.name].spec.constrain(spec)
if not copy:
for dep in flat_deps.values():
dep.dependencies.clear()
dep.dependents.clear()
self.dependencies.clear()
for depspec in flat_deps.values():
depspec.spec._dependencies.clear()
depspec.spec._dependents.clear()
self._dependencies.clear()
return flat_deps
@ -1200,17 +1322,11 @@ def index(self):
"""Return DependencyMap that points to all the dependencies in this
spec."""
dm = DependencyMap()
# XXX(deptype): use a deptype kwarg.
for spec in self.traverse():
dm[spec.name] = spec
return dm
def flatten(self):
"""Pull all dependencies up to the root (this spec).
Merge constraints for dependencies with the same name, and if they
conflict, throw an exception. """
for dep in self.flat_dependencies(copy=False):
self._add_dependency(dep)
def _evaluate_dependency_conditions(self, name):
"""Evaluate all the conditions on a dependency with this name.
@ -1267,7 +1383,8 @@ def _find_provider(self, vdep, provider_index):
elif required:
raise UnsatisfiableProviderSpecError(required[0], vdep)
def _merge_dependency(self, dep, visited, spec_deps, provider_index):
def _merge_dependency(self, dep, deptypes, visited, spec_deps,
provider_index):
"""Merge the dependency into this spec.
This is the core of normalize(). There are some basic steps:
@ -1294,7 +1411,9 @@ def _merge_dependency(self, dep, visited, spec_deps, provider_index):
dep = provider
else:
index = ProviderIndex([dep], restrict=True)
for vspec in (v for v in spec_deps.values() if v.virtual):
for vspec in (v.spec
for v in spec_deps.values()
if v.spec.virtual):
if index.providers_for(vspec):
vspec._replace_with(dep)
del spec_deps[vspec.name]
@ -1307,25 +1426,25 @@ def _merge_dependency(self, dep, visited, spec_deps, provider_index):
# If the spec isn't already in the set of dependencies, clone
# it from the package description.
if dep.name not in spec_deps:
spec_deps[dep.name] = dep.copy()
spec_deps[dep.name] = DependencySpec(dep.copy(), deptypes)
changed = True
# Constrain package information with spec info
try:
changed |= spec_deps[dep.name].constrain(dep)
changed |= spec_deps[dep.name].spec.constrain(dep)
except UnsatisfiableSpecError, e:
e.message = "Invalid spec: '%s'. "
e.message += "Package %s requires %s %s, but spec asked for %s"
e.message %= (spec_deps[dep.name], dep.name, e.constraint_type,
e.required, e.provided)
e.message %= (spec_deps[dep.name].spec, dep.name,
e.constraint_type, e.required, e.provided)
raise e
# Add merged spec to my deps and recurse
dependency = spec_deps[dep.name]
if dep.name not in self.dependencies:
self._add_dependency(dependency)
if dep.name not in self._dependencies:
self._add_dependency(dependency.spec, dependency.deptypes)
changed |= dependency._normalize_helper(
changed |= dependency.spec._normalize_helper(
visited, spec_deps, provider_index)
return changed
@ -1351,10 +1470,11 @@ def _normalize_helper(self, visited, spec_deps, provider_index):
for dep_name in pkg.dependencies:
# Do we depend on dep_name? If so pkg_dep is not None.
pkg_dep = self._evaluate_dependency_conditions(dep_name)
deptypes = pkg._deptypes[dep_name]
# If pkg_dep is a dependency, merge it.
if pkg_dep:
changed |= self._merge_dependency(
pkg_dep, visited, spec_deps, provider_index)
pkg_dep, deptypes, visited, spec_deps, provider_index)
any_change |= changed
return any_change
@ -1385,11 +1505,13 @@ def normalize(self, force=False):
# Ensure first that all packages & compilers in the DAG exist.
self.validate_names()
# Get all the dependencies into one DependencyMap
spec_deps = self.flat_dependencies(copy=False)
spec_deps = self.flat_dependencies_with_deptype(copy=False,
deptype_query=alldeps)
# Initialize index of virtual dependency providers if
# concretize didn't pass us one already
provider_index = ProviderIndex(spec_deps.values(), restrict=True)
provider_index = ProviderIndex([s.spec for s in spec_deps.values()],
restrict=True)
# traverse the package DAG and fill out dependencies according
# to package files & their 'when' specs
@ -1510,7 +1632,7 @@ def _constrain_dependencies(self, other):
"""Apply constraints of other spec's dependencies to this spec."""
other = self._autospec(other)
if not self.dependencies or not other.dependencies:
if not self._dependencies or not other._dependencies:
return False
# TODO: might want more detail than this, e.g. specific deps
@ -1526,13 +1648,17 @@ def _constrain_dependencies(self, other):
# Update with additional constraints from other spec
for name in other.dep_difference(self):
self._add_dependency(other[name].copy())
dep_spec_copy = other.get_dependency(name)
dep_copy = dep_spec_copy.spec
deptypes = dep_spec_copy.deptypes
self._add_dependency(dep_copy.copy(), deptypes)
changed = True
return changed
def common_dependencies(self, other):
"""Return names of dependencies that self an other have in common."""
# XXX(deptype): handle deptypes via deptype kwarg.
common = set(
s.name for s in self.traverse(root=False))
common.intersection_update(
@ -1657,13 +1783,14 @@ def satisfies_dependencies(self, other, strict=False):
other = self._autospec(other)
if strict:
if other.dependencies and not self.dependencies:
if other._dependencies and not self._dependencies:
return False
if not all(dep in self.dependencies for dep in other.dependencies):
if not all(dep in self._dependencies
for dep in other._dependencies):
return False
elif not self.dependencies or not other.dependencies:
elif not self._dependencies or not other._dependencies:
# if either spec doesn't restrict dependencies then both are
# compatible.
return True
@ -1726,8 +1853,8 @@ def _dup(self, other, **kwargs):
self.architecture = other.architecture
self.compiler = other.compiler.copy() if other.compiler else None
if kwargs.get('cleardeps', True):
self.dependents = DependencyMap()
self.dependencies = DependencyMap()
self._dependents = DependencyMap()
self._dependencies = DependencyMap()
self.compiler_flags = other.compiler_flags.copy()
self.variants = other.variants.copy()
self.variants.spec = self
@ -1739,15 +1866,30 @@ def _dup(self, other, **kwargs):
# If we copy dependencies, preserve DAG structure in the new spec
if kwargs.get('deps', True):
# This copies the deps from other using _dup(deps=False)
new_nodes = other.flat_dependencies()
# XXX(deptype): We can keep different instances of specs here iff
# it is only a 'build' dependency (from its parent).
# All other instances must be shared (due to symbol
# and PATH contention). These should probably search
# for any existing installation which can satisfy the
# build and latch onto that because if 3 things need
# the same build dependency and it is *not*
# available, we only want to build it once.
new_nodes = other.flat_dependencies(deptype_query=alldeps)
new_nodes[self.name] = self
# Hook everything up properly here by traversing.
for spec in other.traverse(cover='nodes'):
parent = new_nodes[spec.name]
for child in spec.dependencies:
if child not in parent.dependencies:
parent._add_dependency(new_nodes[child])
stack = [other]
while stack:
cur_spec = stack.pop(0)
new_spec = new_nodes[cur_spec.name]
for depspec in cur_spec._dependencies.values():
stack.append(depspec.spec)
# XXX(deptype): add any new deptypes that may have appeared
# here.
if depspec.spec.name not in new_spec._dependencies:
new_spec._add_dependency(
new_nodes[depspec.spec.name], depspec.deptypes)
# Since we preserved structure, we can copy _normal safely.
self._normal = other._normal
@ -1814,13 +1956,13 @@ def _eq_dag(self, other, vs, vo):
if self.ne_node(other):
return False
if len(self.dependencies) != len(other.dependencies):
if len(self._dependencies) != len(other._dependencies):
return False
ssorted = [self.dependencies[name]
for name in sorted(self.dependencies)]
osorted = [other.dependencies[name]
for name in sorted(other.dependencies)]
ssorted = [self._dependencies[name].spec
for name in sorted(self._dependencies)]
osorted = [other._dependencies[name].spec
for name in sorted(other._dependencies)]
for s, o in zip(ssorted, osorted):
visited_s = id(s) in vs
@ -1874,9 +2016,10 @@ def _cmp_key(self):
1. A tuple describing this node in the DAG.
2. The hash of each of this node's dependencies' cmp_keys.
"""
dep_dict = self.dependencies_dict(deptype=('link', 'run'))
return self._cmp_node() + (
tuple(hash(self.dependencies[name])
for name in sorted(self.dependencies)),)
tuple(hash(dep_dict[name])
for name in sorted(dep_dict)),)
def colorized(self):
return colorize_spec(self)
@ -2081,8 +2224,8 @@ def __cmp__(self, other):
self.architecture, other.architecture)
# Dependency is not configurable
if self.dependencies != other.dependencies:
return -1 if self.dependencies < other.dependencies else 1
if self._dependencies != other._dependencies:
return -1 if self._dependencies < other._dependencies else 1
# Equal specs
return 0
@ -2196,10 +2339,13 @@ def do_parse(self):
specs.append(self.spec(None))
self.previous = None
if self.accept(HASH):
specs[-1]._add_dependency(self.spec_by_hash())
dep = self.spec_by_hash()
else:
self.expect(ID)
specs[-1]._add_dependency(self.spec(self.token.value))
dep = self.spec(self.token.value)
# XXX(deptype): default deptypes
def_deptypes = ('build', 'link')
specs[-1]._add_dependency(dep, def_deptypes)
else:
# Attempt to construct an anonymous spec, but check that
@ -2263,8 +2409,8 @@ def spec(self, name, check_valid_token=False):
spec.external = None
spec.external_module = None
spec.compiler_flags = FlagMap(spec)
spec.dependents = DependencyMap()
spec.dependencies = DependencyMap()
spec._dependents = DependencyMap()
spec._dependencies = DependencyMap()
spec.namespace = spec_namespace
spec._hash = None

View file

@ -58,16 +58,39 @@ def mock_open(filename, mode):
class MockSpec(object):
def __init__(self, name, version, hashStr=None):
self.dependencies = {}
self._dependencies = {}
self.name = name
self.version = version
self.hash = hashStr if hashStr else hash((name, version))
def _deptype_norm(self, deptype):
    """Normalize a deptype argument to a tuple of dependency-type names.

    ``None`` means "all dependency types" (``spack.alldeps``); a bare
    string becomes a one-element tuple so callers can always treat the
    result as a collection suitable for set intersections.
    """
    if deptype is None:
        return spack.alldeps
    # Wrap a lone string so downstream set-intersection logic works.
    return (deptype,) if isinstance(deptype, str) else deptype
def _find_deps(self, where, deptype):
    """Collect the specs in *where* whose deptypes intersect *deptype*.

    *where* maps names to dependency records carrying ``.spec`` and
    ``.deptypes``; *deptype* is normalized via ``_deptype_norm``.
    """
    wanted = self._deptype_norm(deptype)

    def _matches(dep):
        # A record matches when any of its declared types is queried for;
        # an empty/falsy query selects nothing.
        return bool(wanted) and any(t in wanted for t in dep.deptypes)

    return [dep.spec for dep in where.values() if _matches(dep)]
def dependencies(self, deptype=None):
    """Return the specs this node depends on, filtered by *deptype*
    (``None`` selects all dependency types)."""
    return self._find_deps(self._dependencies, deptype)
def dependents(self, deptype=None):
    """Return the specs recorded as depending on this node, filtered by
    *deptype* (``None`` selects all dependency types)."""
    return self._find_deps(self._dependents, deptype)
def traverse(self, order=None):
for _, spec in self.dependencies.items():
yield spec
for _, spec in self._dependencies.items():
yield spec.spec
yield self
#allDeps = itertools.chain.from_iterable(i.traverse() for i in self.dependencies.itervalues())
#from_iterable = itertools.chain.from_iterable
#allDeps = from_iterable(i.traverse()
# for i in self.dependencies())
#return set(itertools.chain([self], allDeps))
def dag_hash(self):
@ -104,7 +127,7 @@ def mock_fetch_log(path):
specX = MockSpec('X', "1.2.0")
specY = MockSpec('Y', "2.3.8")
specX.dependencies['Y'] = specY
specX._dependencies['Y'] = spack.DependencySpec(specY, spack.alldeps)
pkgX = MockPackage(specX, 'logX')
pkgY = MockPackage(specY, 'logY')

View file

@ -197,32 +197,36 @@ def test_concretize_two_virtuals_with_dual_provider_and_a_conflict(self):
def test_virtual_is_fully_expanded_for_callpath(self):
# force dependence on fake "zmpi" by asking for MPI 10.0
spec = Spec('callpath ^mpi@10.0')
self.assertTrue('mpi' in spec.dependencies)
self.assertTrue('mpi' in spec._dependencies)
self.assertFalse('fake' in spec)
spec.concretize()
self.assertTrue('zmpi' in spec.dependencies)
self.assertTrue(all(not 'mpi' in d.dependencies for d in spec.traverse()))
self.assertTrue('zmpi' in spec._dependencies)
self.assertTrue(all('mpi' not in d._dependencies
for d in spec.traverse()))
self.assertTrue('zmpi' in spec)
self.assertTrue('mpi' in spec)
self.assertTrue('fake' in spec.dependencies['zmpi'])
self.assertTrue('fake' in spec._dependencies['zmpi'].spec)
def test_virtual_is_fully_expanded_for_mpileaks(self):
spec = Spec('mpileaks ^mpi@10.0')
self.assertTrue('mpi' in spec.dependencies)
self.assertTrue('mpi' in spec._dependencies)
self.assertFalse('fake' in spec)
spec.concretize()
self.assertTrue('zmpi' in spec.dependencies)
self.assertTrue('callpath' in spec.dependencies)
self.assertTrue('zmpi' in spec.dependencies['callpath'].dependencies)
self.assertTrue('fake' in spec.dependencies['callpath'].dependencies['zmpi'].dependencies)
self.assertTrue('zmpi' in spec._dependencies)
self.assertTrue('callpath' in spec._dependencies)
self.assertTrue('zmpi' in spec._dependencies['callpath'].
spec._dependencies)
self.assertTrue('fake' in spec._dependencies['callpath'].
spec._dependencies['zmpi'].
spec._dependencies)
self.assertTrue(all(not 'mpi' in d.dependencies for d in spec.traverse()))
self.assertTrue(all(not 'mpi' in d._dependencies for d in spec.traverse()))
self.assertTrue('zmpi' in spec)
self.assertTrue('mpi' in spec)

View file

@ -191,8 +191,7 @@ def initmock(self):
# restore later.
self.saved_deps = {}
def set_pkg_dep(self, pkg_name, spec):
def set_pkg_dep(self, pkg_name, spec, deptypes=spack.alldeps):
"""Alters dependence information for a package.
Adds a dependency on <spec> to pkg.
@ -206,7 +205,9 @@ def set_pkg_dep(self, pkg_name, spec):
self.saved_deps[pkg_name] = (pkg, pkg.dependencies.copy())
# Change dep spec
# XXX(deptype): handle deptypes.
pkg.dependencies[spec.name] = {Spec(pkg_name): spec}
pkg._deptypes[spec.name] = set(deptypes)
def cleanmock(self):
@ -216,6 +217,7 @@ def cleanmock(self):
shutil.rmtree(self.temp_config, ignore_errors=True)
spack.config.clear_config_caches()
# XXX(deptype): handle deptypes.
# Restore dependency changes that happened during the test
for pkg_name, (pkg, deps) in self.saved_deps.items():
pkg.dependencies.clear()

View file

@ -148,10 +148,12 @@ def test_conflicting_spec_constraints(self):
# Normalize then add conflicting constraints to the DAG (this is an
# extremely unlikely scenario, but we test for it anyway)
mpileaks.normalize()
mpileaks.dependencies['mpich'] = Spec('mpich@1.0')
mpileaks.dependencies['callpath'].dependencies['mpich'] = Spec('mpich@2.0')
mpileaks._dependencies['mpich'].spec = Spec('mpich@1.0')
mpileaks._dependencies['callpath']. \
spec._dependencies['mpich'].spec = Spec('mpich@2.0')
self.assertRaises(spack.spec.InconsistentSpecError, mpileaks.flatten)
self.assertRaises(spack.spec.InconsistentSpecError,
lambda: mpileaks.flat_dependencies(copy=False))
def test_normalize_twice(self):
@ -197,15 +199,17 @@ def test_normalize_with_virtual_spec(self):
def check_links(self, spec_to_check):
for spec in spec_to_check.traverse():
for dependent in spec.dependents.values():
for dependent in spec.dependents():
self.assertTrue(
spec.name in dependent.dependencies,
"%s not in dependencies of %s" % (spec.name, dependent.name))
spec.name in dependent.dependencies_dict(),
"%s not in dependencies of %s" %
(spec.name, dependent.name))
for dependency in spec.dependencies.values():
for dependency in spec.dependencies():
self.assertTrue(
spec.name in dependency.dependents,
"%s not in dependents of %s" % (spec.name, dependency.name))
spec.name in dependency.dependents_dict(),
"%s not in dependents of %s" %
(spec.name, dependency.name))
def test_dependents_and_dependencies_are_correct(self):
@ -442,3 +446,69 @@ def test_copy_concretized(self):
orig_ids = set(id(s) for s in orig.traverse())
copy_ids = set(id(s) for s in copy.traverse())
self.assertFalse(orig_ids.intersection(copy_ids))
"""
Here is the graph with deptypes labeled (assume all packages have a 'dt'
prefix). Arrows are marked with the deptypes ('b' for 'build', 'l' for
'link', 'r' for 'run').
use -bl-> top
top -b-> build1
top -bl-> link1
top -r-> run1
build1 -b-> build2
build1 -bl-> link2
build1 -r-> run2
link1 -bl-> link3
run1 -bl-> link5
run1 -r-> run3
link3 -b-> build2
link3 -bl-> link4
run3 -b-> build3
"""
def test_deptype_traversal(self):
    """Default traversal from dtuse yields this exact preorder (see the
    deptype graph documented in this module)."""
    spec = Spec('dtuse')
    spec.normalize()
    expected = ['dtuse', 'dttop', 'dtlink1', 'dtlink3', 'dtlink4',
                'dtrun1', 'dtlink5', 'dtrun3']
    actual = [node.name for node in spec.traverse()]
    self.assertEqual(actual, expected)
def test_deptype_traversal_with_builddeps(self):
    """Default traversal from dttop includes its direct build dep
    (dtbuild1) but not deeper build-only nodes (dtbuild2, dtbuild3
    are absent from the expected order)."""
    dag = Spec('dttop')
    dag.normalize()
    names = ['dttop', 'dtbuild1', 'dtlink2', 'dtrun2', 'dtlink1',
             'dtlink3', 'dtlink4', 'dtrun1', 'dtlink5', 'dtrun3']
    traversal = dag.traverse()
    self.assertEqual([x.name for x in traversal], names)
def test_deptype_traversal_full(self):
    """Querying with alldeps visits every node in the graph, including
    the build-only subtrees (dtbuild2, dtbuild3)."""
    spec = Spec('dttop')
    spec.normalize()
    expected = ['dttop', 'dtbuild1', 'dtbuild2', 'dtlink2', 'dtrun2',
                'dtlink1', 'dtlink3', 'dtlink4', 'dtrun1', 'dtlink5',
                'dtrun3', 'dtbuild3']
    actual = [node.name
              for node in spec.traverse(deptype_query=spack.alldeps)]
    self.assertEqual(actual, expected)
def test_deptype_traversal_pythonpath(self):
    """Traverse edges of type nolink (('build', 'run')) while querying
    for 'run' across the DAG — the PYTHONPATH-style lookup where
    modules are needed at build and run time but never linked."""
    dag = Spec('dttop')
    dag.normalize()
    names = ['dttop', 'dtbuild1', 'dtrun2', 'dtlink1', 'dtrun1',
             'dtrun3']
    traversal = dag.traverse(deptype=spack.nolink, deptype_query='run')
    self.assertEqual([x.name for x in traversal], names)

View file

@ -38,7 +38,7 @@ class CmakeClient(Package):
version('1.0', '4cb3ff35b2472aae70f542116d616e63')
depends_on('cmake')
depends_on('cmake', type='build')
def setup_environment(self, spack_env, run_env):

View file

@ -0,0 +1,18 @@
from spack import *
class Dtbuild1(Package):
    """Package for use as a build tool for deptypes testing which has its own
    deptree"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/dtbuild1-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    # Exercises all three dependency types from a non-root node:
    # 'build' (dtbuild2), default ('build', 'link') (dtlink2), 'run' (dtrun2).
    depends_on('dtbuild2', type='build')
    depends_on('dtlink2')
    depends_on('dtrun2', type='run')

    def install(self, spec, prefix):
        # Mock package for the deptype tests; nothing to install.
        pass

View file

@ -0,0 +1,13 @@
from spack import *
class Dtbuild2(Package):
    """Simple package which acts as a build dependency"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/dtbuild2-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    def install(self, spec, prefix):
        # Mock leaf package for the deptype tests; nothing to install.
        pass

View file

@ -0,0 +1,13 @@
from spack import *
class Dtbuild3(Package):
    """Simple package which acts as a build dependency"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/dtbuild3-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    def install(self, spec, prefix):
        # Mock leaf package for the deptype tests; nothing to install.
        pass

View file

@ -0,0 +1,15 @@
from spack import *
class Dtlink1(Package):
    """Simple package which acts as a link dependency"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/dtlink1-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    # No explicit type, so this defaults to ('build', 'link').
    depends_on('dtlink3')

    def install(self, spec, prefix):
        # Mock package for the deptype tests; nothing to install.
        pass

View file

@ -0,0 +1,13 @@
from spack import *
class Dtlink2(Package):
    """Simple package which acts as a link dependency"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/dtlink2-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    def install(self, spec, prefix):
        # Mock leaf package for the deptype tests; nothing to install.
        pass

View file

@ -0,0 +1,16 @@
from spack import *
class Dtlink3(Package):
    """Simple package which acts as a link dependency"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/dtlink3-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    # Mixes a build-only dep with a default ('build', 'link') dep.
    depends_on('dtbuild2', type='build')
    depends_on('dtlink4')

    def install(self, spec, prefix):
        # Mock package for the deptype tests; nothing to install.
        pass

View file

@ -0,0 +1,13 @@
from spack import *
class Dtlink4(Package):
    """Simple package which acts as a link dependency"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/dtlink4-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    def install(self, spec, prefix):
        # Mock leaf package for the deptype tests; nothing to install.
        pass

View file

@ -0,0 +1,13 @@
from spack import *
class Dtlink5(Package):
    """Simple package which acts as a link dependency"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/dtlink5-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    def install(self, spec, prefix):
        # Mock leaf package for the deptype tests; nothing to install.
        pass

View file

@ -0,0 +1,16 @@
from spack import *
class Dtrun1(Package):
    """Simple package which acts as a run dependency"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/dtrun1-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    # Default ('build', 'link') dep plus a run-only dep.
    depends_on('dtlink5')
    depends_on('dtrun3', type='run')

    def install(self, spec, prefix):
        # Mock package for the deptype tests; nothing to install.
        pass

View file

@ -0,0 +1,13 @@
from spack import *
class Dtrun2(Package):
    """Simple package which acts as a run dependency"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/dtrun2-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    def install(self, spec, prefix):
        # Mock leaf package for the deptype tests; nothing to install.
        pass

View file

@ -0,0 +1,15 @@
from spack import *
class Dtrun3(Package):
    """Simple package which acts as a run dependency"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/dtrun3-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    # Build-only dep reachable solely through a run edge.
    depends_on('dtbuild3', type='build')

    def install(self, spec, prefix):
        # Mock package for the deptype tests; nothing to install.
        pass

View file

@ -0,0 +1,17 @@
from spack import *
class Dttop(Package):
    """Package with a complicated dependency tree"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/dttop-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    # Root of the deptype test graph: one dep of each kind —
    # 'build' (dtbuild1), default ('build', 'link') (dtlink1), 'run' (dtrun1).
    depends_on('dtbuild1', type='build')
    depends_on('dtlink1')
    depends_on('dtrun1', type='run')

    def install(self, spec, prefix):
        # Mock package for the deptype tests; nothing to install.
        pass

View file

@ -0,0 +1,15 @@
from spack import *
class Dtuse(Package):
    """Simple package which uses dttop"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/dtuse-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    # Default ('build', 'link') dep on the graph root dttop.
    depends_on('dttop')

    def install(self, spec, prefix):
        # Mock package for the deptype tests; nothing to install.
        pass