Allow for multiple dependencies/dependents from the same package (#28673)

Change the internal representation of `Spec` to allow for multiple dependencies or
dependents stemming from the same package. This change makes it possible to represent
cases that are frequent in cross-compiled environments or when bootstrapping compilers.

Modifications:
- [x] Substituted `DependencyMap` with `_EdgeMap`. The main differences are that the
      latter does not support direct item assignment and can be modified only through its
      API. It also provides a `select` method to query items.
- [x] Reworked a few public APIs of `Spec` to return lists of dependencies or the related edges.
- [x] Added unit tests to prevent regressions on #11983 and to demonstrate the synthetic
      construction of specs with multiple dependencies from the same package.

Since #22845 went in first, this PR reuses that format, so it should not change hashes.
The same package may be present multiple times in the list of dependencies with different
associated specs (each with its own hash).
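
As a quick illustration of the reworked API, here is a minimal sketch that mirrors the new unit tests added at the bottom of this diff (`b` and `c` are packages from the mock repository used by the test suite):

```python
import spack.spec

# Synthetically attach two nodes from the same package "c" to one parent,
# through edges with different dependency types.
root = spack.spec.Spec('b').concretized()
link_run_spec = spack.spec.Spec('c@1.0').concretized()
build_spec = spack.spec.Spec('c@2.0').concretized()

root.add_dependency_edge(link_run_spec, deptype='link')
root.add_dependency_edge(link_run_spec, deptype='run')
root.add_dependency_edge(build_spec, deptype='build')

assert len(root.dependencies(name='c')) == 2           # two nodes ...
assert len(root.edges_to_dependencies(name='c')) == 2  # ... reached by two edges
assert '@2.0' in root.dependencies(name='c', deptype='build')[0]
```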
Massimiliano Culpo 2022-03-10 20:53:45 +01:00 committed by GitHub
parent 3d624d204f
commit 2cd5c00923
14 changed files with 768 additions and 313 deletions


@@ -967,3 +967,12 @@ def nullcontext(*args, **kwargs):
class UnhashableArguments(TypeError):
"""Raise when an @memoized function receives unhashable arg or kwarg values."""
def enum(**kwargs):
"""Return an enum-like class.
Args:
**kwargs: explicit dictionary of enums
"""
return type('Enum', (object,), kwargs)
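
Later in this commit, this helper is used to define edge directions, for example:

```python
# From the _EdgeMap changes below
EdgeDirection = lang.enum(parent=0, child=1)
assert EdgeDirection.parent == 0 and EdgeDirection.child == 1
```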


@@ -8,9 +8,9 @@
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.graph
import spack.store
from spack.filesystem_view import YamlFilesystemView
from spack.graph import topological_sort
description = "deactivate a package extension"
section = "extensions"
@@ -68,11 +68,8 @@ def deactivate(parser, args):
tty.msg("Deactivating %s and all dependencies." %
pkg.spec.short_spec)
topo_order = topological_sort(spec)
index = spec.index()
for name in topo_order:
espec = index[name]
nodes_in_topological_order = spack.graph.topological_sort(spec)
for espec in reversed(nodes_in_topological_order):
epkg = espec.package
if epkg.extends(pkg.extendee_spec):
if epkg.is_activated(view) or args.force:


@@ -961,7 +961,7 @@ def _check_ref_counts(self):
counts = {}
for key, rec in self._data.items():
counts.setdefault(key, 0)
for dep in rec.spec.dependencies(_tracked_deps):
for dep in rec.spec.dependencies(deptype=_tracked_deps):
dep_key = dep.dag_hash()
counts.setdefault(dep_key, 0)
counts[dep_key] += 1
@@ -1078,7 +1078,7 @@ def _add(
# Retrieve optional arguments
installation_time = installation_time or _now()
for dep in spec.dependencies(_tracked_deps):
for dep in spec.dependencies(deptype=_tracked_deps):
dkey = dep.dag_hash()
if dkey not in self._data:
extra_args = {
@@ -1120,9 +1120,7 @@ def _add(
)
# Connect dependencies from the DB to the new copy.
for name, dep in six.iteritems(
spec.dependencies_dict(_tracked_deps)
):
for dep in spec.edges_to_dependencies(deptype=_tracked_deps):
dkey = dep.spec.dag_hash()
upstream, record = self.query_by_spec_hash(dkey)
new_spec._add_dependency(record.spec, dep.deptypes)
@@ -1185,7 +1183,7 @@ def _decrement_ref_count(self, spec):
if rec.ref_count == 0 and not rec.installed:
del self._data[key]
for dep in spec.dependencies(_tracked_deps):
for dep in spec.dependencies(deptype=_tracked_deps):
self._decrement_ref_count(dep)
def _increment_ref_count(self, spec):
@@ -1213,13 +1211,10 @@ def _remove(self, spec):
del self._data[key]
for dep in rec.spec.dependencies(_tracked_deps):
# FIXME: the two lines below needs to be updated once #11983 is
# FIXME: fixed. The "if" statement should be deleted and specs are
# FIXME: to be removed from dependents by hash and not by name.
# FIXME: See https://github.com/spack/spack/pull/15777#issuecomment-607818955
if dep._dependents.get(spec.name):
del dep._dependents[spec.name]
# Remove any reference to this node from dependencies and
# decrement the reference count
rec.spec.detach(deptype=_tracked_deps)
for dep in rec.spec.dependencies(deptype=_tracked_deps):
self._decrement_ref_count(dep)
if rec.deprecated_for:


@@ -58,7 +58,7 @@ def canonical_deptype(deptype):
if bad:
raise ValueError(
'Invalid dependency types: %s' % ','.join(str(t) for t in bad))
return tuple(sorted(deptype))
return tuple(sorted(set(deptype)))
raise ValueError('Invalid dependency type: %s' % repr(deptype))
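
The added `set()` call deduplicates repeated entries before sorting, for example:

```python
canonical_deptype(('build', 'link', 'build'))  # -> ('build', 'link')
```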


@@ -42,54 +42,84 @@
can take a number of specs as input.
"""
import heapq
import itertools
import sys
from heapq import heapify, heappop, heappush
from llnl.util.tty.color import ColorStream
import llnl.util.tty.color
from spack.dependency import all_deptypes, canonical_deptype
import spack.dependency
__all__ = ['topological_sort', 'graph_ascii', 'AsciiGraph', 'graph_dot']
__all__ = ['graph_ascii', 'AsciiGraph', 'graph_dot']
def topological_sort(spec, reverse=False, deptype='all'):
"""Topological sort for specs.
def node_label(spec):
return spec.format('{name}{@version}{/hash:7}')
Return a list of dependency specs sorted topologically. The spec
argument is not modified in the process.
def topological_sort(spec, deptype='all'):
"""Return a list of dependency specs in topological sorting order.
The spec argument is not modified by the function.
This function assumes specs don't have cycles, i.e. that we are really
operating with a DAG.
Args:
spec (spack.spec.Spec): the spec to be analyzed
deptype (str or tuple): dependency types to account for when
constructing the list
"""
deptype = canonical_deptype(deptype)
deptype = spack.dependency.canonical_deptype(deptype)
# Work on a copy so this is nondestructive.
spec = spec.copy(deps=deptype)
# Work on a copy so this is nondestructive
spec = spec.copy(deps=True)
nodes = spec.index(deptype=deptype)
parents = lambda s: [p for p in s.dependents() if p.name in nodes]
children = lambda s: s.dependencies()
def dependencies(specs):
"""Return all the dependencies (including transitive) for a spec."""
return list(set(itertools.chain.from_iterable(
s.dependencies(deptype=deptype) for s in specs
)))
if reverse:
parents, children = children, parents
def dependents(specs):
"""Return all the dependents (including those of transitive dependencies)
for a spec.
"""
candidates = list(set(itertools.chain.from_iterable(
s.dependents(deptype=deptype) for s in specs
)))
return [x for x in candidates if x.name in nodes]
topo_order = []
par = dict((name, parents(nodes[name])) for name in nodes.keys())
remaining = [name for name in nodes.keys() if not parents(nodes[name])]
heapify(remaining)
topological_order, children = [], {}
while remaining:
name = heappop(remaining)
topo_order.append(name)
# Map a spec encoded as (id, name) to a list of its transitive dependencies
for spec in itertools.chain.from_iterable(nodes.values()):
children[(id(spec), spec.name)] = [
x for x in dependencies([spec]) if x.name in nodes
]
node = nodes[name]
for dep in children(node):
par[dep.name].remove(node)
if not par[dep.name]:
heappush(remaining, dep.name)
# To return a result that is topologically ordered we need to add nodes
# only after their dependencies. The first nodes we can add are leaf nodes,
# i.e. nodes that have no dependencies.
ready = [
spec for spec in itertools.chain.from_iterable(nodes.values())
if not dependencies([spec])
]
heapq.heapify(ready)
if any(par.get(s.name, []) for s in spec.traverse()):
raise ValueError("Spec has cycles!")
else:
return topo_order
while ready:
# Pop a "ready" node and add it to the topologically ordered list
s = heapq.heappop(ready)
topological_order.append(s)
# Check if adding the last node made other nodes "ready"
for dep in dependents([s]):
children[(id(dep), dep.name)].remove(s)
if not children[(id(dep), dep.name)]:
heapq.heappush(ready, dep)
return topological_order
def find(seq, predicate):
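
As a standalone illustration of the approach used by the reworked `topological_sort` above, here is a minimal heap-based Kahn's algorithm over plain dictionaries (a sketch, not part of this commit):

```python
import heapq

def topo_order(graph):
    """graph maps each node to the set of nodes it depends on."""
    remaining = {node: set(deps) for node, deps in graph.items()}
    dependents = {node: [] for node in graph}
    for node, deps in graph.items():
        for dep in deps:
            dependents[dep].append(node)

    # Leaf nodes (no dependencies) are "ready" first; the heap keeps
    # the order among ties deterministic.
    ready = [node for node, deps in remaining.items() if not deps]
    heapq.heapify(ready)

    order = []
    while ready:
        node = heapq.heappop(ready)
        order.append(node)
        # Adding this node may make some of its dependents "ready"
        for parent in dependents[node]:
            remaining[parent].discard(node)
            if not remaining[parent]:
                heapq.heappush(ready, parent)
    return order

# Dependencies always appear before their dependents:
assert topo_order({'a': {'b'}, 'b': {'c'}, 'c': set()}) == ['c', 'b', 'a']
```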
@@ -120,7 +150,7 @@ def __init__(self):
self.node_character = 'o'
self.debug = False
self.indent = 0
self.deptype = all_deptypes
self.deptype = spack.dependency.all_deptypes
# These are colors in the order they'll be used for edges.
# See llnl.util.tty.color for details on color characters.
@@ -131,7 +161,6 @@ def __init__(self):
self._name_to_color = None # Node name to color
self._out = None # Output stream
self._frontier = None # frontier
self._nodes = None # dict from name -> node
self._prev_state = None # State of previous line
self._prev_index = None # Index of expansion point of prev line
@@ -290,7 +319,10 @@ def advance(to_pos, edges):
self._set_state(BACK_EDGE, end, label)
self._out.write("\n")
def _node_line(self, index, name):
def _node_label(self, node):
return node.format('{name}@@{version}{/hash:7}')
def _node_line(self, index, node):
"""Writes a line with a node at index."""
self._indent()
for c in range(index):
@@ -301,7 +333,7 @@ def _node_line(self, index, name):
for c in range(index + 1, len(self._frontier)):
self._write_edge("| ", c)
self._out.write(" %s" % name)
self._out.write(self._node_label(node))
self._set_state(NODE, index)
self._out.write("\n")
@@ -363,29 +395,29 @@ def write(self, spec, color=None, out=None):
if color is None:
color = out.isatty()
self._out = ColorStream(out, color=color)
self._out = llnl.util.tty.color.ColorStream(out, color=color)
# We'll traverse the spec in topo order as we graph it.
topo_order = topological_sort(spec, reverse=True, deptype=self.deptype)
# We'll traverse the spec in topological order as we graph it.
nodes_in_topological_order = topological_sort(spec, deptype=self.deptype)
# Work on a copy to be nondestructive
spec = spec.copy()
self._nodes = spec.index()
# Colors associated with each node in the DAG.
# Edges are colored by the node they point to.
self._name_to_color = dict((name, self.colors[i % len(self.colors)])
for i, name in enumerate(topo_order))
self._name_to_color = {
spec.full_hash(): self.colors[i % len(self.colors)]
for i, spec in enumerate(nodes_in_topological_order)
}
# Frontier tracks open edges of the graph as it's written out.
self._frontier = [[spec.name]]
self._frontier = [[spec.full_hash()]]
while self._frontier:
# Find an unexpanded part of frontier
i = find(self._frontier, lambda f: len(f) > 1)
if i >= 0:
# Expand frontier until there are enough columns for all
# children.
# Expand frontier until there are enough columns for all children.
# Figure out how many back connections there are and
# sort them so we do them in order
@@ -436,8 +468,8 @@ def write(self, spec, color=None, out=None):
else:
# Just allow the expansion here.
name = self._frontier[i].pop(0)
deps = [name]
dep_hash = self._frontier[i].pop(0)
deps = [dep_hash]
self._frontier.insert(i, deps)
self._expand_right_line(i)
@@ -453,18 +485,17 @@ def write(self, spec, color=None, out=None):
else:
# Nothing to expand; add dependencies for a node.
name = topo_order.pop()
node = self._nodes[name]
node = nodes_in_topological_order.pop()
# Find the named node in the frontier and draw it.
i = find(self._frontier, lambda f: name in f)
self._node_line(i, name)
i = find(self._frontier, lambda f: node.full_hash() in f)
self._node_line(i, node)
# Replace node with its dependencies
self._frontier.pop(i)
deps = node.dependencies(self.deptype)
deps = node.dependencies(deptype=self.deptype)
if deps:
deps = sorted((d.name for d in deps), reverse=True)
deps = sorted((d.full_hash() for d in deps), reverse=True)
self._connect_deps(i, deps, "new-deps") # anywhere.
elif self._frontier:
@@ -478,7 +509,7 @@ def graph_ascii(spec, node='o', out=None, debug=False,
graph.indent = indent
graph.node_character = node
if deptype:
graph.deptype = canonical_deptype(deptype)
graph.deptype = spack.dependency.canonical_deptype(deptype)
graph.write(spec, color=color, out=out)
@@ -499,7 +530,7 @@ def graph_dot(specs, deptype='all', static=False, out=None):
if out is None:
out = sys.stdout
deptype = canonical_deptype(deptype)
deptype = spack.dependency.canonical_deptype(deptype)
def static_graph(spec, deptype):
pkg = spec.package
@@ -517,7 +548,7 @@ def dynamic_graph(spec, deptypes):
nodes = set() # elements are (node key, node label)
edges = set() # elements are (src key, dest key)
for s in spec.traverse(deptype=deptype):
nodes.add((s.dag_hash(), s.name))
nodes.add((s.dag_hash(), node_label(s)))
for d in s.dependencies(deptype=deptype):
edge = (s.dag_hash(), d.dag_hash())
edges.add(edge)


@@ -227,8 +227,9 @@ def _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs):
dep.concretize()
# mark compiler as depended-on by the packages that use it
for pkg in pkgs:
dep._dependents[pkg.name] = spack.spec.DependencySpec(
pkg.spec, dep, ('build',))
dep._dependents.add(
spack.spec.DependencySpec(pkg.spec, dep, ('build',))
)
packages = [(s.package, False) for
s in dep.traverse(order='post', root=False)]
packages.append((dep.package, True))


@@ -1259,9 +1259,10 @@ class Body(object):
# add all clauses from dependencies
if transitive:
if spec.concrete:
for dep_name, dep in spec.dependencies_dict().items():
for dtype in dep.deptypes:
clauses.append(fn.depends_on(spec.name, dep_name, dtype))
# TODO: We need to distinguish 2 specs from the same package later
for edge in spec.edges_to_dependencies():
for dtype in edge.deptypes:
clauses.append(fn.depends_on(spec.name, edge.spec.name, dtype))
for dep in spec.traverse(root=False):
if spec.concrete:
@@ -1907,12 +1908,18 @@ def external_spec_selected(self, pkg, idx):
)
def depends_on(self, pkg, dep, type):
dependency = self._specs[pkg]._dependencies.get(dep)
if not dependency:
self._specs[pkg]._add_dependency(
self._specs[dep], (type,))
dependencies = self._specs[pkg].edges_to_dependencies(name=dep)
# TODO: assertion to be removed when cross-compilation is handled correctly
msg = ("Current solver does not handle multiple dependency edges "
"of the same name")
assert len(dependencies) < 2, msg
if not dependencies:
self._specs[pkg].add_dependency_edge(self._specs[dep], (type,))
else:
dependency.add_type(type)
# TODO: This assumes that each solve unifies dependencies
dependencies[0].add_type(type)
def reorder_flags(self):
"""Order compiler flags on specs in predefined order.


@@ -667,7 +667,7 @@ class DependencySpec(object):
def __init__(self, parent, spec, deptypes):
self.parent = parent
self.spec = spec
self.deptypes = tuple(sorted(set(deptypes)))
self.deptypes = dp.canonical_deptype(deptypes)
def update_deptypes(self, deptypes):
deptypes = set(deptypes)
@@ -696,6 +696,9 @@ def __str__(self):
self.deptypes,
self.spec.name if self.spec else None)
def canonical(self):
return self.parent.dag_hash(), self.spec.dag_hash(), self.deptypes
_valid_compiler_flags = [
'cflags', 'cxxflags', 'fflags', 'ldflags', 'ldlibs', 'cppflags']
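
The new `canonical` method above identifies an edge by its node hashes rather than by Python object identity, so edges can be compared across copies of a DAG. A brief sketch, reusing `root` from the example at the top of this page:

```python
edge = root.edges_to_dependencies(name='c')[0]
parent_hash, child_hash, deptypes = edge.canonical()
```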
@@ -766,13 +769,119 @@ def __str__(self):
for key in sorted_keys) + cond_symbol
class DependencyMap(lang.HashableMap):
"""Each spec has a DependencyMap containing specs for its dependencies.
The DependencyMap is keyed by name. """
def _sort_by_dep_types(dspec):
# Use negation since False < True for sorting
return tuple(t not in dspec.deptypes for t in ("link", "run", "build", "test"))
#: Enum for edge directions
EdgeDirection = lang.enum(parent=0, child=1)
@lang.lazy_lexicographic_ordering
class _EdgeMap(Mapping):
"""Represent a collection of edges (DependencySpec objects) in the DAG.
Objects of this class are used in Specs to track edges that are
outgoing towards direct dependencies, or edges that are incoming
from direct dependents.
Edges are stored in a dictionary and keyed by package name.
"""
def __init__(self, store_by=EdgeDirection.child):
# Sanitize input arguments
msg = 'unexpected value for "store_by" argument'
assert store_by in (EdgeDirection.child, EdgeDirection.parent), msg
#: This dictionary maps a package name to a list of edges
#: i.e. to a list of DependencySpec objects
self.edges = {}
self.store_by_child = (store_by == EdgeDirection.child)
def __getitem__(self, key):
return self.edges[key]
def __iter__(self):
return iter(self.edges)
def __len__(self):
return len(self.edges)
def add(self, edge):
"""Adds a new edge to this object.
Args:
edge (DependencySpec): edge to be added
"""
key = edge.spec.name if self.store_by_child else edge.parent.name
current_list = self.edges.setdefault(key, [])
current_list.append(edge)
current_list.sort(key=_sort_by_dep_types)
def __str__(self):
return "{deps: %s}" % ', '.join(str(d) for d in sorted(self.values()))
def _cmp_iter(self):
for item in sorted(itertools.chain.from_iterable(self.edges.values())):
yield item
def copy(self):
"""Copies this object and returns a clone"""
clone = type(self)()
clone.store_by_child = self.store_by_child
# Copy everything from this dict into it.
for dspec in itertools.chain.from_iterable(self.values()):
clone.add(dspec.copy())
return clone
def select(self, parent=None, child=None, deptypes=dp.all_deptypes):
"""Select a list of edges and return them.
If an edge:
- Has *any* of the dependency types passed as argument,
- Matches the parent and/or child name, if passed
then it is selected.
The deptypes argument needs to be canonical, since the method won't
convert it for performance reasons.
Args:
parent (str): name of the parent package
child (str): name of the child package
deptypes (tuple): allowed dependency types in canonical form
Returns:
List of DependencySpec objects
"""
if not deptypes:
return []
# Start from all the edges we store
selected = (d for d in itertools.chain.from_iterable(self.values()))
# Filter by parent name
if parent:
selected = (d for d in selected if d.parent.name == parent)
# Filter by child name
if child:
selected = (d for d in selected if d.spec.name == child)
# Filter by allowed dependency types
if deptypes:
selected = (
dep for dep in selected
if not dep.deptypes or
any(d in deptypes for d in dep.deptypes)
)
return list(selected)
def clear(self):
self.edges.clear()
def _command_default_handler(descriptor, spec, cls):
"""Default handler when looking for the 'command' attribute.
@@ -1016,7 +1125,9 @@ def __init__(self, spec, name, query_parameters):
super(SpecBuildInterface, self).__init__(spec)
# Adding new attributes goes after super() call since the ObjectWrapper
# resets __dict__ to behave like the passed object
self.token = spec, name, query_parameters
original_spec = getattr(spec, 'wrapped_obj', spec)
self.wrapped_obj = original_spec
self.token = original_spec, name, query_parameters
is_virtual = spack.repo.path.is_virtual(name)
self.last_query = QueryState(
name=name,
@@ -1027,6 +1138,9 @@ def __init__(self, spec, name, query_parameters):
def __reduce__(self):
return SpecBuildInterface, self.token
def copy(self, *args, **kwargs):
return self.wrapped_obj.copy(*args, **kwargs)
@lang.lazy_lexicographic_ordering(set_hash=False)
class Spec(object):
@@ -1070,8 +1184,8 @@ def __init__(self, spec_like=None, normal=False,
self.architecture = None
self.compiler = None
self.compiler_flags = FlagMap(self)
self._dependents = DependencyMap()
self._dependencies = DependencyMap()
self._dependents = _EdgeMap(store_by=EdgeDirection.parent)
self._dependencies = _EdgeMap(store_by=EdgeDirection.child)
self.namespace = None
self._hash = None
@@ -1143,34 +1257,112 @@ def _format_module_list(modules):
def external(self):
return bool(self.external_path) or bool(self.external_modules)
def get_dependency(self, name):
dep = self._dependencies.get(name)
if dep is not None:
return dep
raise InvalidDependencyError(self.name, name)
def clear_dependencies(self):
"""Trim the dependencies of this spec."""
self._dependencies.clear()
def _find_deps(self, where, deptype):
def clear_edges(self):
"""Trim the dependencies and dependents of this spec."""
self._dependencies.clear()
self._dependents.clear()
def detach(self, deptype='all'):
"""Remove any reference that dependencies have of this node.
Args:
deptype (str or tuple): dependency types tracked by the
current spec
"""
key = self.dag_hash()
# Go through the dependencies
for dep in self.dependencies(deptype=deptype):
# Remove the spec from dependents
if self.name in dep._dependents:
dependents_copy = dep._dependents.edges[self.name]
del dep._dependents.edges[self.name]
for edge in dependents_copy:
if edge.parent.dag_hash() == key:
continue
dep._dependents.add(edge)
def _get_dependency(self, name):
# WARNING: This function is an implementation detail of the
# WARNING: original concretizer. Since with that greedy
# WARNING: algorithm we don't allow multiple nodes from
# WARNING: the same package in a DAG, here we hard-code
# WARNING: using index 0 i.e. we assume that we have only
# WARNING: one edge from package "name"
deps = self.edges_to_dependencies(name=name)
if len(deps) != 1:
err_msg = 'expected only 1 "{0}" dependency, but got {1}'
raise spack.error.SpecError(err_msg.format(name, len(deps)))
return deps[0]
def edges_from_dependents(self, name=None, deptype='all'):
"""Return a list of edges connecting this node in the DAG
to parents.
Args:
name (str): filter dependents by package name
deptype (str or tuple): allowed dependency types
"""
deptype = dp.canonical_deptype(deptype)
return [
d for d in
self._dependents.select(parent=name, deptypes=deptype)
]
return [dep for dep in where.values()
if deptype and (not dep.deptypes or
any(d in deptype for d in dep.deptypes))]
def edges_to_dependencies(self, name=None, deptype='all'):
"""Return a list of edges connecting this node in the DAG
to children.
def dependencies(self, deptype='all'):
return [d.spec
for d in self._find_deps(self._dependencies, deptype)]
Args:
name (str): filter dependencies by package name
deptype (str or tuple): allowed dependency types
"""
deptype = dp.canonical_deptype(deptype)
return [
d for d in
self._dependencies.select(child=name, deptypes=deptype)
]
def dependents(self, deptype='all'):
return [d.parent
for d in self._find_deps(self._dependents, deptype)]
def dependencies(self, name=None, deptype='all'):
"""Return a list of direct dependencies (nodes in the DAG).
def dependencies_dict(self, deptype='all'):
return dict((d.spec.name, d)
for d in self._find_deps(self._dependencies, deptype))
Args:
name (str): filter dependencies by package name
deptype (str or tuple): allowed dependency types
"""
return [d.spec for d in self.edges_to_dependencies(name, deptype=deptype)]
def dependents_dict(self, deptype='all'):
return dict((d.parent.name, d)
for d in self._find_deps(self._dependents, deptype))
def dependents(self, name=None, deptype='all'):
"""Return a list of direct dependents (nodes in the DAG).
Args:
name (str): filter dependents by package name
deptype (str or tuple): allowed dependency types
"""
return [d.parent for d in self.edges_from_dependents(name, deptype=deptype)]
def _dependencies_dict(self, deptype='all'):
"""Return a dictionary, keyed by package name, of the direct
dependencies.
Each value in the dictionary is a list of edges.
Args:
deptype: allowed dependency types
"""
_sort_fn = lambda x: (x.spec.name,) + _sort_by_dep_types(x)
_group_fn = lambda x: x.spec.name
deptype = dp.canonical_deptype(deptype)
selected_edges = self._dependencies.select(deptypes=deptype)
result = {}
for key, group in itertools.groupby(
sorted(selected_edges, key=_sort_fn), key=_group_fn
):
result[key] = list(group)
return result
#
# Private routines here are called by the parser when building a spec.
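
Taken together, the new edge-aware accessors compose as in this sketch (assuming a concretized `mpileaks` from the mock repository; not part of the commit):

```python
import spack.spec

s = spack.spec.Spec('mpileaks').concretized()

# Edges carry the deptypes; the node lists are derived from them.
for edge in s.edges_to_dependencies(deptype=('link', 'run')):
    print(edge.parent.name, '->', edge.spec.name, edge.deptypes)
assert [e.spec for e in s.edges_to_dependencies()] == s.dependencies()

# detach() removes only the back-references that dependencies hold to this node.
callpath = s.dependencies(name='callpath')[0]
s.detach()
assert not callpath.dependents(name='mpileaks')
assert callpath in s.dependencies()  # outgoing edges are kept
```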
@@ -1248,18 +1440,43 @@ def _add_dependency(self, spec, deptypes):
raise DuplicateDependencyError(
"Cannot depend on '%s' twice" % spec)
# create an edge and add to parent and child
dspec = DependencySpec(self, spec, deptypes)
self._dependencies[spec.name] = dspec
spec._dependents[self.name] = dspec
self.add_dependency_edge(spec, deptypes)
def add_dependency_edge(self, dependency_spec, deptype):
"""Add a dependency edge to this spec.
Args:
dependency_spec (Spec): spec of the dependency
deptype (str or tuple): dependency types
"""
deptype = dp.canonical_deptype(deptype)
# Check if we need to update edges that are already present
selected = self._dependencies.select(child=dependency_spec.name)
for edge in selected:
if any(d in edge.deptypes for d in deptype):
msg = ('cannot add a dependency on "{0.spec}" of {1} type '
'when the "{0.parent}" has the edge {0!s} already')
raise spack.error.SpecError(msg.format(edge, deptype))
for edge in selected:
if id(dependency_spec) == id(edge.spec):
# If we are here, it means the edge object was previously added to
# both the parent and the child. When we update this object they'll
# both see the deptype modification.
edge.add_type(deptype)
return
edge = DependencySpec(self, dependency_spec, deptype)
self._dependencies.add(edge)
dependency_spec._dependents.add(edge)
def _add_default_platform(self):
"""If a spec has an os or a target and no platform, give it
the default platform.
This is private because it is used by the parser -- it's not
expected to be used outside of ``spec.py``.
the default platform.
This is private because it is used by the parser -- it's not
expected to be used outside of ``spec.py``.
"""
arch = self.architecture
if arch and not arch.platform and (arch.os or arch.target):
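
The two loops in `add_dependency_edge` above give it the following behavior, mirrored by the new unit tests at the bottom of this diff (a sketch):

```python
import pytest

import spack.error
from spack.spec import Spec

p = Spec('b@2.0').concretized()
bootstrap = Spec('b@1.0').concretized()

# Repeated calls with the same child accumulate types on a single edge ...
for t in ('build', 'link', 'run'):
    p.add_dependency_edge(bootstrap, deptype=t)
assert len(p.edges_to_dependencies()) == 1

# ... while an edge to a *different* node with overlapping deptypes raises.
with pytest.raises(spack.error.SpackError):
    p.add_dependency_edge(Spec('b@2.0').concretized(), deptype='link')
```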
@@ -1280,10 +1497,12 @@ def root(self):
Spack specs have a single root (the package being installed).
"""
# FIXME: In the case of multiple parents this property does not
# FIXME: make sense. Should we revisit the semantics?
if not self._dependents:
return self
return next(iter(self._dependents.values())).parent.root
edges_by_package = next(iter(self._dependents.values()))
return edges_by_package[0].parent.root
@property
def package(self):
@@ -1448,21 +1667,24 @@ def return_val(dspec):
# This code determines direction and yields the children/parents
if direction == 'children':
where = self._dependencies
edges = self.edges_to_dependencies
key_fn = lambda dspec: dspec.spec.name
succ = lambda dspec: dspec.spec
elif direction == 'parents':
where = self._dependents
edges = self.edges_from_dependents
key_fn = lambda dspec: dspec.parent.name
succ = lambda dspec: dspec.parent
else:
raise ValueError('Invalid traversal direction: %s' % direction)
for name, dspec in sorted(where.items()):
for dspec in sorted(edges(), key=key_fn):
dt = dspec.deptypes
if dt and not any(d in deptype for d in dt):
continue
for child in succ(dspec).traverse_edges(
visited, d + 1, deptype, dspec, **kwargs):
visited, d + 1, deptype, dspec, **kwargs
):
yield child
# Postorder traversal yields after successors
@@ -1684,17 +1906,17 @@ def to_node_dict(self, hash=ht.dag_hash):
d['package_hash'] = package_hash
# Note: Relies on sorting dict by keys later in algorithm.
deps = self.dependencies_dict(deptype=hash.deptype)
deps = self._dependencies_dict(deptype=hash.deptype)
if deps:
deps_list = []
for name, dspec in sorted(deps.items()):
for name, edges_for_name in sorted(deps.items()):
name_tuple = ('name', name)
hash_tuple = (hash.name, dspec.spec._cached_hash(hash))
type_tuple = ('type', sorted(str(s) for s in dspec.deptypes))
deps_list.append(syaml.syaml_dict([name_tuple,
hash_tuple,
type_tuple]))
for dspec in edges_for_name:
hash_tuple = (hash.name, dspec.spec._cached_hash(hash))
type_tuple = ('type', sorted(str(s) for s in dspec.deptypes))
deps_list.append(syaml.syaml_dict(
[name_tuple, hash_tuple, type_tuple]
))
d['dependencies'] = deps_list
# Name is included in case this is replacing a virtual.
@@ -2257,8 +2479,14 @@ def _concretize_helper(self, concretizer, presets=None, visited=None):
changed = False
# Concretize deps first -- this is a bottom-up process.
for name in sorted(self._dependencies.keys()):
changed |= self._dependencies[name].spec._concretize_helper(
for name in sorted(self._dependencies):
# WARNING: This function is an implementation detail of the
# WARNING: original concretizer. Since with that greedy
# WARNING: algorithm we don't allow multiple nodes from
# WARNING: the same package in a DAG, here we hard-code
# WARNING: using index 0 i.e. we assume that we have only
# WARNING: one edge from package "name"
changed |= self._dependencies[name][0].spec._concretize_helper(
concretizer, presets, visited
)
@@ -2285,14 +2513,14 @@ def _concretize_helper(self, concretizer, presets=None, visited=None):
def _replace_with(self, concrete):
"""Replace this virtual spec with a concrete spec."""
assert(self.virtual)
for name, dep_spec in self._dependents.items():
assert self.virtual
for dep_spec in itertools.chain.from_iterable(self._dependents.values()):
dependent = dep_spec.parent
deptypes = dep_spec.deptypes
# remove self from all dependents, unless it is already removed
if self.name in dependent._dependencies:
del dependent._dependencies[self.name]
del dependent._dependencies.edges[self.name]
# add the replacement, unless it is already a dep of dependent.
if concrete.name not in dependent._dependencies:
@@ -2363,12 +2591,12 @@ def _expand_virtual_packages(self, concretizer):
# If replacement is external then trim the dependencies
if replacement.external:
if (spec._dependencies):
if spec._dependencies:
for dep in spec.dependencies():
del dep._dependents[spec.name]
del dep._dependents.edges[spec.name]
changed = True
spec._dependencies = DependencyMap()
replacement._dependencies = DependencyMap()
spec.clear_dependencies()
replacement.clear_dependencies()
replacement.architecture = self.architecture
# TODO: could this and the stuff in _dup be cleaned up?
@@ -2715,9 +2943,8 @@ def flat_dependencies(self, **kwargs):
if not copy:
for spec in flat_deps.values():
if not spec.concrete:
spec._dependencies.clear()
spec._dependents.clear()
self._dependencies.clear()
spec.clear_edges()
self.clear_dependencies()
return flat_deps
@@ -2732,11 +2959,12 @@ def flat_dependencies(self, **kwargs):
)
def index(self, deptype='all'):
"""Return DependencyMap that points to all the dependencies in this
spec."""
dm = DependencyMap()
"""Return a dictionary that points to all the dependencies in this
spec.
"""
dm = collections.defaultdict(list)
for spec in self.traverse(deptype=deptype):
dm[spec.name] = spec
dm[spec.name].append(spec)
return dm
def _evaluate_dependency_conditions(self, name):
@@ -3222,12 +3450,19 @@ def _constrain_dependencies(self, other):
for name in self.common_dependencies(other):
changed |= self[name].constrain(other[name], deps=False)
if name in self._dependencies:
changed |= self._dependencies[name].update_deptypes(
other._dependencies[name].deptypes)
# WARNING: This function is an implementation detail of the
# WARNING: original concretizer. Since with that greedy
# WARNING: algorithm we don't allow multiple nodes from
# WARNING: the same package in a DAG, here we hard-code
# WARNING: using index 0 i.e. we assume that we have only
# WARNING: one edge from package "name"
edges_from_name = self._dependencies[name]
changed |= edges_from_name[0].update_deptypes(
other._dependencies[name][0].deptypes)
# Update with additional constraints from other spec
for name in other.dep_difference(self):
dep_spec_copy = other.get_dependency(name)
dep_spec_copy = other._get_dependency(name)
dep_copy = dep_spec_copy.spec
deptypes = dep_spec_copy.deptypes
self._add_dependency(dep_copy.copy(), deptypes)
@@ -3490,8 +3725,8 @@ def _dup(self, other, deps=True, cleardeps=True, caches=None):
else None
self.compiler = other.compiler.copy() if other.compiler else None
if cleardeps:
self._dependents = DependencyMap()
self._dependencies = DependencyMap()
self._dependents = _EdgeMap(store_by=EdgeDirection.parent)
self._dependencies = _EdgeMap(store_by=EdgeDirection.child)
self.compiler_flags = other.compiler_flags.copy()
self.compiler_flags.spec = self
self.variants = other.variants.copy()
@@ -3547,22 +3782,25 @@ def _dup(self, other, deps=True, cleardeps=True, caches=None):
return changed
def _dup_deps(self, other, deptypes, caches):
new_specs = {self.name: self}
for dspec in other.traverse_edges(cover='edges',
root=False):
if (dspec.deptypes and
not any(d in deptypes for d in dspec.deptypes)):
def spid(spec):
return id(spec)
new_specs = {spid(other): self}
for edge in other.traverse_edges(cover='edges', root=False):
if edge.deptypes and not any(d in deptypes for d in edge.deptypes):
continue
if dspec.parent.name not in new_specs:
new_specs[dspec.parent.name] = dspec.parent.copy(
deps=False, caches=caches)
if dspec.spec.name not in new_specs:
new_specs[dspec.spec.name] = dspec.spec.copy(
deps=False, caches=caches)
if spid(edge.parent) not in new_specs:
new_specs[spid(edge.parent)] = edge.parent.copy(
deps=False, caches=caches
)
new_specs[dspec.parent.name]._add_dependency(
new_specs[dspec.spec.name], dspec.deptypes)
if spid(edge.spec) not in new_specs:
new_specs[spid(edge.spec)] = edge.spec.copy(deps=False, caches=caches)
new_specs[spid(edge.parent)].add_dependency_edge(
new_specs[spid(edge.spec)], edge.deptypes
)
def copy(self, deps=True, **kwargs):
"""Make a copy of this spec.
@@ -3679,8 +3917,10 @@ def eq_dag(self, other, deptypes=True, vs=None, vo=None):
for name in sorted(self._dependencies)]
osorted = [other._dependencies[name]
for name in sorted(other._dependencies)]
for s_dspec, o_dspec in zip(ssorted, osorted):
for s_dspec, o_dspec in zip(
itertools.chain.from_iterable(ssorted),
itertools.chain.from_iterable(osorted)
):
if deptypes and s_dspec.deptypes != o_dspec.deptypes:
return False
@@ -3724,7 +3964,9 @@ def _cmp_iter(self):
yield item
def deps():
for _, dep in sorted(self._dependencies.items()):
for dep in sorted(
itertools.chain.from_iterable(self._dependencies.values())
):
yield dep.spec.name
yield tuple(sorted(dep.deptypes))
yield hash(dep.spec)
@@ -4302,7 +4544,8 @@ def tree(self, **kwargs):
# when only covering nodes, we merge dependency types
# from all dependents before showing them.
types = [
ds.deptypes for ds in node.dependents_dict().values()]
ds.deptypes for ds in node.edges_from_dependents()
]
else:
# when covering edges or paths, we show dependency
# types only for the edge through which we visited
@@ -4373,6 +4616,18 @@ def splice(self, other, transitive):
assert other.concrete
assert other.name in self
# Check, for the time being, that we don't have DAG with multiple
# specs from the same package
def multiple_specs(root):
counter = collections.Counter([node.name for node in root.traverse()])
_, max_number = counter.most_common()[0]
return max_number > 1
if multiple_specs(self) or multiple_specs(other):
msg = ('Either "{0}" or "{1}" contain multiple specs from the same '
'package, which cannot be handled by splicing at the moment')
raise ValueError(msg.format(self, other))
# Multiple unique specs with the same name will collide, so the
# _dependents of these specs should not be trusted.
# Variants may also be ignored here for now...
@@ -4398,19 +4653,22 @@ def splice(self, other, transitive):
nodes.update(self_nodes)
for name in nodes:
# TODO: check if splice semantics is respected
if name in self_nodes:
dependencies = self[name]._dependencies
for dep in dependencies:
nodes[name]._add_dependency(nodes[dep],
dependencies[dep].deptypes)
if any(dep not in self_nodes for dep in dependencies):
for edge in self[name].edges_to_dependencies():
nodes[name].add_dependency_edge(
nodes[edge.spec.name], edge.deptypes
)
if any(dep not in self_nodes
for dep in self[name]._dependencies):
nodes[name].build_spec = self[name].build_spec
else:
dependencies = other[name]._dependencies
for dep in dependencies:
nodes[name]._add_dependency(nodes[dep],
dependencies[dep].deptypes)
if any(dep not in other_nodes for dep in dependencies):
for edge in other[name].edges_to_dependencies():
nodes[name].add_dependency_edge(
nodes[edge.spec.name], edge.deptypes
)
if any(dep not in other_nodes
for dep in other[name]._dependencies):
nodes[name].build_spec = other[name].build_spec
ret = nodes[self.name]
@@ -4491,7 +4749,7 @@ def merge_abstract_anonymous_specs(*abstract_specs):
# Update with additional constraints from other spec
for name in current_spec_constraint.dep_difference(merged_spec):
edge = current_spec_constraint.get_dependency(name)
edge = next(iter(current_spec_constraint.edges_to_dependencies(name)))
merged_spec._add_dependency(edge.spec.copy(), edge.deptypes)
return merged_spec


@@ -176,13 +176,17 @@ def test_concretize(self, spec):
def test_concretize_mention_build_dep(self):
spec = check_concretize('cmake-client ^cmake@3.4.3')
# Check parent's perspective of child
dependency = spec.dependencies_dict()['cmake']
assert set(dependency.deptypes) == set(['build'])
to_dependencies = spec.edges_to_dependencies(name='cmake')
assert len(to_dependencies) == 1
assert set(to_dependencies[0].deptypes) == set(['build'])
# Check child's perspective of parent
cmake = spec['cmake']
dependent = cmake.dependents_dict()['cmake-client']
assert set(dependent.deptypes) == set(['build'])
from_dependents = cmake.edges_from_dependents(name='cmake-client')
assert len(from_dependents) == 1
assert set(from_dependents[0].deptypes) == set(['build'])
def test_concretize_preferred_version(self):
spec = check_concretize('python')
@@ -379,30 +383,37 @@ def test_no_compilers_for_arch(self):
def test_virtual_is_fully_expanded_for_callpath(self):
# force dependence on fake "zmpi" by asking for MPI 10.0
spec = Spec('callpath ^mpi@10.0')
assert 'mpi' in spec._dependencies
assert len(spec.dependencies(name='mpi')) == 1
assert 'fake' not in spec
spec.concretize()
assert 'zmpi' in spec._dependencies
assert all('mpi' not in d._dependencies for d in spec.traverse())
assert 'zmpi' in spec
assert 'mpi' in spec
assert 'fake' in spec._dependencies['zmpi'].spec
assert len(spec.dependencies(name='zmpi')) == 1
assert all(not d.dependencies(name='mpi') for d in spec.traverse())
assert all(x in spec for x in ('zmpi', 'mpi'))
edges_to_zmpi = spec.edges_to_dependencies(name='zmpi')
assert len(edges_to_zmpi) == 1
assert 'fake' in edges_to_zmpi[0].spec
def test_virtual_is_fully_expanded_for_mpileaks(
self
):
spec = Spec('mpileaks ^mpi@10.0')
assert 'mpi' in spec._dependencies
assert len(spec.dependencies(name='mpi')) == 1
assert 'fake' not in spec
spec.concretize()
assert 'zmpi' in spec._dependencies
assert 'callpath' in spec._dependencies
assert 'zmpi' in spec._dependencies['callpath'].spec._dependencies
assert 'fake' in spec._dependencies['callpath'].spec._dependencies[
'zmpi'].spec._dependencies # NOQA: ignore=E501
assert all('mpi' not in d._dependencies for d in spec.traverse())
assert 'zmpi' in spec
assert 'mpi' in spec
assert len(spec.dependencies(name='zmpi')) == 1
assert len(spec.dependencies(name='callpath')) == 1
callpath = spec.dependencies(name='callpath')[0]
assert len(callpath.dependencies(name='zmpi')) == 1
zmpi = callpath.dependencies(name='zmpi')[0]
assert len(zmpi.dependencies(name='fake')) == 1
assert all(not d.dependencies(name='mpi') for d in spec.traverse())
assert all(x in spec for x in ('zmpi', 'mpi'))
def test_my_dep_depends_on_provider_of_my_virtual_dep(self):
spec = Spec('indirect-mpich')
@@ -604,7 +615,7 @@ def test_regression_issue_4492(self):
assert s.concrete
# Remove the dependencies and reset caches
s._dependencies.clear()
s.clear_dependencies()
s._concrete = False
assert not s.concrete


@@ -754,10 +754,11 @@ def mock_store(tmpdir_factory, mock_repo_path, mock_configuration_scopes,
@pytest.fixture(scope='function')
def database(mock_store, mock_packages, config, monkeypatch):
def database(mock_store, mock_packages, config):
"""This activates the mock store, packages, AND config."""
with spack.store.use_store(str(mock_store)) as store:
yield store.db
# Force reading the database again between tests
store.db.last_seen_verifier = ''


@@ -973,3 +973,36 @@ def test_reindex_when_all_prefixes_are_removed(mutable_database, mock_store):
mutable_database.remove(s)
assert len(mutable_database.query_local(installed=False, explicit=True)) == 0
@pytest.mark.parametrize('spec_str,parent_name,expected_nparents', [
('dyninst', 'callpath', 3),
('libelf', 'dyninst', 1),
('libelf', 'libdwarf', 1)
])
@pytest.mark.regression('11983')
def test_check_parents(spec_str, parent_name, expected_nparents, database):
"""Check that a spec returns the correct number of parents."""
s = database.query_one(spec_str)
parents = s.dependents(name=parent_name)
assert len(parents) == expected_nparents
edges = s.edges_from_dependents(name=parent_name)
assert len(edges) == expected_nparents
def test_consistency_of_dependents_upon_remove(mutable_database):
# Check the initial state
s = mutable_database.query_one('dyninst')
parents = s.dependents(name='callpath')
assert len(parents) == 3
# Remove a dependent (and all its dependents)
mutable_database.remove('mpileaks ^callpath ^mpich2')
mutable_database.remove('callpath ^mpich2')
# Check the final state
s = mutable_database.query_one('dyninst')
parents = s.dependents(name='callpath')
assert len(parents) == 2


@@ -2,43 +2,31 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import sys
from six import StringIO
import pytest
import six
import spack.graph
import spack.repo
from spack.graph import AsciiGraph, graph_dot, topological_sort
from spack.spec import Spec
import spack.spec
def test_topo_sort(mock_packages):
"""Test topo sort gives correct order."""
s = Spec('mpileaks').normalized()
topo = topological_sort(s)
assert topo.index('mpileaks') < topo.index('callpath')
assert topo.index('mpileaks') < topo.index('mpi')
assert topo.index('mpileaks') < topo.index('dyninst')
assert topo.index('mpileaks') < topo.index('libdwarf')
assert topo.index('mpileaks') < topo.index('libelf')
assert topo.index('callpath') < topo.index('mpi')
assert topo.index('callpath') < topo.index('dyninst')
assert topo.index('callpath') < topo.index('libdwarf')
assert topo.index('callpath') < topo.index('libelf')
assert topo.index('dyninst') < topo.index('libdwarf')
assert topo.index('dyninst') < topo.index('libelf')
assert topo.index('libdwarf') < topo.index('libelf')
@pytest.mark.parametrize('spec_str', ['mpileaks', 'callpath'])
def test_topo_sort(spec_str, config, mock_packages):
"""Ensure nodes are ordered topologically"""
s = spack.spec.Spec(spec_str).concretized()
nodes = spack.graph.topological_sort(s)
for idx, current in enumerate(nodes):
assert all(following not in current for following in nodes[idx + 1:])
def test_static_graph_mpileaks(mock_packages):
def test_static_graph_mpileaks(config, mock_packages):
"""Test a static spack graph for a simple package."""
s = Spec('mpileaks').normalized()
s = spack.spec.Spec('mpileaks').normalized()
stream = StringIO()
graph_dot([s], static=True, out=stream)
stream = six.StringIO()
spack.graph.graph_dot([s], static=True, out=stream)
dot = stream.getvalue()
@@ -62,72 +50,67 @@ def test_static_graph_mpileaks(mock_packages):
def test_dynamic_dot_graph_mpileaks(mock_packages, config):
"""Test dynamically graphing the mpileaks package."""
s = Spec('mpileaks').concretized()
stream = StringIO()
graph_dot([s], static=False, out=stream)
s = spack.spec.Spec('mpileaks').concretized()
stream = six.StringIO()
spack.graph.graph_dot([s], static=False, out=stream)
dot = stream.getvalue()
print(dot)
mpileaks_hash, mpileaks_lbl = s.dag_hash(), s.format('{name}')
mpi_hash, mpi_lbl = s['mpi'].dag_hash(), s['mpi'].format('{name}')
callpath_hash, callpath_lbl = (
s['callpath'].dag_hash(), s['callpath'].format('{name}'))
dyninst_hash, dyninst_lbl = (
s['dyninst'].dag_hash(), s['dyninst'].format('{name}'))
libdwarf_hash, libdwarf_lbl = (
s['libdwarf'].dag_hash(), s['libdwarf'].format('{name}'))
libelf_hash, libelf_lbl = (
s['libelf'].dag_hash(), s['libelf'].format('{name}'))
nodes_to_check = ['mpileaks', 'mpi', 'callpath', 'dyninst', 'libdwarf', 'libelf']
hashes = {}
for name in nodes_to_check:
current = s[name]
current_hash = current.dag_hash()
hashes[name] = current_hash
assert ' "{0}" [label="{1}"]\n'.format(
current_hash, spack.graph.node_label(current)
) in dot
assert ' "%s" [label="%s"]\n' % (mpileaks_hash, mpileaks_lbl) in dot
assert ' "%s" [label="%s"]\n' % (callpath_hash, callpath_lbl) in dot
assert ' "%s" [label="%s"]\n' % (mpi_hash, mpi_lbl) in dot
assert ' "%s" [label="%s"]\n' % (dyninst_hash, dyninst_lbl) in dot
assert ' "%s" [label="%s"]\n' % (libdwarf_hash, libdwarf_lbl) in dot
assert ' "%s" [label="%s"]\n' % (libelf_hash, libelf_lbl) in dot
assert ' "%s" -> "%s"\n' % (dyninst_hash, libdwarf_hash) in dot
assert ' "%s" -> "%s"\n' % (callpath_hash, dyninst_hash) in dot
assert ' "%s" -> "%s"\n' % (mpileaks_hash, mpi_hash) in dot
assert ' "%s" -> "%s"\n' % (libdwarf_hash, libelf_hash) in dot
assert ' "%s" -> "%s"\n' % (callpath_hash, mpi_hash) in dot
assert ' "%s" -> "%s"\n' % (mpileaks_hash, callpath_hash) in dot
assert ' "%s" -> "%s"\n' % (dyninst_hash, libelf_hash) in dot
dependencies_to_check = [
('dyninst', 'libdwarf'),
('callpath', 'dyninst'),
('mpileaks', 'mpi'),
('libdwarf', 'libelf'),
('callpath', 'mpi'),
('mpileaks', 'callpath'),
('dyninst', 'libelf')
]
for parent, child in dependencies_to_check:
assert ' "{0}" -> "{1}"\n'.format(hashes[parent], hashes[child]) in dot
def test_ascii_graph_mpileaks(mock_packages):
"""Test dynamically graphing the mpileaks package."""
s = Spec('mpileaks').normalized()
@pytest.mark.skipif(
sys.version_info < (3, 6), reason="Ordering might not be consistent"
)
def test_ascii_graph_mpileaks(config, mock_packages, monkeypatch):
monkeypatch.setattr(
spack.graph.AsciiGraph, '_node_label',
lambda self, node: node.name
)
s = spack.spec.Spec('mpileaks').concretized()
stream = StringIO()
graph = AsciiGraph()
stream = six.StringIO()
graph = spack.graph.AsciiGraph()
graph.write(s, out=stream, color=False)
string = stream.getvalue()
graph_str = stream.getvalue()
graph_str = '\n'.join([line.rstrip() for line in graph_str.split('\n')])
# Some lines in spack graph still have trailing space
# TODO: fix this.
string = '\n'.join([line.rstrip() for line in string.split('\n')])
assert string == r'''o mpileaks
assert graph_str == r'''o mpileaks
|\
| o callpath
| o callpath
|/|
o | mpi
o | mpich
/
o dyninst
o dyninst
|\
| o libdwarf
o | libdwarf
|/
o libelf
o libelf
'''
def test_topo_sort_filtered(mock_packages):
"""Test topo sort gives correct order when filtering link deps."""
s = Spec('both-link-and-build-dep-a').normalized()
def test_topological_sort_filtering_dependency_types(config, mock_packages):
s = spack.spec.Spec('both-link-and-build-dep-a').concretized()
topo = topological_sort(s, deptype=('link',))
assert topo == ['both-link-and-build-dep-a', 'both-link-and-build-dep-c']
nodes = spack.graph.topological_sort(s, deptype=('link',))
names = [s.name for s in nodes]
assert names == ['both-link-and-build-dep-c', 'both-link-and-build-dep-a']


@@ -18,10 +18,10 @@
def check_links(spec_to_check):
for spec in spec_to_check.traverse():
for dependent in spec.dependents():
assert spec.name in dependent.dependencies_dict()
assert dependent.edges_to_dependencies(name=spec.name)
for dependency in spec.dependencies():
assert spec.name in dependency.dependents_dict()
assert dependency.edges_from_dependents(name=spec.name)
@pytest.fixture()
@@ -297,9 +297,16 @@ def test_conflicting_spec_constraints(self):
# Normalize then add conflicting constraints to the DAG (this is an
# extremely unlikely scenario, but we test for it anyway)
mpileaks.normalize()
mpileaks._dependencies['mpich'].spec = Spec('mpich@1.0')
mpileaks._dependencies['callpath']. \
spec._dependencies['mpich'].spec = Spec('mpich@2.0')
mpileaks.edges_to_dependencies(
name='mpich'
)[0].spec = Spec('mpich@1.0')
mpileaks.edges_to_dependencies(
name='callpath'
)[0].spec.edges_to_dependencies(
name='mpich'
)[0].spec = Spec('mpich@2.0')
with pytest.raises(spack.spec.InconsistentSpecError):
mpileaks.flat_dependencies(copy=False)
@@ -617,6 +624,23 @@ def test_copy_concretized(self):
copy_ids = set(id(s) for s in copy.traverse())
assert not orig_ids.intersection(copy_ids)
def test_copy_through_spec_build_interface(self):
"""Check that copying dependencies using id(node) as a fast identifier of the
node works when the spec is wrapped in a SpecBuildInterface object.
"""
s = Spec('mpileaks').concretized()
c0 = s.copy()
assert c0 == s
# Single indirection
c1 = s['mpileaks'].copy()
assert c0 == c1 == s
# Double indirection
c2 = s['mpileaks']['mpileaks'].copy()
assert c0 == c1 == c2 == s
"""
Here is the graph with deptypes labeled (assume all packages have a 'dt'
prefix). Arrows are marked with the deptypes ('b' for 'build', 'l' for
@@ -790,21 +814,25 @@ def test_construct_spec_with_deptypes(self):
}
})
assert s['b']._dependencies['c'].deptypes == ('build',)
assert s['d']._dependencies['e'].deptypes == ('build', 'link')
assert s['e']._dependencies['f'].deptypes == ('run',)
assert s['b'].edges_to_dependencies(
name='c'
)[0].deptypes == ('build',)
assert s['d'].edges_to_dependencies(
name='e'
)[0].deptypes == ('build', 'link')
assert s['e'].edges_to_dependencies(
name='f'
)[0].deptypes == ('run',)
assert s['b']._dependencies['c'].deptypes == ('build',)
assert s['d']._dependencies['e'].deptypes == ('build', 'link')
assert s['e']._dependencies['f'].deptypes == ('run',)
assert s['c']._dependents['b'].deptypes == ('build',)
assert s['e']._dependents['d'].deptypes == ('build', 'link')
assert s['f']._dependents['e'].deptypes == ('run',)
assert s['c']._dependents['b'].deptypes == ('build',)
assert s['e']._dependents['d'].deptypes == ('build', 'link')
assert s['f']._dependents['e'].deptypes == ('run',)
assert s['c'].edges_from_dependents(
name='b'
)[0].deptypes == ('build',)
assert s['e'].edges_from_dependents(
name='d'
)[0].deptypes == ('build', 'link')
assert s['f'].edges_from_dependents(
name='e'
)[0].deptypes == ('run',)
def check_diamond_deptypes(self, spec):
"""Validate deptypes in dt-diamond spec.
@@ -813,17 +841,21 @@ def check_diamond_deptypes(self, spec):
depend on the same dependency in different ways.
"""
assert spec['dt-diamond']._dependencies[
'dt-diamond-left'].deptypes == ('build', 'link')
assert spec['dt-diamond'].edges_to_dependencies(
name='dt-diamond-left'
)[0].deptypes == ('build', 'link')
assert spec['dt-diamond']._dependencies[
'dt-diamond-right'].deptypes == ('build', 'link')
assert spec['dt-diamond'].edges_to_dependencies(
name='dt-diamond-right'
)[0].deptypes == ('build', 'link')
assert spec['dt-diamond-left']._dependencies[
'dt-diamond-bottom'].deptypes == ('build',)
assert spec['dt-diamond-left'].edges_to_dependencies(
name='dt-diamond-bottom'
)[0].deptypes == ('build',)
assert spec['dt-diamond-right']._dependencies[
'dt-diamond-bottom'].deptypes == ('build', 'link', 'run')
assert spec['dt-diamond-right'].edges_to_dependencies(
name='dt-diamond-bottom'
)[0].deptypes == ('build', 'link', 'run')
def check_diamond_normalized_dag(self, spec):
@@ -989,3 +1021,99 @@ def test_spec_tree_respect_deptypes(self):
assert 'version-test-pkg' in out
out = s.tree(deptypes=('link', 'run'))
assert 'version-test-pkg' not in out
def test_synthetic_construction_of_split_dependencies_from_same_package(
mock_packages, config
):
# Construct in a synthetic way (i.e. without using the solver)
# the following spec:
#
# b
# build / \ link,run
# c@2.0 c@1.0
#
# To demonstrate that a spec can now hold two direct
# dependencies from the same package
root = Spec('b').concretized()
link_run_spec = Spec('c@1.0').concretized()
build_spec = Spec('c@2.0').concretized()
root.add_dependency_edge(link_run_spec, deptype='link')
root.add_dependency_edge(link_run_spec, deptype='run')
root.add_dependency_edge(build_spec, deptype='build')
# Check dependencies from the perspective of root
assert len(root.dependencies()) == 2
assert all(x.name == 'c' for x in root.dependencies())
assert '@2.0' in root.dependencies(name='c', deptype='build')[0]
assert '@1.0' in root.dependencies(name='c', deptype=('link', 'run'))[0]
# Check parent from the perspective of the dependencies
assert len(build_spec.dependents()) == 1
assert len(link_run_spec.dependents()) == 1
assert build_spec.dependents() == link_run_spec.dependents()
assert build_spec != link_run_spec
def test_synthetic_construction_bootstrapping(mock_packages, config):
# Construct the following spec:
#
# b@2.0
# | build
# b@1.0
#
root = Spec('b@2.0').concretized()
bootstrap = Spec('b@1.0').concretized()
root.add_dependency_edge(bootstrap, deptype='build')
assert len(root.dependencies()) == 1
assert root.dependencies()[0].name == 'b'
assert root.name == 'b'
def test_addition_of_different_deptypes_in_multiple_calls(mock_packages, config):
# Construct the following spec:
#
# b@2.0
# | build,link,run
# b@1.0
#
# with three calls and check we always have a single edge
root = Spec('b@2.0').concretized()
bootstrap = Spec('b@1.0').concretized()
for current_deptype in ('build', 'link', 'run'):
root.add_dependency_edge(bootstrap, deptype=current_deptype)
# Check edges in dependencies
assert len(root.edges_to_dependencies()) == 1
forward_edge = root.edges_to_dependencies(deptype=current_deptype)[0]
assert current_deptype in forward_edge.deptypes
assert id(forward_edge.parent) == id(root)
assert id(forward_edge.spec) == id(bootstrap)
# Check edges from dependents
assert len(bootstrap.edges_from_dependents()) == 1
backward_edge = bootstrap.edges_from_dependents(deptype=current_deptype)[0]
assert current_deptype in backward_edge.deptypes
assert id(backward_edge.parent) == id(root)
assert id(backward_edge.spec) == id(bootstrap)
@pytest.mark.parametrize('c1_deptypes,c2_deptypes', [
('link', ('build', 'link')),
(('link', 'run'), ('build', 'link'))
])
def test_adding_same_deptype_with_the_same_name_raises(
mock_packages, config, c1_deptypes, c2_deptypes
):
p = Spec('b@2.0').concretized()
c1 = Spec('b@1.0').concretized()
c2 = Spec('b@2.0').concretized()
p.add_dependency_edge(c1, deptype=c1_deptypes)
with pytest.raises(spack.error.SpackError):
p.add_dependency_edge(c2, deptype=c2_deptypes)


@@ -415,6 +415,7 @@ def install(self, spec, prefix):
# default: 'gmp', => ('gmp', 'gmp', True, True)
# any other combination needs a full tuple
# if not (useinc || uselib): usedir - i.e (False, False)
direct_dependencies = [x.name for x in spec.dependencies()]
for library in (
('cuda', 'cuda', False, False),
('hip', 'hip', True, False),
@@ -465,7 +466,7 @@ def install(self, spec, prefix):
useinc = True
uselib = True
library_requested = spacklibname.split(':')[0] in spec.dependencies_dict()
library_requested = spacklibname.split(':')[0] in direct_dependencies
options.append(
'--with-{library}={value}'.format(
library=petsclibname,