Allow for multiple dependencies/dependents from the same package (#28673)
Change the internal representation of `Spec` to allow for multiple dependencies or dependents stemming from the same package. This makes it possible to represent cases that are frequent in cross-compiled environments or when bootstrapping compilers.

Modifications:
- [x] Substitute `DependencyMap` with `_EdgeMap`. The main differences are that the latter does not support direct item assignment and can be modified only through its API. It also provides a `select` method to query items.
- [x] Reworked a few public APIs of `Spec` to get lists of dependencies or related edges.
- [x] Added unit tests to prevent regressions on #11983 and to prove the synthetic construction of specs with multiple dependencies from the same package.

Since #22845 went in first, this PR reuses that format and thus it should not change hashes. The same package may be present multiple times in the list of dependencies, with a different associated spec each time (each with its own hash).
parent 3d624d204f
commit 2cd5c00923

14 changed files with 768 additions and 313 deletions
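As a rough standalone sketch of what the new representation allows (package names, versions, and architectures here are made up), each node now keeps a list of edges per package name instead of a single slot:

# Sketch only: "Edge" stands in for spack.spec.DependencySpec, and the
# defaultdict for the _EdgeMap type introduced by this commit.
import collections

Edge = collections.namedtuple('Edge', ['parent', 'child', 'deptypes'])

edges = collections.defaultdict(list)  # package name -> list of edges
# Two dependencies on the same package, e.g. a build-time compiler for the
# host and a cross-compiled runtime: each child carries its own hash.
edges['gcc'].append(Edge('mpileaks', 'gcc@9.4.0 arch=x86_64', ('build',)))
edges['gcc'].append(Edge('mpileaks', 'gcc@9.4.0 arch=aarch64', ('link', 'run')))
assert len(edges['gcc']) == 2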
lib/spack/llnl/util/lang.py
@@ -967,3 +967,12 @@ def nullcontext(*args, **kwargs):

 class UnhashableArguments(TypeError):
     """Raise when an @memoized function receives unhashable arg or kwarg values."""
+
+
+def enum(**kwargs):
+    """Return an enum-like class.
+
+    Args:
+        **kwargs: explicit dictionary of enums
+    """
+    return type('Enum', (object,), kwargs)
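The `enum` helper added above is exercised later in this same commit; a minimal standalone replica of that usage:

# Replica of the helper above; in the spec.py hunks below it is called as
# lang.enum(parent=0, child=1) to define edge directions.
def enum(**kwargs):
    """Return an enum-like class."""
    return type('Enum', (object,), kwargs)

EdgeDirection = enum(parent=0, child=1)
assert EdgeDirection.parent == 0 and EdgeDirection.child == 1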
lib/spack/spack/cmd/deactivate.py
@@ -8,9 +8,9 @@
 import spack.cmd
 import spack.cmd.common.arguments as arguments
 import spack.environment as ev
+import spack.graph
 import spack.store
 from spack.filesystem_view import YamlFilesystemView
-from spack.graph import topological_sort

 description = "deactivate a package extension"
 section = "extensions"
@@ -68,11 +68,8 @@ def deactivate(parser, args):
         tty.msg("Deactivating %s and all dependencies." %
                 pkg.spec.short_spec)

-        topo_order = topological_sort(spec)
-        index = spec.index()
-
-        for name in topo_order:
-            espec = index[name]
+        nodes_in_topological_order = spack.graph.topological_sort(spec)
+        for espec in reversed(nodes_in_topological_order):
             epkg = espec.package
             if epkg.extends(pkg.extendee_spec):
                 if epkg.is_activated(view) or args.force:
lib/spack/spack/database.py
@@ -961,7 +961,7 @@ def _check_ref_counts(self):
         counts = {}
         for key, rec in self._data.items():
             counts.setdefault(key, 0)
-            for dep in rec.spec.dependencies(_tracked_deps):
+            for dep in rec.spec.dependencies(deptype=_tracked_deps):
                 dep_key = dep.dag_hash()
                 counts.setdefault(dep_key, 0)
                 counts[dep_key] += 1
@@ -1078,7 +1078,7 @@ def _add(
         # Retrieve optional arguments
         installation_time = installation_time or _now()

-        for dep in spec.dependencies(_tracked_deps):
+        for dep in spec.dependencies(deptype=_tracked_deps):
             dkey = dep.dag_hash()
             if dkey not in self._data:
                 extra_args = {
@@ -1120,9 +1120,7 @@ def _add(
             )

             # Connect dependencies from the DB to the new copy.
-            for name, dep in six.iteritems(
-                    spec.dependencies_dict(_tracked_deps)
-            ):
+            for dep in spec.edges_to_dependencies(deptype=_tracked_deps):
                 dkey = dep.spec.dag_hash()
                 upstream, record = self.query_by_spec_hash(dkey)
                 new_spec._add_dependency(record.spec, dep.deptypes)
@@ -1185,7 +1183,7 @@ def _decrement_ref_count(self, spec):
         if rec.ref_count == 0 and not rec.installed:
             del self._data[key]

-            for dep in spec.dependencies(_tracked_deps):
+            for dep in spec.dependencies(deptype=_tracked_deps):
                 self._decrement_ref_count(dep)

     def _increment_ref_count(self, spec):
@@ -1213,13 +1211,10 @@ def _remove(self, spec):

         del self._data[key]

-        for dep in rec.spec.dependencies(_tracked_deps):
-            # FIXME: the two lines below needs to be updated once #11983 is
-            # FIXME: fixed. The "if" statement should be deleted and specs are
-            # FIXME: to be removed from dependents by hash and not by name.
-            # FIXME: See https://github.com/spack/spack/pull/15777#issuecomment-607818955
-            if dep._dependents.get(spec.name):
-                del dep._dependents[spec.name]
+        # Remove any reference to this node from dependencies and
+        # decrement the reference count
+        rec.spec.detach(deptype=_tracked_deps)
+        for dep in rec.spec.dependencies(deptype=_tracked_deps):
             self._decrement_ref_count(dep)

         if rec.deprecated_for:
lib/spack/spack/dependency.py
@@ -58,7 +58,7 @@ def canonical_deptype(deptype):
         if bad:
             raise ValueError(
                 'Invalid dependency types: %s' % ','.join(str(t) for t in bad))
-        return tuple(sorted(deptype))
+        return tuple(sorted(set(deptype)))

    raise ValueError('Invalid dependency type: %s' % repr(deptype))
lib/spack/spack/graph.py
@@ -42,54 +42,84 @@
 can take a number of specs as input.

 """
+import heapq
+import itertools
 import sys
-from heapq import heapify, heappop, heappush

-from llnl.util.tty.color import ColorStream
+import llnl.util.tty.color

-from spack.dependency import all_deptypes, canonical_deptype
+import spack.dependency

-__all__ = ['topological_sort', 'graph_ascii', 'AsciiGraph', 'graph_dot']
+__all__ = ['graph_ascii', 'AsciiGraph', 'graph_dot']


-def topological_sort(spec, reverse=False, deptype='all'):
-    """Topological sort for specs.
+def node_label(spec):
+    return spec.format('{name}{@version}{/hash:7}')

-    Return a list of dependency specs sorted topologically. The spec
-    argument is not modified in the process.

+def topological_sort(spec, deptype='all'):
+    """Return a list of dependency specs in topological sorting order.
+
+    The spec argument is not modified in by the function.
+
+    This function assumes specs don't have cycles, i.e. that we are really
+    operating with a DAG.
+
+    Args:
+        spec (spack.spec.Spec): the spec to be analyzed
+        deptype (str or tuple): dependency types to account for when
+            constructing the list
     """
-    deptype = canonical_deptype(deptype)
+    deptype = spack.dependency.canonical_deptype(deptype)

-    # Work on a copy so this is nondestructive.
-    spec = spec.copy(deps=deptype)
+    # Work on a copy so this is nondestructive
+    spec = spec.copy(deps=True)
     nodes = spec.index(deptype=deptype)

-    parents = lambda s: [p for p in s.dependents() if p.name in nodes]
-    children = lambda s: s.dependencies()
+    def dependencies(specs):
+        """Return all the dependencies (including transitive) for a spec."""
+        return list(set(itertools.chain.from_iterable(
+            s.dependencies(deptype=deptype) for s in specs
+        )))

-    if reverse:
-        parents, children = children, parents
+    def dependents(specs):
+        """Return all the dependents (including those of transitive dependencies)
+        for a spec.
+        """
+        candidates = list(set(itertools.chain.from_iterable(
+            s.dependents(deptype=deptype) for s in specs
+        )))
+        return [x for x in candidates if x.name in nodes]

-    topo_order = []
-    par = dict((name, parents(nodes[name])) for name in nodes.keys())
-    remaining = [name for name in nodes.keys() if not parents(nodes[name])]
-    heapify(remaining)
+    topological_order, children = [], {}

-    while remaining:
-        name = heappop(remaining)
-        topo_order.append(name)
+    # Map a spec encoded as (id, name) to a list of its transitive dependencies
+    for spec in itertools.chain.from_iterable(nodes.values()):
+        children[(id(spec), spec.name)] = [
+            x for x in dependencies([spec]) if x.name in nodes
+        ]

-        node = nodes[name]
-        for dep in children(node):
-            par[dep.name].remove(node)
-            if not par[dep.name]:
-                heappush(remaining, dep.name)
+    # To return a result that is topologically ordered we need to add nodes
+    # only after their dependencies. The first nodes we can add are leaf nodes,
+    # i.e. nodes that have no dependencies.
+    ready = [
+        spec for spec in itertools.chain.from_iterable(nodes.values())
+        if not dependencies([spec])
+    ]
+    heapq.heapify(ready)

-    if any(par.get(s.name, []) for s in spec.traverse()):
-        raise ValueError("Spec has cycles!")
-    else:
-        return topo_order
+    while ready:
+        # Pop a "ready" node and add it to the topologically ordered list
+        s = heapq.heappop(ready)
+        topological_order.append(s)
+
+        # Check if adding the last node made other nodes "ready"
+        for dep in dependents([s]):
+            children[(id(dep), dep.name)].remove(s)
+            if not children[(id(dep), dep.name)]:
+                heapq.heappush(ready, dep)
+
+    return topological_order


 def find(seq, predicate):
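The rewritten topological_sort is essentially Kahn's algorithm driven by a heap of "ready" nodes (nodes whose dependencies have all been emitted); the real version keys by (id, name) because two nodes may share a package name. A self-contained sketch of the same idea over a plain adjacency dict, with a made-up three-node DAG:

import heapq

def toposort(children):
    """children maps node -> list of dependency nodes (assumed to be a DAG)."""
    # Count unresolved dependencies per node; leaves are ready immediately.
    pending = {n: len(deps) for n, deps in children.items()}
    dependents = {n: [] for n in children}
    for n, deps in children.items():
        for d in deps:
            dependents[d].append(n)

    ready = [n for n, count in pending.items() if count == 0]
    heapq.heapify(ready)

    order = []
    while ready:
        node = heapq.heappop(ready)
        order.append(node)
        # Emitting "node" may make its dependents ready.
        for parent in dependents[node]:
            pending[parent] -= 1
            if pending[parent] == 0:
                heapq.heappush(ready, parent)
    return order

# mpileaks depends on callpath and mpich; callpath depends on mpich.
dag = {'mpileaks': ['callpath', 'mpich'], 'callpath': ['mpich'], 'mpich': []}
assert toposort(dag) == ['mpich', 'callpath', 'mpileaks']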
lib/spack/spack/graph.py
@@ -120,7 +150,7 @@ def __init__(self):
         self.node_character = 'o'
         self.debug = False
         self.indent = 0
-        self.deptype = all_deptypes
+        self.deptype = spack.dependency.all_deptypes

         # These are colors in the order they'll be used for edges.
         # See llnl.util.tty.color for details on color characters.
@@ -131,7 +161,6 @@ def __init__(self):
         self._name_to_color = None    # Node name to color
         self._out = None              # Output stream
         self._frontier = None         # frontier
-        self._nodes = None            # dict from name -> node
         self._prev_state = None       # State of previous line
         self._prev_index = None       # Index of expansion point of prev line

@@ -290,7 +319,10 @@ def advance(to_pos, edges):
             self._set_state(BACK_EDGE, end, label)
             self._out.write("\n")

-    def _node_line(self, index, name):
+    def _node_label(self, node):
+        return node.format('{name}@@{version}{/hash:7}')
+
+    def _node_line(self, index, node):
         """Writes a line with a node at index."""
         self._indent()
         for c in range(index):
@@ -301,7 +333,7 @@ def _node_line(self, index, name):
         for c in range(index + 1, len(self._frontier)):
             self._write_edge("| ", c)

-        self._out.write(" %s" % name)
+        self._out.write(self._node_label(node))
         self._set_state(NODE, index)
         self._out.write("\n")

@@ -363,29 +395,29 @@ def write(self, spec, color=None, out=None):
         if color is None:
             color = out.isatty()

-        self._out = ColorStream(out, color=color)
+        self._out = llnl.util.tty.color.ColorStream(out, color=color)

-        # We'll traverse the spec in topo order as we graph it.
-        topo_order = topological_sort(spec, reverse=True, deptype=self.deptype)
+        # We'll traverse the spec in topological order as we graph it.
+        nodes_in_topological_order = topological_sort(spec, deptype=self.deptype)

         # Work on a copy to be nondestructive
         spec = spec.copy()
-        self._nodes = spec.index()

         # Colors associated with each node in the DAG.
         # Edges are colored by the node they point to.
-        self._name_to_color = dict((name, self.colors[i % len(self.colors)])
-                                   for i, name in enumerate(topo_order))
+        self._name_to_color = {
+            spec.full_hash(): self.colors[i % len(self.colors)]
+            for i, spec in enumerate(nodes_in_topological_order)
+        }

         # Frontier tracks open edges of the graph as it's written out.
-        self._frontier = [[spec.name]]
+        self._frontier = [[spec.full_hash()]]
         while self._frontier:
             # Find an unexpanded part of frontier
             i = find(self._frontier, lambda f: len(f) > 1)

             if i >= 0:
-                # Expand frontier until there are enough columns for all
-                # children.
+                # Expand frontier until there are enough columns for all children.

                 # Figure out how many back connections there are and
                 # sort them so we do them in order
@@ -436,8 +468,8 @@ def write(self, spec, color=None, out=None):

                 else:
                     # Just allow the expansion here.
-                    name = self._frontier[i].pop(0)
-                    deps = [name]
+                    dep_hash = self._frontier[i].pop(0)
+                    deps = [dep_hash]
                     self._frontier.insert(i, deps)
                     self._expand_right_line(i)
@@ -453,18 +485,17 @@ def write(self, spec, color=None, out=None):

             else:
                 # Nothing to expand; add dependencies for a node.
-                name = topo_order.pop()
-                node = self._nodes[name]
+                node = nodes_in_topological_order.pop()

                 # Find the named node in the frontier and draw it.
-                i = find(self._frontier, lambda f: name in f)
-                self._node_line(i, name)
+                i = find(self._frontier, lambda f: node.full_hash() in f)
+                self._node_line(i, node)

                 # Replace node with its dependencies
                 self._frontier.pop(i)
-                deps = node.dependencies(self.deptype)
+                deps = node.dependencies(deptype=self.deptype)
                 if deps:
-                    deps = sorted((d.name for d in deps), reverse=True)
+                    deps = sorted((d.full_hash() for d in deps), reverse=True)
                     self._connect_deps(i, deps, "new-deps")  # anywhere.

                 elif self._frontier:
@@ -478,7 +509,7 @@ def graph_ascii(spec, node='o', out=None, debug=False,
     graph.indent = indent
     graph.node_character = node
     if deptype:
-        graph.deptype = canonical_deptype(deptype)
+        graph.deptype = spack.dependency.canonical_deptype(deptype)

     graph.write(spec, color=color, out=out)

@@ -499,7 +530,7 @@ def graph_dot(specs, deptype='all', static=False, out=None):

     if out is None:
         out = sys.stdout
-    deptype = canonical_deptype(deptype)
+    deptype = spack.dependency.canonical_deptype(deptype)

     def static_graph(spec, deptype):
         pkg = spec.package
@@ -517,7 +548,7 @@ def dynamic_graph(spec, deptypes):
         nodes = set()  # elements are (node key, node label)
         edges = set()  # elements are (src key, dest key)
         for s in spec.traverse(deptype=deptype):
-            nodes.add((s.dag_hash(), s.name))
+            nodes.add((s.dag_hash(), node_label(s)))
             for d in s.dependencies(deptype=deptype):
                 edge = (s.dag_hash(), d.dag_hash())
                 edges.add(edge)
lib/spack/spack/installer.py
@@ -227,8 +227,9 @@ def _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs):
         dep.concretize()
         # mark compiler as depended-on by the packages that use it
         for pkg in pkgs:
-            dep._dependents[pkg.name] = spack.spec.DependencySpec(
-                pkg.spec, dep, ('build',))
+            dep._dependents.add(
+                spack.spec.DependencySpec(pkg.spec, dep, ('build',))
+            )
         packages = [(s.package, False) for
                     s in dep.traverse(order='post', root=False)]
         packages.append((dep.package, True))
lib/spack/spack/solver/asp.py
@@ -1259,9 +1259,10 @@ class Body(object):
         # add all clauses from dependencies
         if transitive:
             if spec.concrete:
-                for dep_name, dep in spec.dependencies_dict().items():
-                    for dtype in dep.deptypes:
-                        clauses.append(fn.depends_on(spec.name, dep_name, dtype))
+                # TODO: We need to distinguish 2 specs from the same package later
+                for edge in spec.edges_to_dependencies():
+                    for dtype in edge.deptypes:
+                        clauses.append(fn.depends_on(spec.name, edge.spec.name, dtype))

             for dep in spec.traverse(root=False):
                 if spec.concrete:
@@ -1907,12 +1908,18 @@ def external_spec_selected(self, pkg, idx):
         )

     def depends_on(self, pkg, dep, type):
-        dependency = self._specs[pkg]._dependencies.get(dep)
-        if not dependency:
-            self._specs[pkg]._add_dependency(
-                self._specs[dep], (type,))
+        dependencies = self._specs[pkg].edges_to_dependencies(name=dep)
+
+        # TODO: assertion to be removed when cross-compilation is handled correctly
+        msg = ("Current solver does not handle multiple dependency edges "
+               "of the same name")
+        assert len(dependencies) < 2, msg
+
+        if not dependencies:
+            self._specs[pkg].add_dependency_edge(self._specs[dep], (type,))
         else:
-            dependency.add_type(type)
+            # TODO: This assumes that each solve unifies dependencies
+            dependencies[0].add_type(type)

     def reorder_flags(self):
         """Order compiler flags on specs in predefined order.
lib/spack/spack/spec.py
@@ -667,7 +667,7 @@ class DependencySpec(object):
     def __init__(self, parent, spec, deptypes):
        self.parent = parent
        self.spec = spec
-       self.deptypes = tuple(sorted(set(deptypes)))
+       self.deptypes = dp.canonical_deptype(deptypes)

    def update_deptypes(self, deptypes):
        deptypes = set(deptypes)
@@ -696,6 +696,9 @@ def __str__(self):
                 self.deptypes,
                 self.spec.name if self.spec else None)

+    def canonical(self):
+        return self.parent.dag_hash(), self.spec.dag_hash(), self.deptypes
+

 _valid_compiler_flags = [
     'cflags', 'cxxflags', 'fflags', 'ldflags', 'ldlibs', 'cppflags']
@@ -766,13 +769,119 @@ def __str__(self):
                    for key in sorted_keys) + cond_symbol


-class DependencyMap(lang.HashableMap):
-    """Each spec has a DependencyMap containing specs for its dependencies.
-       The DependencyMap is keyed by name. """
+def _sort_by_dep_types(dspec):
+    # Use negation since False < True for sorting
+    return tuple(t not in dspec.deptypes for t in ("link", "run", "build", "test"))
+
+
+#: Enum for edge directions
+EdgeDirection = lang.enum(parent=0, child=1)
+
+
+@lang.lazy_lexicographic_ordering
+class _EdgeMap(Mapping):
+    """Represent a collection of edges (DependencySpec objects) in the DAG.
+
+    Objects of this class are used in Specs to track edges that are
+    outgoing towards direct dependencies, or edges that are incoming
+    from direct dependents.
+
+    Edges are stored in a dictionary and keyed by package name.
+    """
+    def __init__(self, store_by=EdgeDirection.child):
+        # Sanitize input arguments
+        msg = 'unexpected value for "store_by" argument'
+        assert store_by in (EdgeDirection.child, EdgeDirection.parent), msg
+
+        #: This dictionary maps a package name to a list of edges
+        #: i.e. to a list of DependencySpec objects
+        self.edges = {}
+        self.store_by_child = (store_by == EdgeDirection.child)
+
+    def __getitem__(self, key):
+        return self.edges[key]
+
+    def __iter__(self):
+        return iter(self.edges)
+
+    def __len__(self):
+        return len(self.edges)
+
+    def add(self, edge):
+        """Adds a new edge to this object.
+
+        Args:
+            edge (DependencySpec): edge to be added
+        """
+        key = edge.spec.name if self.store_by_child else edge.parent.name
+        current_list = self.edges.setdefault(key, [])
+        current_list.append(edge)
+        current_list.sort(key=_sort_by_dep_types)

     def __str__(self):
         return "{deps: %s}" % ', '.join(str(d) for d in sorted(self.values()))

+    def _cmp_iter(self):
+        for item in sorted(itertools.chain.from_iterable(self.edges.values())):
+            yield item
+
+    def copy(self):
+        """Copies this object and returns a clone"""
+        clone = type(self)()
+        clone.store_by_child = self.store_by_child
+
+        # Copy everything from this dict into it.
+        for dspec in itertools.chain.from_iterable(self.values()):
+            clone.add(dspec.copy())
+
+        return clone
+
+    def select(self, parent=None, child=None, deptypes=dp.all_deptypes):
+        """Select a list of edges and return them.
+
+        If an edge:
+        - Has *any* of the dependency types passed as argument,
+        - Matches the parent and/or child name, if passed
+        then it is selected.
+
+        The deptypes argument needs to be canonical, since the method won't
+        convert it for performance reason.
+
+        Args:
+            parent (str): name of the parent package
+            child (str): name of the child package
+            deptypes (tuple): allowed dependency types in canonical form
+
+        Returns:
+            List of DependencySpec objects
+        """
+        if not deptypes:
+            return []
+
+        # Start from all the edges we store
+        selected = (d for d in itertools.chain.from_iterable(self.values()))
+
+        # Filter by parent name
+        if parent:
+            selected = (d for d in selected if d.parent.name == parent)
+
+        # Filter by child name
+        if child:
+            selected = (d for d in selected if d.spec.name == child)
+
+        # Filter by allowed dependency types
+        if deptypes:
+            selected = (
+                dep for dep in selected
+                if not dep.deptypes or
+                any(d in deptypes for d in dep.deptypes)
+            )
+
+        return list(selected)
+
+    def clear(self):
+        self.edges.clear()
+

 def _command_default_handler(descriptor, spec, cls):
     """Default handler when looking for the 'command' attribute.
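A standalone check of the ordering that _EdgeMap.add enforces on each per-name edge list (here applied to bare deptype tuples rather than DependencySpec objects):

# Same key as _sort_by_dep_types above, taking a deptypes tuple directly.
def sort_key(deptypes):
    # Use negation since False < True for sorting
    return tuple(t not in deptypes for t in ("link", "run", "build", "test"))

edges = [('build',), ('test',), ('link', 'run')]
edges.sort(key=sort_key)
# Edges carrying "link" sort first, then "run", "build" and "test".
assert edges == [('link', 'run'), ('build',), ('test',)]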
lib/spack/spack/spec.py
@@ -1016,7 +1125,9 @@ def __init__(self, spec, name, query_parameters):
         super(SpecBuildInterface, self).__init__(spec)
         # Adding new attributes goes after super() call since the ObjectWrapper
         # resets __dict__ to behave like the passed object
-        self.token = spec, name, query_parameters
+        original_spec = getattr(spec, 'wrapped_obj', spec)
+        self.wrapped_obj = original_spec
+        self.token = original_spec, name, query_parameters
         is_virtual = spack.repo.path.is_virtual(name)
         self.last_query = QueryState(
             name=name,
@@ -1027,6 +1138,9 @@ def __init__(self, spec, name, query_parameters):
     def __reduce__(self):
         return SpecBuildInterface, self.token

+    def copy(self, *args, **kwargs):
+        return self.wrapped_obj.copy(*args, **kwargs)
+

 @lang.lazy_lexicographic_ordering(set_hash=False)
 class Spec(object):
@@ -1070,8 +1184,8 @@ def __init__(self, spec_like=None, normal=False,
         self.architecture = None
         self.compiler = None
         self.compiler_flags = FlagMap(self)
-        self._dependents = DependencyMap()
-        self._dependencies = DependencyMap()
+        self._dependents = _EdgeMap(store_by=EdgeDirection.parent)
+        self._dependencies = _EdgeMap(store_by=EdgeDirection.child)
         self.namespace = None

         self._hash = None
@@ -1143,34 +1257,112 @@ def _format_module_list(modules):
     def external(self):
         return bool(self.external_path) or bool(self.external_modules)

-    def get_dependency(self, name):
-        dep = self._dependencies.get(name)
-        if dep is not None:
-            return dep
-        raise InvalidDependencyError(self.name, name)
+    def clear_dependencies(self):
+        """Trim the dependencies of this spec."""
+        self._dependencies.clear()
+
+    def clear_edges(self):
+        """Trim the dependencies and dependents of this spec."""
+        self._dependencies.clear()
+        self._dependents.clear()
+
+    def detach(self, deptype='all'):
+        """Remove any reference that dependencies have of this node.
+
+        Args:
+            deptype (str or tuple): dependency types tracked by the
+                current spec
+        """
+        key = self.dag_hash()
+        # Go through the dependencies
+        for dep in self.dependencies(deptype=deptype):
+            # Remove the spec from dependents
+            if self.name in dep._dependents:
+                dependents_copy = dep._dependents.edges[self.name]
+                del dep._dependents.edges[self.name]
+                for edge in dependents_copy:
+                    if edge.parent.dag_hash() == key:
+                        continue
+                    dep._dependents.add(edge)
+
+    def _get_dependency(self, name):
+        # WARNING: This function is an implementation detail of the
+        # WARNING: original concretizer. Since with that greedy
+        # WARNING: algorithm we don't allow multiple nodes from
+        # WARNING: the same package in a DAG, here we hard-code
+        # WARNING: using index 0 i.e. we assume that we have only
+        # WARNING: one edge from package "name"
+        deps = self.edges_to_dependencies(name=name)
+        if len(deps) != 1:
+            err_msg = 'expected only 1 "{0}" dependency, but got {1}'
+            raise spack.error.SpecError(err_msg.format(name, len(deps)))
+        return deps[0]

-    def _find_deps(self, where, deptype):
+    def edges_from_dependents(self, name=None, deptype='all'):
+        """Return a list of edges connecting this node in the DAG
+        to parents.
+
+        Args:
+            name (str): filter dependents by package name
+            deptype (str or tuple): allowed dependency types
+        """
         deptype = dp.canonical_deptype(deptype)
+        return [
+            d for d in
+            self._dependents.select(parent=name, deptypes=deptype)
+        ]

-        return [dep for dep in where.values()
-                if deptype and (not dep.deptypes or
-                                any(d in deptype for d in dep.deptypes))]
+    def edges_to_dependencies(self, name=None, deptype='all'):
+        """Return a list of edges connecting this node in the DAG
+        to children.
+
+        Args:
+            name (str): filter dependencies by package name
+            deptype (str or tuple): allowed dependency types
+        """
+        deptype = dp.canonical_deptype(deptype)
+        return [
+            d for d in
+            self._dependencies.select(child=name, deptypes=deptype)
+        ]

-    def dependencies(self, deptype='all'):
-        return [d.spec
-                for d in self._find_deps(self._dependencies, deptype)]
+    def dependencies(self, name=None, deptype='all'):
+        """Return a list of direct dependencies (nodes in the DAG).
+
+        Args:
+            name (str): filter dependencies by package name
+            deptype (str or tuple): allowed dependency types
+        """
+        return [d.spec for d in self.edges_to_dependencies(name, deptype=deptype)]

-    def dependents(self, deptype='all'):
-        return [d.parent
-                for d in self._find_deps(self._dependents, deptype)]
+    def dependents(self, name=None, deptype='all'):
+        """Return a list of direct dependents (nodes in the DAG).
+
+        Args:
+            name (str): filter dependents by package name
+            deptype (str or tuple): allowed dependency types
+        """
+        return [d.parent for d in self.edges_from_dependents(name, deptype=deptype)]

-    def dependencies_dict(self, deptype='all'):
-        return dict((d.spec.name, d)
-                    for d in self._find_deps(self._dependencies, deptype))
-
-    def dependents_dict(self, deptype='all'):
-        return dict((d.parent.name, d)
-                    for d in self._find_deps(self._dependents, deptype))
+    def _dependencies_dict(self, deptype='all'):
+        """Return a dictionary, keyed by package name, of the direct
+        dependencies.
+
+        Each value in the dictionary is a list of edges.
+
+        Args:
+            deptype: allowed dependency types
+        """
+        _sort_fn = lambda x: (x.spec.name,) + _sort_by_dep_types(x)
+        _group_fn = lambda x: x.spec.name
+        deptype = dp.canonical_deptype(deptype)
+        selected_edges = self._dependencies.select(deptypes=deptype)
+        result = {}
+        for key, group in itertools.groupby(
+                sorted(selected_edges, key=_sort_fn), key=_group_fn
+        ):
+            result[key] = list(group)
+        return result

     #
     # Private routines here are called by the parser when building a spec.
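Assuming a working Spack session, the reworked query API reads roughly as follows (the package name is illustrative):

# Illustrative only: needs Spack importable and a concretizable package.
import spack.spec

spec = spack.spec.Spec('mpileaks').concretized()

# Edges are DependencySpec objects carrying parent, child and deptypes ...
for edge in spec.edges_to_dependencies(deptype=('link', 'run')):
    print(edge.parent.name, '->', edge.spec.name, edge.deptypes)

# ... while dependencies()/dependents() return nodes, optionally filtered by name.
mpi_like = spec.dependencies(name='mpich')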
lib/spack/spack/spec.py
@@ -1248,18 +1440,43 @@ def _add_dependency(self, spec, deptypes):
             raise DuplicateDependencyError(
                 "Cannot depend on '%s' twice" % spec)

-        # create an edge and add to parent and child
-        dspec = DependencySpec(self, spec, deptypes)
-        self._dependencies[spec.name] = dspec
-        spec._dependents[self.name] = dspec
+        self.add_dependency_edge(spec, deptypes)
+
+    def add_dependency_edge(self, dependency_spec, deptype):
+        """Add a dependency edge to this spec.
+
+        Args:
+            dependency_spec (Spec): spec of the dependency
+            deptype (str or tuple): dependency types
+        """
+        deptype = dp.canonical_deptype(deptype)
+
+        # Check if we need to update edges that are already present
+        selected = self._dependencies.select(child=dependency_spec.name)
+        for edge in selected:
+            if any(d in edge.deptypes for d in deptype):
+                msg = ('cannot add a dependency on "{0.spec}" of {1} type '
+                       'when the "{0.parent}" has the edge {0!s} already')
+                raise spack.error.SpecError(msg.format(edge, deptype))
+
+        for edge in selected:
+            if id(dependency_spec) == id(edge.spec):
+                # If we are here, it means the edge object was previously added to
+                # both the parent and the child. When we update this object they'll
+                # both see the deptype modification.
+                edge.add_type(deptype)
+                return
+
+        edge = DependencySpec(self, dependency_spec, deptype)
+        self._dependencies.add(edge)
+        dependency_spec._dependents.add(edge)

     def _add_default_platform(self):
         """If a spec has an os or a target and no platform, give it
         the default platform.

-           This is private because it is used by the parser -- it's not
-           expected to be used outside of ``spec.py``.
+        This is private because it is used by the parser -- it's not
+        expected to be used outside of ``spec.py``.
         """
         arch = self.architecture
         if arch and not arch.platform and (arch.os or arch.target):
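With add_dependency_edge, two distinct nodes from the same package can be attached to one parent as long as their dependency types do not overlap; a hypothetical synthetic construction in the spirit of the new unit tests (package names are placeholders):

# Hypothetical: builds an abstract DAG by hand, as the new unit tests do.
import spack.spec

root = spack.spec.Spec('dttop')
first = spack.spec.Spec('dtlink1')   # one node of the package
second = spack.spec.Spec('dtlink1')  # a second, distinct node of the same package

root.add_dependency_edge(first, ('build',))
root.add_dependency_edge(second, ('run',))  # overlapping deptypes would raise SpecError

assert len(root.edges_to_dependencies(name='dtlink1')) == 2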
lib/spack/spack/spec.py
@@ -1280,10 +1497,12 @@ def root(self):

         Spack specs have a single root (the package being installed).
         """
+        # FIXME: In the case of multiple parents this property does not
+        # FIXME: make sense. Should we revisit the semantics?
         if not self._dependents:
             return self
-        return next(iter(self._dependents.values())).parent.root
+        edges_by_package = next(iter(self._dependents.values()))
+        return edges_by_package[0].parent.root

     @property
     def package(self):
@@ -1448,21 +1667,24 @@ def return_val(dspec):

         # This code determines direction and yields the children/parents
         if direction == 'children':
-            where = self._dependencies
+            edges = self.edges_to_dependencies
+            key_fn = lambda dspec: dspec.spec.name
             succ = lambda dspec: dspec.spec
         elif direction == 'parents':
-            where = self._dependents
+            edges = self.edges_from_dependents
+            key_fn = lambda dspec: dspec.parent.name
             succ = lambda dspec: dspec.parent
         else:
             raise ValueError('Invalid traversal direction: %s' % direction)

-        for name, dspec in sorted(where.items()):
+        for dspec in sorted(edges(), key=key_fn):
             dt = dspec.deptypes
             if dt and not any(d in deptype for d in dt):
                 continue

             for child in succ(dspec).traverse_edges(
-                    visited, d + 1, deptype, dspec, **kwargs):
+                    visited, d + 1, deptype, dspec, **kwargs
+            ):
                 yield child

         # Postorder traversal yields after successors
@@ -1684,17 +1906,17 @@ def to_node_dict(self, hash=ht.dag_hash):
             d['package_hash'] = package_hash

         # Note: Relies on sorting dict by keys later in algorithm.
-        deps = self.dependencies_dict(deptype=hash.deptype)
+        deps = self._dependencies_dict(deptype=hash.deptype)

         if deps:
             deps_list = []
-            for name, dspec in sorted(deps.items()):
+            for name, edges_for_name in sorted(deps.items()):
                 name_tuple = ('name', name)
-                hash_tuple = (hash.name, dspec.spec._cached_hash(hash))
-                type_tuple = ('type', sorted(str(s) for s in dspec.deptypes))
-                deps_list.append(syaml.syaml_dict([name_tuple,
-                                                   hash_tuple,
-                                                   type_tuple]))
+                for dspec in edges_for_name:
+                    hash_tuple = (hash.name, dspec.spec._cached_hash(hash))
+                    type_tuple = ('type', sorted(str(s) for s in dspec.deptypes))
+                    deps_list.append(syaml.syaml_dict(
+                        [name_tuple, hash_tuple, type_tuple]
+                    ))
             d['dependencies'] = deps_list

         # Name is included in case this is replacing a virtual.
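Under the format from #22845, the same dependency name may therefore occur in several entries of the serialized list, each with its own hash; the resulting shape is approximately (key names follow the hash in use, and hash values are made up):

# Approximate shape of d['dependencies'] for a node with two edges to "gcc".
deps_list = [
    {'name': 'gcc', 'hash': 'aaaaaaa', 'type': ['build']},
    {'name': 'gcc', 'hash': 'bbbbbbb', 'type': ['link', 'run']},
]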
lib/spack/spack/spec.py
@@ -2257,8 +2479,14 @@ def _concretize_helper(self, concretizer, presets=None, visited=None):
         changed = False

         # Concretize deps first -- this is a bottom-up process.
-        for name in sorted(self._dependencies.keys()):
-            changed |= self._dependencies[name].spec._concretize_helper(
+        for name in sorted(self._dependencies):
+            # WARNING: This function is an implementation detail of the
+            # WARNING: original concretizer. Since with that greedy
+            # WARNING: algorithm we don't allow multiple nodes from
+            # WARNING: the same package in a DAG, here we hard-code
+            # WARNING: using index 0 i.e. we assume that we have only
+            # WARNING: one edge from package "name"
+            changed |= self._dependencies[name][0].spec._concretize_helper(
                 concretizer, presets, visited
             )
@@ -2285,14 +2513,14 @@ def _concretize_helper(self, concretizer, presets=None, visited=None):

     def _replace_with(self, concrete):
         """Replace this virtual spec with a concrete spec."""
-        assert(self.virtual)
-        for name, dep_spec in self._dependents.items():
+        assert self.virtual
+        for dep_spec in itertools.chain.from_iterable(self._dependents.values()):
             dependent = dep_spec.parent
             deptypes = dep_spec.deptypes

             # remove self from all dependents, unless it is already removed
             if self.name in dependent._dependencies:
-                del dependent._dependencies[self.name]
+                del dependent._dependencies.edges[self.name]

             # add the replacement, unless it is already a dep of dependent.
             if concrete.name not in dependent._dependencies:
@@ -2363,12 +2591,12 @@ def _expand_virtual_packages(self, concretizer):

                 # If replacement is external then trim the dependencies
                 if replacement.external:
-                    if (spec._dependencies):
+                    if spec._dependencies:
                         for dep in spec.dependencies():
-                            del dep._dependents[spec.name]
+                            del dep._dependents.edges[spec.name]
                         changed = True
-                    spec._dependencies = DependencyMap()
-                    replacement._dependencies = DependencyMap()
+                    spec.clear_dependencies()
+                    replacement.clear_dependencies()
                     replacement.architecture = self.architecture

                 # TODO: could this and the stuff in _dup be cleaned up?
@@ -2715,9 +2943,8 @@ def flat_dependencies(self, **kwargs):
             if not copy:
                 for spec in flat_deps.values():
                     if not spec.concrete:
-                        spec._dependencies.clear()
-                        spec._dependents.clear()
-                self._dependencies.clear()
+                        spec.clear_edges()
+                self.clear_dependencies()

             return flat_deps
@@ -2732,11 +2959,12 @@ def flat_dependencies(self, **kwargs):
             )

     def index(self, deptype='all'):
-        """Return DependencyMap that points to all the dependencies in this
-        spec."""
-        dm = DependencyMap()
+        """Return a dictionary that points to all the dependencies in this
+        spec.
+        """
+        dm = collections.defaultdict(list)
         for spec in self.traverse(deptype=deptype):
-            dm[spec.name] = spec
+            dm[spec.name].append(spec)
         return dm

     def _evaluate_dependency_conditions(self, name):
@@ -3222,12 +3450,19 @@ def _constrain_dependencies(self, other):
         for name in self.common_dependencies(other):
             changed |= self[name].constrain(other[name], deps=False)
             if name in self._dependencies:
-                changed |= self._dependencies[name].update_deptypes(
-                    other._dependencies[name].deptypes)
+                # WARNING: This function is an implementation detail of the
+                # WARNING: original concretizer. Since with that greedy
+                # WARNING: algorithm we don't allow multiple nodes from
+                # WARNING: the same package in a DAG, here we hard-code
+                # WARNING: using index 0 i.e. we assume that we have only
+                # WARNING: one edge from package "name"
+                edges_from_name = self._dependencies[name]
+                changed |= edges_from_name[0].update_deptypes(
+                    other._dependencies[name][0].deptypes)

         # Update with additional constraints from other spec
         for name in other.dep_difference(self):
-            dep_spec_copy = other.get_dependency(name)
+            dep_spec_copy = other._get_dependency(name)
             dep_copy = dep_spec_copy.spec
             deptypes = dep_spec_copy.deptypes
             self._add_dependency(dep_copy.copy(), deptypes)
@@ -3490,8 +3725,8 @@ def _dup(self, other, deps=True, cleardeps=True, caches=None):
             else None
         self.compiler = other.compiler.copy() if other.compiler else None
         if cleardeps:
-            self._dependents = DependencyMap()
-            self._dependencies = DependencyMap()
+            self._dependents = _EdgeMap(store_by=EdgeDirection.parent)
+            self._dependencies = _EdgeMap(store_by=EdgeDirection.child)
         self.compiler_flags = other.compiler_flags.copy()
         self.compiler_flags.spec = self
         self.variants = other.variants.copy()
@@ -3547,22 +3782,25 @@ def _dup(self, other, deps=True, cleardeps=True, caches=None):
         return changed

     def _dup_deps(self, other, deptypes, caches):
-        new_specs = {self.name: self}
-        for dspec in other.traverse_edges(cover='edges',
-                                          root=False):
-            if (dspec.deptypes and
-                    not any(d in deptypes for d in dspec.deptypes)):
+        def spid(spec):
+            return id(spec)
+
+        new_specs = {spid(other): self}
+        for edge in other.traverse_edges(cover='edges', root=False):
+            if edge.deptypes and not any(d in deptypes for d in edge.deptypes):
                 continue

-            if dspec.parent.name not in new_specs:
-                new_specs[dspec.parent.name] = dspec.parent.copy(
-                    deps=False, caches=caches)
-            if dspec.spec.name not in new_specs:
-                new_specs[dspec.spec.name] = dspec.spec.copy(
-                    deps=False, caches=caches)
+            if spid(edge.parent) not in new_specs:
+                new_specs[spid(edge.parent)] = edge.parent.copy(
+                    deps=False, caches=caches
+                )

-            new_specs[dspec.parent.name]._add_dependency(
-                new_specs[dspec.spec.name], dspec.deptypes)
+            if spid(edge.spec) not in new_specs:
+                new_specs[spid(edge.spec)] = edge.spec.copy(deps=False, caches=caches)
+
+            new_specs[spid(edge.parent)].add_dependency_edge(
+                new_specs[spid(edge.spec)], edge.deptypes
+            )

     def copy(self, deps=True, **kwargs):
         """Make a copy of this spec.
@@ -3679,8 +3917,10 @@ def eq_dag(self, other, deptypes=True, vs=None, vo=None):
                    for name in sorted(self._dependencies)]
         osorted = [other._dependencies[name]
                    for name in sorted(other._dependencies)]
-        for s_dspec, o_dspec in zip(ssorted, osorted):
+        for s_dspec, o_dspec in zip(
+                itertools.chain.from_iterable(ssorted),
+                itertools.chain.from_iterable(osorted)
+        ):
             if deptypes and s_dspec.deptypes != o_dspec.deptypes:
                 return False
@@ -3724,7 +3964,9 @@ def _cmp_iter(self):
             yield item

         def deps():
-            for _, dep in sorted(self._dependencies.items()):
+            for dep in sorted(
+                    itertools.chain.from_iterable(self._dependencies.values())
+            ):
                 yield dep.spec.name
                 yield tuple(sorted(dep.deptypes))
                 yield hash(dep.spec)
@@ -4302,7 +4544,8 @@ def tree(self, **kwargs):
                 # when only covering nodes, we merge dependency types
                 # from all dependents before showing them.
                 types = [
-                    ds.deptypes for ds in node.dependents_dict().values()]
+                    ds.deptypes for ds in node.edges_from_dependents()
+                ]
             else:
                 # when covering edges or paths, we show dependency
                 # types only for the edge through which we visited
@@ -4373,6 +4616,18 @@ def splice(self, other, transitive):
         assert other.concrete
         assert other.name in self

+        # Check, for the time being, that we don't have DAG with multiple
+        # specs from the same package
+        def multiple_specs(root):
+            counter = collections.Counter([node.name for node in root.traverse()])
+            _, max_number = counter.most_common()[0]
+            return max_number > 1
+
+        if multiple_specs(self) or multiple_specs(other):
+            msg = ('Either "{0}" or "{1}" contain multiple specs from the same '
+                   'package, which cannot be handled by splicing at the moment')
+            raise ValueError(msg.format(self, other))
+
         # Multiple unique specs with the same name will collide, so the
         # _dependents of these specs should not be trusted.
         # Variants may also be ignored here for now...
@@ -4398,19 +4653,22 @@ def splice(self, other, transitive):
         nodes.update(self_nodes)

         for name in nodes:
+            # TODO: check if splice semantics is respected
             if name in self_nodes:
-                dependencies = self[name]._dependencies
-                for dep in dependencies:
-                    nodes[name]._add_dependency(nodes[dep],
-                                                dependencies[dep].deptypes)
-                if any(dep not in self_nodes for dep in dependencies):
+                for edge in self[name].edges_to_dependencies():
+                    nodes[name].add_dependency_edge(
+                        nodes[edge.spec.name], edge.deptypes
+                    )
+                if any(dep not in self_nodes
+                       for dep in self[name]._dependencies):
                     nodes[name].build_spec = self[name].build_spec
             else:
-                dependencies = other[name]._dependencies
-                for dep in dependencies:
-                    nodes[name]._add_dependency(nodes[dep],
-                                                dependencies[dep].deptypes)
-                if any(dep not in other_nodes for dep in dependencies):
+                for edge in other[name].edges_to_dependencies():
+                    nodes[name].add_dependency_edge(
+                        nodes[edge.spec.name], edge.deptypes
+                    )
+                if any(dep not in other_nodes
+                       for dep in other[name]._dependencies):
                     nodes[name].build_spec = other[name].build_spec

         ret = nodes[self.name]
@@ -4491,7 +4749,7 @@ def merge_abstract_anonymous_specs(*abstract_specs):

         # Update with additional constraints from other spec
         for name in current_spec_constraint.dep_difference(merged_spec):
-            edge = current_spec_constraint.get_dependency(name)
+            edge = next(iter(current_spec_constraint.edges_to_dependencies(name)))
             merged_spec._add_dependency(edge.spec.copy(), edge.deptypes)

     return merged_spec
@ -176,13 +176,17 @@ def test_concretize(self, spec):
|
||||||
|
|
||||||
def test_concretize_mention_build_dep(self):
|
def test_concretize_mention_build_dep(self):
|
||||||
spec = check_concretize('cmake-client ^cmake@3.4.3')
|
spec = check_concretize('cmake-client ^cmake@3.4.3')
|
||||||
|
|
||||||
# Check parent's perspective of child
|
# Check parent's perspective of child
|
||||||
dependency = spec.dependencies_dict()['cmake']
|
to_dependencies = spec.edges_to_dependencies(name='cmake')
|
||||||
assert set(dependency.deptypes) == set(['build'])
|
assert len(to_dependencies) == 1
|
||||||
|
assert set(to_dependencies[0].deptypes) == set(['build'])
|
||||||
|
|
||||||
# Check child's perspective of parent
|
# Check child's perspective of parent
|
||||||
cmake = spec['cmake']
|
cmake = spec['cmake']
|
||||||
dependent = cmake.dependents_dict()['cmake-client']
|
from_dependents = cmake.edges_from_dependents(name='cmake-client')
|
||||||
assert set(dependent.deptypes) == set(['build'])
|
assert len(from_dependents) == 1
|
||||||
|
assert set(from_dependents[0].deptypes) == set(['build'])
|
||||||
|
|
||||||
def test_concretize_preferred_version(self):
|
def test_concretize_preferred_version(self):
|
||||||
spec = check_concretize('python')
|
spec = check_concretize('python')
|
||||||
|

@@ -379,30 +383,37 @@ def test_no_compilers_for_arch(self):
     def test_virtual_is_fully_expanded_for_callpath(self):
         # force dependence on fake "zmpi" by asking for MPI 10.0
         spec = Spec('callpath ^mpi@10.0')
-        assert 'mpi' in spec._dependencies
+        assert len(spec.dependencies(name='mpi')) == 1
         assert 'fake' not in spec
 
         spec.concretize()
-        assert 'zmpi' in spec._dependencies
-        assert all('mpi' not in d._dependencies for d in spec.traverse())
-        assert 'zmpi' in spec
-        assert 'mpi' in spec
+        assert len(spec.dependencies(name='zmpi')) == 1
+        assert all(not d.dependencies(name='mpi') for d in spec.traverse())
+        assert all(x in spec for x in ('zmpi', 'mpi'))
 
-        assert 'fake' in spec._dependencies['zmpi'].spec
+        edges_to_zmpi = spec.edges_to_dependencies(name='zmpi')
+        assert len(edges_to_zmpi) == 1
+        assert 'fake' in edges_to_zmpi[0].spec
 
     def test_virtual_is_fully_expanded_for_mpileaks(
             self
     ):
         spec = Spec('mpileaks ^mpi@10.0')
-        assert 'mpi' in spec._dependencies
+        assert len(spec.dependencies(name='mpi')) == 1
         assert 'fake' not in spec
 
         spec.concretize()
-        assert 'zmpi' in spec._dependencies
-        assert 'callpath' in spec._dependencies
-        assert 'zmpi' in spec._dependencies['callpath'].spec._dependencies
-        assert 'fake' in spec._dependencies['callpath'].spec._dependencies[
-            'zmpi'].spec._dependencies  # NOQA: ignore=E501
-        assert all('mpi' not in d._dependencies for d in spec.traverse())
-        assert 'zmpi' in spec
-        assert 'mpi' in spec
+        assert len(spec.dependencies(name='zmpi')) == 1
+        assert len(spec.dependencies(name='callpath')) == 1
+
+        callpath = spec.dependencies(name='callpath')[0]
+        assert len(callpath.dependencies(name='zmpi')) == 1
+
+        zmpi = callpath.dependencies(name='zmpi')[0]
+        assert len(zmpi.dependencies(name='fake')) == 1
+
+        assert all(not d.dependencies(name='mpi') for d in spec.traverse())
+        assert all(x in spec for x in ('zmpi', 'mpi'))
 
     def test_my_dep_depends_on_provider_of_my_virtual_dep(self):
         spec = Spec('indirect-mpich')
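These assertions rely on `Spec.dependencies()` returning a plain list of child specs, so an empty list doubles as "no such dependency". A sketch, assuming a concretized `spec` as in the tests above (`no-such-pkg` is a hypothetical name):

```python
# dependencies(name=...) filters direct children by package name.
zmpi_children = spec.dependencies(name='zmpi')
assert len(zmpi_children) == 1

# An unknown name simply yields an empty list rather than a KeyError.
assert spec.dependencies(name='no-such-pkg') == []
```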

@@ -604,7 +615,7 @@ def test_regression_issue_4492(self):
         assert s.concrete
 
         # Remove the dependencies and reset caches
-        s._dependencies.clear()
+        s.clear_dependencies()
         s._concrete = False
 
         assert not s.concrete

@@ -754,10 +754,11 @@ def mock_store(tmpdir_factory, mock_repo_path, mock_configuration_scopes,
 
 
 @pytest.fixture(scope='function')
-def database(mock_store, mock_packages, config, monkeypatch):
+def database(mock_store, mock_packages, config):
     """This activates the mock store, packages, AND config."""
     with spack.store.use_store(str(mock_store)) as store:
         yield store.db
+        # Force reading the database again between tests
         store.db.last_seen_verifier = ''
 
 
@@ -973,3 +973,36 @@ def test_reindex_when_all_prefixes_are_removed(mutable_database, mock_store):
         mutable_database.remove(s)
 
     assert len(mutable_database.query_local(installed=False, explicit=True)) == 0
+
+
+@pytest.mark.parametrize('spec_str,parent_name,expected_nparents', [
+    ('dyninst', 'callpath', 3),
+    ('libelf', 'dyninst', 1),
+    ('libelf', 'libdwarf', 1)
+])
+@pytest.mark.regression('11983')
+def test_check_parents(spec_str, parent_name, expected_nparents, database):
+    """Check that a spec returns the correct number of parents."""
+    s = database.query_one(spec_str)
+
+    parents = s.dependents(name=parent_name)
+    assert len(parents) == expected_nparents
+
+    edges = s.edges_from_dependents(name=parent_name)
+    assert len(edges) == expected_nparents
+
+
+def test_consistency_of_dependents_upon_remove(mutable_database):
+    # Check the initial state
+    s = mutable_database.query_one('dyninst')
+    parents = s.dependents(name='callpath')
+    assert len(parents) == 3
+
+    # Remove a dependent (and all its dependents)
+    mutable_database.remove('mpileaks ^callpath ^mpich2')
+    mutable_database.remove('callpath ^mpich2')
+
+    # Check the final state
+    s = mutable_database.query_one('dyninst')
+    parents = s.dependents(name='callpath')
+    assert len(parents) == 2
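Both new tests lean on `dependents()` and `edges_from_dependents()` staying in lockstep: one parent spec per incoming edge. A sketch of that invariant, assuming the mock `database` fixture above:

```python
# Every in-edge contributes exactly one entry to dependents(), so the two
# views must agree on the count for any parent name.
dyninst = database.query_one('dyninst')
parents = dyninst.dependents(name='callpath')
edges = dyninst.edges_from_dependents(name='callpath')
assert len(parents) == len(edges)
```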
@@ -2,43 +2,31 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import sys
 
-from six import StringIO
+import pytest
+import six
 
+import spack.graph
 import spack.repo
-from spack.graph import AsciiGraph, graph_dot, topological_sort
-from spack.spec import Spec
+import spack.spec
 
 
-def test_topo_sort(mock_packages):
-    """Test topo sort gives correct order."""
-    s = Spec('mpileaks').normalized()
-
-    topo = topological_sort(s)
-
-    assert topo.index('mpileaks') < topo.index('callpath')
-    assert topo.index('mpileaks') < topo.index('mpi')
-    assert topo.index('mpileaks') < topo.index('dyninst')
-    assert topo.index('mpileaks') < topo.index('libdwarf')
-    assert topo.index('mpileaks') < topo.index('libelf')
-
-    assert topo.index('callpath') < topo.index('mpi')
-    assert topo.index('callpath') < topo.index('dyninst')
-    assert topo.index('callpath') < topo.index('libdwarf')
-    assert topo.index('callpath') < topo.index('libelf')
-
-    assert topo.index('dyninst') < topo.index('libdwarf')
-    assert topo.index('dyninst') < topo.index('libelf')
-
-    assert topo.index('libdwarf') < topo.index('libelf')
+@pytest.mark.parametrize('spec_str', ['mpileaks', 'callpath'])
+def test_topo_sort(spec_str, config, mock_packages):
+    """Ensure nodes are ordered topologically"""
+    s = spack.spec.Spec(spec_str).concretized()
+    nodes = spack.graph.topological_sort(s)
+    for idx, current in enumerate(nodes):
+        assert all(following not in current for following in nodes[idx + 1:])
 
 
-def test_static_graph_mpileaks(mock_packages):
+def test_static_graph_mpileaks(config, mock_packages):
     """Test a static spack graph for a simple package."""
-    s = Spec('mpileaks').normalized()
+    s = spack.spec.Spec('mpileaks').normalized()
 
-    stream = StringIO()
-    graph_dot([s], static=True, out=stream)
+    stream = six.StringIO()
+    spack.graph.graph_dot([s], static=True, out=stream)
 
     dot = stream.getvalue()

@@ -62,72 +50,67 @@ def test_static_graph_mpileaks(mock_packages):
 
 def test_dynamic_dot_graph_mpileaks(mock_packages, config):
     """Test dynamically graphing the mpileaks package."""
-    s = Spec('mpileaks').concretized()
-
-    stream = StringIO()
-    graph_dot([s], static=False, out=stream)
+    s = spack.spec.Spec('mpileaks').concretized()
+    stream = six.StringIO()
+    spack.graph.graph_dot([s], static=False, out=stream)
 
     dot = stream.getvalue()
-    print(dot)
 
-    mpileaks_hash, mpileaks_lbl = s.dag_hash(), s.format('{name}')
-    mpi_hash, mpi_lbl = s['mpi'].dag_hash(), s['mpi'].format('{name}')
-    callpath_hash, callpath_lbl = (
-        s['callpath'].dag_hash(), s['callpath'].format('{name}'))
-    dyninst_hash, dyninst_lbl = (
-        s['dyninst'].dag_hash(), s['dyninst'].format('{name}'))
-    libdwarf_hash, libdwarf_lbl = (
-        s['libdwarf'].dag_hash(), s['libdwarf'].format('{name}'))
-    libelf_hash, libelf_lbl = (
-        s['libelf'].dag_hash(), s['libelf'].format('{name}'))
-
-    assert '  "%s" [label="%s"]\n' % (mpileaks_hash, mpileaks_lbl) in dot
-    assert '  "%s" [label="%s"]\n' % (callpath_hash, callpath_lbl) in dot
-    assert '  "%s" [label="%s"]\n' % (mpi_hash, mpi_lbl) in dot
-    assert '  "%s" [label="%s"]\n' % (dyninst_hash, dyninst_lbl) in dot
-    assert '  "%s" [label="%s"]\n' % (libdwarf_hash, libdwarf_lbl) in dot
-    assert '  "%s" [label="%s"]\n' % (libelf_hash, libelf_lbl) in dot
-
-    assert '  "%s" -> "%s"\n' % (dyninst_hash, libdwarf_hash) in dot
-    assert '  "%s" -> "%s"\n' % (callpath_hash, dyninst_hash) in dot
-    assert '  "%s" -> "%s"\n' % (mpileaks_hash, mpi_hash) in dot
-    assert '  "%s" -> "%s"\n' % (libdwarf_hash, libelf_hash) in dot
-    assert '  "%s" -> "%s"\n' % (callpath_hash, mpi_hash) in dot
-    assert '  "%s" -> "%s"\n' % (mpileaks_hash, callpath_hash) in dot
-    assert '  "%s" -> "%s"\n' % (dyninst_hash, libelf_hash) in dot
+    nodes_to_check = ['mpileaks', 'mpi', 'callpath', 'dyninst', 'libdwarf', 'libelf']
+    hashes = {}
+    for name in nodes_to_check:
+        current = s[name]
+        current_hash = current.dag_hash()
+        hashes[name] = current_hash
+        assert '  "{0}" [label="{1}"]\n'.format(
+            current_hash, spack.graph.node_label(current)
+        ) in dot
+
+    dependencies_to_check = [
+        ('dyninst', 'libdwarf'),
+        ('callpath', 'dyninst'),
+        ('mpileaks', 'mpi'),
+        ('libdwarf', 'libelf'),
+        ('callpath', 'mpi'),
+        ('mpileaks', 'callpath'),
+        ('dyninst', 'libelf')
+    ]
+    for parent, child in dependencies_to_check:
+        assert '  "{0}" -> "{1}"\n'.format(hashes[parent], hashes[child]) in dot
 
 
-def test_ascii_graph_mpileaks(mock_packages):
-    """Test dynamically graphing the mpileaks package."""
-    s = Spec('mpileaks').normalized()
+@pytest.mark.skipif(
+    sys.version_info < (3, 6), reason="Ordering might not be consistent"
+)
+def test_ascii_graph_mpileaks(config, mock_packages, monkeypatch):
+    monkeypatch.setattr(
+        spack.graph.AsciiGraph, '_node_label',
+        lambda self, node: node.name
+    )
+    s = spack.spec.Spec('mpileaks').concretized()
 
-    stream = StringIO()
-    graph = AsciiGraph()
+    stream = six.StringIO()
+    graph = spack.graph.AsciiGraph()
     graph.write(s, out=stream, color=False)
-    string = stream.getvalue()
-
-    # Some lines in spack graph still have trailing space
-    # TODO: fix this.
-    string = '\n'.join([line.rstrip() for line in string.split('\n')])
+    graph_str = stream.getvalue()
+    graph_str = '\n'.join([line.rstrip() for line in graph_str.split('\n')])
 
-    assert string == r'''o  mpileaks
+    assert graph_str == r'''o  mpileaks
 |\
 | o  callpath
 |/|
-o |  mpi
+o |  mpich
  /
 o  dyninst
 |\
-| o  libdwarf
+o |  libdwarf
 |/
 o  libelf
 '''
 
 
-def test_topo_sort_filtered(mock_packages):
-    """Test topo sort gives correct order when filtering link deps."""
-    s = Spec('both-link-and-build-dep-a').normalized()
+def test_topological_sort_filtering_dependency_types(config, mock_packages):
+    s = spack.spec.Spec('both-link-and-build-dep-a').concretized()
 
-    topo = topological_sort(s, deptype=('link',))
-
-    assert topo == ['both-link-and-build-dep-a', 'both-link-and-build-dep-c']
+    nodes = spack.graph.topological_sort(s, deptype=('link',))
+    names = [s.name for s in nodes]
+    assert names == ['both-link-and-build-dep-c', 'both-link-and-build-dep-a']
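Worth noting: `topological_sort` now returns `Spec` nodes instead of names, and the filtered test above shows the order is dependencies first. A sketch under those assumptions:

```python
import spack.graph
import spack.spec

s = spack.spec.Spec('mpileaks').concretized()
nodes = spack.graph.topological_sort(s)

# Children precede their parents, so the root comes out last; callers
# that want root-first order reverse the list.
assert nodes[-1].name == 'mpileaks'
root_first = list(reversed(nodes))
```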

@@ -18,10 +18,10 @@
 def check_links(spec_to_check):
     for spec in spec_to_check.traverse():
         for dependent in spec.dependents():
-            assert spec.name in dependent.dependencies_dict()
+            assert dependent.edges_to_dependencies(name=spec.name)
 
         for dependency in spec.dependencies():
-            assert spec.name in dependency.dependents_dict()
+            assert dependency.edges_from_dependents(name=spec.name)
 
 
 @pytest.fixture()

@@ -297,9 +297,16 @@ def test_conflicting_spec_constraints(self):
         # Normalize then add conflicting constraints to the DAG (this is an
         # extremely unlikely scenario, but we test for it anyway)
         mpileaks.normalize()
-        mpileaks._dependencies['mpich'].spec = Spec('mpich@1.0')
-        mpileaks._dependencies['callpath']. \
-            spec._dependencies['mpich'].spec = Spec('mpich@2.0')
+
+        mpileaks.edges_to_dependencies(
+            name='mpich'
+        )[0].spec = Spec('mpich@1.0')
+
+        mpileaks.edges_to_dependencies(
+            name='callpath'
+        )[0].spec.edges_to_dependencies(
+            name='mpich'
+        )[0].spec = Spec('mpich@2.0')
 
         with pytest.raises(spack.spec.InconsistentSpecError):
             mpileaks.flat_dependencies(copy=False)

@@ -617,6 +624,23 @@ def test_copy_concretized(self):
         copy_ids = set(id(s) for s in copy.traverse())
         assert not orig_ids.intersection(copy_ids)
 
+    def test_copy_through_spec_build_interface(self):
+        """Check that copying dependencies using id(node) as a fast identifier of the
+        node works when the spec is wrapped in a SpecBuildInterface object.
+        """
+        s = Spec('mpileaks').concretized()
+
+        c0 = s.copy()
+        assert c0 == s
+
+        # Single indirection
+        c1 = s['mpileaks'].copy()
+        assert c0 == c1 == s
+
+        # Double indirection
+        c2 = s['mpileaks']['mpileaks'].copy()
+        assert c0 == c1 == c2 == s
+
     """
     Here is the graph with deptypes labeled (assume all packages have a 'dt'
     prefix). Arrows are marked with the deptypes ('b' for 'build', 'l' for

@@ -790,21 +814,25 @@ def test_construct_spec_with_deptypes(self):
             }
         })
 
-        assert s['b']._dependencies['c'].deptypes == ('build',)
-        assert s['d']._dependencies['e'].deptypes == ('build', 'link')
-        assert s['e']._dependencies['f'].deptypes == ('run',)
-
-        assert s['b']._dependencies['c'].deptypes == ('build',)
-        assert s['d']._dependencies['e'].deptypes == ('build', 'link')
-        assert s['e']._dependencies['f'].deptypes == ('run',)
-
-        assert s['c']._dependents['b'].deptypes == ('build',)
-        assert s['e']._dependents['d'].deptypes == ('build', 'link')
-        assert s['f']._dependents['e'].deptypes == ('run',)
-
-        assert s['c']._dependents['b'].deptypes == ('build',)
-        assert s['e']._dependents['d'].deptypes == ('build', 'link')
-        assert s['f']._dependents['e'].deptypes == ('run',)
+        assert s['b'].edges_to_dependencies(
+            name='c'
+        )[0].deptypes == ('build',)
+        assert s['d'].edges_to_dependencies(
+            name='e'
+        )[0].deptypes == ('build', 'link')
+        assert s['e'].edges_to_dependencies(
+            name='f'
+        )[0].deptypes == ('run',)
+
+        assert s['c'].edges_from_dependents(
+            name='b'
+        )[0].deptypes == ('build',)
+        assert s['e'].edges_from_dependents(
+            name='d'
+        )[0].deptypes == ('build', 'link')
+        assert s['f'].edges_from_dependents(
+            name='e'
+        )[0].deptypes == ('run',)
 
     def check_diamond_deptypes(self, spec):
         """Validate deptypes in dt-diamond spec.

@@ -813,17 +841,21 @@ def check_diamond_deptypes(self, spec):
         depend on the same dependency in different ways.
 
         """
-        assert spec['dt-diamond']._dependencies[
-            'dt-diamond-left'].deptypes == ('build', 'link')
-        assert spec['dt-diamond']._dependencies[
-            'dt-diamond-right'].deptypes == ('build', 'link')
-        assert spec['dt-diamond-left']._dependencies[
-            'dt-diamond-bottom'].deptypes == ('build',)
-        assert spec['dt-diamond-right'] ._dependencies[
-            'dt-diamond-bottom'].deptypes == ('build', 'link', 'run')
+        assert spec['dt-diamond'].edges_to_dependencies(
+            name='dt-diamond-left'
+        )[0].deptypes == ('build', 'link')
+
+        assert spec['dt-diamond'].edges_to_dependencies(
+            name='dt-diamond-right'
+        )[0].deptypes == ('build', 'link')
+
+        assert spec['dt-diamond-left'].edges_to_dependencies(
+            name='dt-diamond-bottom'
+        )[0].deptypes == ('build',)
+
+        assert spec['dt-diamond-right'].edges_to_dependencies(
+            name='dt-diamond-bottom'
+        )[0].deptypes == ('build', 'link', 'run')
 
     def check_diamond_normalized_dag(self, spec):

@@ -989,3 +1021,99 @@ def test_spec_tree_respect_deptypes(self):
         assert 'version-test-pkg' in out
         out = s.tree(deptypes=('link', 'run'))
         assert 'version-test-pkg' not in out
+
+
+def test_synthetic_construction_of_split_dependencies_from_same_package(
+        mock_packages, config
+):
+    # Construct in a synthetic way (i.e. without using the solver)
+    # the following spec:
+    #
+    #           b
+    #   build /   \ link,run
+    #       c@2.0   c@1.0
+    #
+    # To demonstrate that a spec can now hold two direct
+    # dependencies from the same package
+    root = Spec('b').concretized()
+    link_run_spec = Spec('c@1.0').concretized()
+    build_spec = Spec('c@2.0').concretized()
+
+    root.add_dependency_edge(link_run_spec, deptype='link')
+    root.add_dependency_edge(link_run_spec, deptype='run')
+    root.add_dependency_edge(build_spec, deptype='build')
+
+    # Check dependencies from the perspective of root
+    assert len(root.dependencies()) == 2
+    assert all(x.name == 'c' for x in root.dependencies())
+
+    assert '@2.0' in root.dependencies(name='c', deptype='build')[0]
+    assert '@1.0' in root.dependencies(name='c', deptype=('link', 'run'))[0]
+
+    # Check parent from the perspective of the dependencies
+    assert len(build_spec.dependents()) == 1
+    assert len(link_run_spec.dependents()) == 1
+    assert build_spec.dependents() == link_run_spec.dependents()
+    assert build_spec != link_run_spec
+
+
+def test_synthetic_construction_bootstrapping(mock_packages, config):
+    # Construct the following spec:
+    #
+    #   b@2.0
+    #     | build
+    #   b@1.0
+    #
+    root = Spec('b@2.0').concretized()
+    bootstrap = Spec('b@1.0').concretized()
+
+    root.add_dependency_edge(bootstrap, deptype='build')
+
+    assert len(root.dependencies()) == 1
+    assert root.dependencies()[0].name == 'b'
+    assert root.name == 'b'
+
+
+def test_addition_of_different_deptypes_in_multiple_calls(mock_packages, config):
+    # Construct the following spec:
+    #
+    #   b@2.0
+    #     | build,link,run
+    #   b@1.0
+    #
+    # with three calls and check we always have a single edge
+    root = Spec('b@2.0').concretized()
+    bootstrap = Spec('b@1.0').concretized()
+
+    for current_deptype in ('build', 'link', 'run'):
+        root.add_dependency_edge(bootstrap, deptype=current_deptype)
+
+        # Check edges in dependencies
+        assert len(root.edges_to_dependencies()) == 1
+        forward_edge = root.edges_to_dependencies(deptype=current_deptype)[0]
+        assert current_deptype in forward_edge.deptypes
+        assert id(forward_edge.parent) == id(root)
+        assert id(forward_edge.spec) == id(bootstrap)
+
+        # Check edges from dependents
+        assert len(bootstrap.edges_from_dependents()) == 1
+        backward_edge = bootstrap.edges_from_dependents(deptype=current_deptype)[0]
+        assert current_deptype in backward_edge.deptypes
+        assert id(backward_edge.parent) == id(root)
+        assert id(backward_edge.spec) == id(bootstrap)
+
+
+@pytest.mark.parametrize('c1_deptypes,c2_deptypes', [
+    ('link', ('build', 'link')),
+    (('link', 'run'), ('build', 'link'))
+])
+def test_adding_same_deptype_with_the_same_name_raises(
+        mock_packages, config, c1_deptypes, c2_deptypes
+):
+    p = Spec('b@2.0').concretized()
+    c1 = Spec('b@1.0').concretized()
+    c2 = Spec('b@2.0').concretized()
+
+    p.add_dependency_edge(c1, deptype=c1_deptypes)
+    with pytest.raises(spack.error.SpackError):
+        p.add_dependency_edge(c2, deptype=c2_deptypes)
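Taken together, the synthetic tests pin down the `add_dependency_edge` contract. A condensed sketch, assuming the mock `b` package used above:

```python
import pytest

import spack.error
from spack.spec import Spec

root = Spec('b@2.0').concretized()
child = Spec('b@1.0').concretized()

# Repeated calls with the same child spec merge deptypes into one edge.
root.add_dependency_edge(child, deptype='build')
root.add_dependency_edge(child, deptype='run')
assert len(root.edges_to_dependencies()) == 1

# A different spec of the same package under an overlapping deptype
# conflicts with the existing edge and raises.
other = Spec('b@2.0').concretized()
with pytest.raises(spack.error.SpackError):
    root.add_dependency_edge(other, deptype='build')
```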

@@ -415,6 +415,7 @@ def install(self, spec, prefix):
         # default: 'gmp', => ('gmp', 'gmp', True, True)
         # any other combination needs a full tuple
         # if not (useinc || uselib): usedir - i.e (False, False)
+        direct_dependencies = [x.name for x in spec.dependencies()]
         for library in (
             ('cuda', 'cuda', False, False),
             ('hip', 'hip', True, False),
@@ -465,7 +466,7 @@ def install(self, spec, prefix):
                 useinc = True
                 uselib = True
 
-            library_requested = spacklibname.split(':')[0] in spec.dependencies_dict()
+            library_requested = spacklibname.split(':')[0] in direct_dependencies
             options.append(
                 '--with-{library}={value}'.format(
                     library=petsclibname,