Merge pull request #1330 from suraia/fix-ascii-graph

Fix graph command with ASCII output
commit 369b2ef01f
becker33 merged this on 2016-08-04 09:37:24 -07:00 (committed via GitHub)
3 changed files with 101 additions and 121 deletions


@@ -61,7 +61,6 @@
 can take a number of specs as input.

 """
-__all__ = ['topological_sort', 'graph_ascii', 'AsciiGraph', 'graph_dot']

 from heapq import *
@@ -71,6 +70,8 @@
 import spack
 from spack.spec import Spec

+__all__ = ['topological_sort', 'graph_ascii', 'AsciiGraph', 'graph_dot']
+

 def topological_sort(spec, **kwargs):
     """Topological sort for specs.
@@ -94,6 +95,7 @@ def topological_sort(spec, **kwargs):
     nodes = spec.index()

     topo_order = []
+    par = dict((name, parents(nodes[name])) for name in nodes.keys())
     remaining = [name for name in nodes.keys() if not parents(nodes[name])]
     heapify(remaining)

@@ -102,12 +104,12 @@ def topological_sort(spec, **kwargs):
         topo_order.append(name)

         node = nodes[name]
-        for dep in children(node).values():
-            del parents(dep)[node.name]
-            if not parents(dep):
+        for dep in children(node):
+            par[dep.name].remove(node)
+            if not par[dep.name]:
                 heappush(remaining, dep.name)

-    if any(parents(s) for s in spec.traverse()):
+    if any(par.get(s.name, []) for s in spec.traverse()):
         raise ValueError("Spec has cycles!")
     else:
         return topo_order
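
Note: the two hunks above are the heart of the fix. topological_sort now builds a separate parent map (par) up front and consumes that copy while sorting, rather than deleting entries from each node's real parent links, so the sort no longer mutates the spec it is asked to graph. A standalone sketch of the same approach, using plain dicts and simplified names instead of Spack specs (illustrative only, not code from this commit):

    from heapq import heapify, heappush, heappop

    def topo_sort(children):
        """children maps each node name to a list of child names."""
        # Build the parent lists in a separate dict, as the patched code does
        # with 'par', so sorting never touches the input graph itself.
        par = dict((name, []) for name in children)
        for name, deps in children.items():
            for dep in deps:
                par[dep].append(name)

        remaining = [name for name in children if not par[name]]
        heapify(remaining)               # heap keeps the output deterministic

        order = []
        while remaining:
            name = heappop(remaining)
            order.append(name)
            for dep in children[name]:
                par[dep].remove(name)    # consume the copy, not the real graph
                if not par[dep]:
                    heappush(remaining, dep)

        if any(par.values()):
            raise ValueError("graph has cycles!")
        return order

    print(topo_sort({'mpileaks': ['mpich'], 'mpich': []}))  # ['mpileaks', 'mpich']

As in the patched function, a name is emitted only after every node that points to it, so the root spec comes first and leaf dependencies come last.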
@@ -132,6 +134,7 @@ def find(seq, predicate):
 states = ('node', 'collapse', 'merge-right', 'expand-right', 'back-edge')
 NODE, COLLAPSE, MERGE_RIGHT, EXPAND_RIGHT, BACK_EDGE = states

+
 class AsciiGraph(object):
     def __init__(self):
         # These can be set after initialization or after a call to
@@ -153,18 +156,15 @@ def __init__(self):
         self._prev_state = None    # State of previous line
         self._prev_index = None    # Index of expansion point of prev line

-
     def _indent(self):
         self._out.write(self.indent * ' ')

-
     def _write_edge(self, string, index, sub=0):
         """Write a colored edge to the output stream."""
         name = self._frontier[index][sub]
         edge = "@%s{%s}" % (self._name_to_color[name], string)
         self._out.write(edge)

-
     def _connect_deps(self, i, deps, label=None):
         """Connect dependencies to existing edges in the frontier.
@@ -199,7 +199,8 @@ def _connect_deps(self, i, deps, label=None):
             collapse = True
             if self._prev_state == EXPAND_RIGHT:
                 # Special case where previous line expanded and i is off by 1.
-                self._back_edge_line([], j, i+1, True, label + "-1.5 " + str((i+1,j)))
+                self._back_edge_line([], j, i + 1, True,
+                                     label + "-1.5 " + str((i + 1, j)))
                 collapse = False

             else:
@@ -209,17 +210,18 @@ def _connect_deps(self, i, deps, label=None):

             if i - j > 1:
                 # We need two lines to connect if distance > 1
-                self._back_edge_line([], j, i, True, label + "-1 " + str((i,j)))
+                self._back_edge_line([], j, i, True,
+                                     label + "-1 " + str((i, j)))
                 collapse = False

-            self._back_edge_line([j], -1, -1, collapse, label + "-2 " + str((i,j)))
+            self._back_edge_line([j], -1, -1, collapse,
+                                 label + "-2 " + str((i, j)))
             return True

         elif deps:
             self._frontier.insert(i, deps)
             return False

-
     def _set_state(self, state, index, label=None):
         if state not in states:
             raise ValueError("Invalid graph state!")
@@ -233,7 +235,6 @@ def _set_state(self, state, index, label=None):
            self._out.write("%-20s" % (str(label) if label else ''))
            self._out.write("%s" % self._frontier)

-
     def _back_edge_line(self, prev_ends, end, start, collapse, label=None):
         """Write part of a backwards edge in the graph.
@@ -287,27 +288,26 @@ def advance(to_pos, edges):
         self._indent()
         for p in prev_ends:
-            advance(p, lambda: [("| ", self._pos)] )
-            advance(p+1, lambda: [("|/", self._pos)] )
+            advance(p, lambda: [("| ", self._pos)])  # NOQA: ignore=E272
+            advance(p + 1, lambda: [("|/", self._pos)])  # NOQA: ignore=E272

         if end >= 0:
-            advance(end + 1, lambda: [("| ", self._pos)] )
-            advance(start - 1, lambda: [("|", self._pos), ("_", end)] )
+            advance(end + 1, lambda: [("| ", self._pos)])  # NOQA: ignore=E272
+            advance(start - 1, lambda: [("|", self._pos), ("_", end)])  # NOQA: ignore=E272
         else:
-            advance(start - 1, lambda: [("| ", self._pos)] )
+            advance(start - 1, lambda: [("| ", self._pos)])  # NOQA: ignore=E272

         if start >= 0:
-            advance(start, lambda: [("|", self._pos), ("/", end)] )
+            advance(start, lambda: [("|", self._pos), ("/", end)])  # NOQA: ignore=E272

         if collapse:
-            advance(flen, lambda: [(" /", self._pos)] )
+            advance(flen, lambda: [(" /", self._pos)])  # NOQA: ignore=E272
         else:
-            advance(flen, lambda: [("| ", self._pos)] )
+            advance(flen, lambda: [("| ", self._pos)])  # NOQA: ignore=E272

         self._set_state(BACK_EDGE, end, label)
         self._out.write("\n")

-
     def _node_line(self, index, name):
         """Writes a line with a node at index."""
         self._indent()
@@ -323,7 +323,6 @@ def _node_line(self, index, name):
         self._set_state(NODE, index)
         self._out.write("\n")

-
     def _collapse_line(self, index):
         """Write a collapsing line after a node was added at index."""
         self._indent()
@@ -335,7 +334,6 @@ def _collapse_line(self, index):
         self._set_state(COLLAPSE, index)
         self._out.write("\n")

-
     def _merge_right_line(self, index):
         """Edge at index is same as edge to right. Merge directly with '\'"""
         self._indent()
@@ -349,7 +347,6 @@ def _merge_right_line(self, index):
         self._set_state(MERGE_RIGHT, index)
         self._out.write("\n")

-
     def _expand_right_line(self, index):
         self._indent()
         for c in range(index):
@@ -364,7 +361,6 @@ def _expand_right_line(self, index):
         self._set_state(EXPAND_RIGHT, index)
         self._out.write("\n")

-
     def write(self, spec, **kwargs):
         """Write out an ascii graph of the provided spec.
@@ -407,7 +403,8 @@ def write(self, spec, **kwargs):
             i = find(self._frontier, lambda f: len(f) > 1)
             if i >= 0:
-                # Expand frontier until there are enough columns for all children.
+                # Expand frontier until there are enough columns for all
+                # children.

                 # Figure out how many back connections there are and
                 # sort them so we do them in order
@@ -425,7 +422,8 @@ def write(self, spec, **kwargs):
                 for j, (b, d) in enumerate(back):
                     self._frontier[i].remove(d)
                     if i - b > 1:
-                        self._back_edge_line(prev_ends, b, i, False, 'left-1')
+                        self._back_edge_line(prev_ends, b, i, False,
+                                             'left-1')
                         del prev_ends[:]

                     prev_ends.append(b)
@@ -439,8 +437,9 @@ def write(self, spec, **kwargs):
             elif len(self._frontier[i]) > 1:
                 # Expand forward after doing all back connections

-                if (i+1 < len(self._frontier) and len(self._frontier[i+1]) == 1
-                    and self._frontier[i+1][0] in self._frontier[i]):
+                if (i + 1 < len(self._frontier) and
+                        len(self._frontier[i + 1]) == 1 and
+                        self._frontier[i + 1][0] in self._frontier[i]):
                     # We need to connect to the element to the right.
                     # Keep lines straight by connecting directly and
                     # avoiding unnecessary expand/contract.
@@ -458,7 +457,6 @@ def write(self, spec, **kwargs):
                     self._frontier.pop(i)
                     self._connect_deps(i, deps, "post-expand")

-
                 # Handle any remaining back edges to the right
                 j = i + 1
                 while j < len(self._frontier):
@@ -477,8 +475,9 @@ def write(self, spec, **kwargs):
                 # Replace node with its dependencies
                 self._frontier.pop(i)
-                if node.dependencies:
-                    deps = sorted((d for d in node.dependencies), reverse=True)
+                if node.dependencies():
+                    deps = sorted((d.name for d in node.dependencies()),
+                                  reverse=True)
                     self._connect_deps(i, deps, "new-deps")  # anywhere.

                 elif self._frontier:
@@ -501,7 +500,6 @@ def graph_ascii(spec, **kwargs):
     graph.write(spec, color=color, out=out)

-
 def graph_dot(*specs, **kwargs):
     """Generate a graph in dot format of all provided specs.


@@ -460,7 +460,7 @@ def concrete(self):

     def __str__(self):
         return ''.join(
-            ["^" + str(self[name].spec) for name in sorted(self.keys())])
+            ["^" + self[name].format() for name in sorted(self.keys())])


 @key_ordering
@@ -861,7 +861,7 @@ def return_val(res):
             for name in sorted(successors):
                 child = successors[name]
                 children = child.spec.traverse_with_deptype(
-                    visited, d=d + 1, deptype=deptype_query,
+                    visited, d=d + 1, deptype=deptype,
                     deptype_query=deptype_query,
                     _self_deptype=child.deptypes, **kwargs)
                 for elt in children:


@@ -32,8 +32,6 @@
 import spack.architecture
 import spack.package

-from llnl.util.lang import list_modules
-
 from spack.spec import Spec
 from spack.test.mock_packages_test import *
@@ -51,7 +49,6 @@ def test_conflicting_package_constraints(self):
         self.assertRaises(spack.spec.UnsatisfiableVersionSpecError,
                           spec.normalize)

-
     def test_preorder_node_traversal(self):
         dag = Spec('mpileaks ^zmpi')
         dag.normalize()
@@ -66,7 +63,6 @@ def test_preorder_node_traversal(self):
         traversal = dag.traverse(depth=True)
         self.assertEqual([(x, y.name) for x, y in traversal], pairs)

-
     def test_preorder_edge_traversal(self):
         dag = Spec('mpileaks ^zmpi')
         dag.normalize()
@@ -81,7 +77,6 @@ def test_preorder_edge_traversal(self):
         traversal = dag.traverse(cover='edges', depth=True)
         self.assertEqual([(x, y.name) for x, y in traversal], pairs)

-
     def test_preorder_path_traversal(self):
         dag = Spec('mpileaks ^zmpi')
         dag.normalize()
@@ -96,7 +91,6 @@ def test_preorder_path_traversal(self):
         traversal = dag.traverse(cover='paths', depth=True)
         self.assertEqual([(x, y.name) for x, y in traversal], pairs)

-
     def test_postorder_node_traversal(self):
         dag = Spec('mpileaks ^zmpi')
         dag.normalize()
@@ -111,7 +105,6 @@ def test_postorder_node_traversal(self):
         traversal = dag.traverse(depth=True, order='post')
         self.assertEqual([(x, y.name) for x, y in traversal], pairs)

-
     def test_postorder_edge_traversal(self):
         dag = Spec('mpileaks ^zmpi')
         dag.normalize()
@@ -126,7 +119,6 @@ def test_postorder_edge_traversal(self):
         traversal = dag.traverse(cover='edges', depth=True, order='post')
         self.assertEqual([(x, y.name) for x, y in traversal], pairs)

-
     def test_postorder_path_traversal(self):
         dag = Spec('mpileaks ^zmpi')
         dag.normalize()
@@ -141,7 +133,6 @@ def test_postorder_path_traversal(self):
         traversal = dag.traverse(cover='paths', depth=True, order='post')
         self.assertEqual([(x, y.name) for x, y in traversal], pairs)

-
     def test_conflicting_spec_constraints(self):
         mpileaks = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf ^libdwarf')
@@ -155,7 +146,6 @@ def test_conflicting_spec_constraints(self):
         self.assertRaises(spack.spec.InconsistentSpecError,
                           lambda: mpileaks.flat_dependencies(copy=False))

-
     def test_normalize_twice(self):
         """Make sure normalize can be run twice on the same spec,
         and that it is idempotent."""
@@ -166,7 +156,6 @@ def test_normalize_twice(self):
         spec.normalize()
         self.assertEqual(n1, spec)

-
     def test_normalize_a_lot(self):
         spec = Spec('mpileaks')
         spec.normalize()
@@ -174,7 +163,6 @@ def test_normalize_a_lot(self):
         spec.normalize()
         spec.normalize()

-
     def test_normalize_with_virtual_spec(self):
         dag = Spec('mpileaks',
                    Spec('callpath',
@@ -189,14 +177,13 @@ def test_normalize_with_virtual_spec(self):
         # make sure nothing with the same name occurs twice
         counts = {}
         for spec in dag.traverse(key=id):
-            if not spec.name in counts:
+            if spec.name not in counts:
                 counts[spec.name] = 0
             counts[spec.name] += 1

         for name in counts:
             self.assertEqual(counts[name], 1, "Count for %s was not 1!" % name)

-
     def check_links(self, spec_to_check):
         for spec in spec_to_check.traverse():
             for dependent in spec.dependents():
@@ -211,7 +198,6 @@ def check_links(self, spec_to_check):
                     "%s not in dependents of %s" %
                     (spec.name, dependency.name))

-
     def test_dependents_and_dependencies_are_correct(self):
         spec = Spec('mpileaks',
                     Spec('callpath',
@@ -226,43 +212,45 @@ def test_dependents_and_dependencies_are_correct(self):
         spec.normalize()
         self.check_links(spec)

-
     def test_unsatisfiable_version(self):
         self.set_pkg_dep('mpileaks', 'mpich@1.0')
         spec = Spec('mpileaks ^mpich@2.0 ^callpath ^dyninst ^libelf ^libdwarf')
-        self.assertRaises(spack.spec.UnsatisfiableVersionSpecError, spec.normalize)
+        self.assertRaises(spack.spec.UnsatisfiableVersionSpecError,
+                          spec.normalize)

-
     def test_unsatisfiable_compiler(self):
         self.set_pkg_dep('mpileaks', 'mpich%gcc')
-        spec = Spec('mpileaks ^mpich%intel ^callpath ^dyninst ^libelf ^libdwarf')
-        self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError, spec.normalize)
+        spec = Spec('mpileaks ^mpich%intel ^callpath ^dyninst ^libelf'
+                    ' ^libdwarf')
+        self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError,
+                          spec.normalize)

-
     def test_unsatisfiable_compiler_version(self):
         self.set_pkg_dep('mpileaks', 'mpich%gcc@4.6')
-        spec = Spec('mpileaks ^mpich%gcc@4.5 ^callpath ^dyninst ^libelf ^libdwarf')
-        self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError, spec.normalize)
+        spec = Spec('mpileaks ^mpich%gcc@4.5 ^callpath ^dyninst ^libelf'
+                    ' ^libdwarf')
+        self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError,
+                          spec.normalize)

-
     def test_unsatisfiable_architecture(self):
-        platform = spack.architecture.platform()
-
         self.set_pkg_dep('mpileaks', 'mpich platform=test target=be')
-        spec = Spec('mpileaks ^mpich platform=test target=fe ^callpath ^dyninst ^libelf ^libdwarf')
-        self.assertRaises(spack.spec.UnsatisfiableArchitectureSpecError, spec.normalize)
+        spec = Spec('mpileaks ^mpich platform=test target=fe ^callpath'
+                    ' ^dyninst ^libelf ^libdwarf')
+        self.assertRaises(spack.spec.UnsatisfiableArchitectureSpecError,
+                          spec.normalize)

-
     def test_invalid_dep(self):
         spec = Spec('libelf ^mpich')
-        self.assertRaises(spack.spec.InvalidDependencyException, spec.normalize)
+        self.assertRaises(spack.spec.InvalidDependencyException,
+                          spec.normalize)

         spec = Spec('libelf ^libdwarf')
-        self.assertRaises(spack.spec.InvalidDependencyException, spec.normalize)
+        self.assertRaises(spack.spec.InvalidDependencyException,
+                          spec.normalize)

         spec = Spec('mpich ^dyninst ^libelf')
-        self.assertRaises(spack.spec.InvalidDependencyException, spec.normalize)
+        self.assertRaises(spack.spec.InvalidDependencyException,
+                          spec.normalize)

-
     def test_equal(self):
         # Different spec structures to test for equality
@@ -301,10 +289,10 @@ def test_equal(self):
         self.assertFalse(flip_flat.eq_dag(flip_dag))
         self.assertFalse(dag.eq_dag(flip_dag))

-
     def test_normalize_mpileaks(self):
         # Spec parsed in from a string
-        spec = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf@1.8.11 ^libdwarf')
+        spec = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf@1.8.11'
+                    ' ^libdwarf')

         # What that spec should look like after parsing
         expected_flat = Spec(
@@ -367,7 +355,6 @@ def test_normalize_mpileaks(self):
         self.assertEqual(spec, non_unique_nodes)
         self.assertFalse(spec.eq_dag(non_unique_nodes))

-
     def test_normalize_with_virtual_package(self):
         spec = Spec('mpileaks ^mpi ^libelf@1.8.11 ^libdwarf')
         spec.normalize()
@@ -383,7 +370,6 @@ def test_normalize_with_virtual_package(self):
         self.assertEqual(str(spec), str(expected_normalized))

-
     def test_contains(self):
         spec = Spec('mpileaks ^mpi ^libelf@1.8.11 ^libdwarf')
         self.assertTrue(Spec('mpi') in spec)
@@ -394,7 +380,6 @@ def test_contains(self):
         self.assertFalse(Spec('libgoblin') in spec)
         self.assertTrue(Spec('mpileaks') in spec)

-
     def test_copy_simple(self):
         orig = Spec('mpileaks')
         copy = orig.copy()
@@ -411,7 +396,6 @@ def test_copy_simple(self):
         copy_ids = set(id(s) for s in copy.traverse())
         self.assertFalse(orig_ids.intersection(copy_ids))

-
     def test_copy_normalized(self):
         orig = Spec('mpileaks')
         orig.normalize()
@@ -429,7 +413,6 @@ def test_copy_normalized(self):
         copy_ids = set(id(s) for s in copy.traverse())
         self.assertFalse(orig_ids.intersection(copy_ids))

-
     def test_copy_concretized(self):
         orig = Spec('mpileaks')
         orig.concretize()
@@ -476,20 +459,20 @@ def test_deptype_traversal(self):
         dag = Spec('dtuse')
         dag.normalize()

-        names = ['dtuse', 'dttop', 'dtlink1', 'dtlink3', 'dtlink4',
-                 'dtrun1', 'dtlink5', 'dtrun3']
-        traversal = dag.traverse()
+        names = ['dtuse', 'dttop', 'dtbuild1', 'dtbuild2', 'dtlink2',
+                 'dtlink1', 'dtlink3', 'dtlink4']
+        traversal = dag.traverse(deptype=('build', 'link'))
         self.assertEqual([x.name for x in traversal], names)

     def test_deptype_traversal_with_builddeps(self):
         dag = Spec('dttop')
         dag.normalize()

-        names = ['dttop', 'dtbuild1', 'dtlink2', 'dtrun2', 'dtlink1',
-                 'dtlink3', 'dtlink4', 'dtrun1', 'dtlink5', 'dtrun3']
-        traversal = dag.traverse()
+        names = ['dttop', 'dtbuild1', 'dtbuild2', 'dtlink2',
+                 'dtlink1', 'dtlink3', 'dtlink4']
+        traversal = dag.traverse(deptype=('build', 'link'))
         self.assertEqual([x.name for x in traversal], names)

     def test_deptype_traversal_full(self):
@@ -500,15 +483,14 @@ def test_deptype_traversal_full(self):
                  'dtlink1', 'dtlink3', 'dtlink4', 'dtrun1', 'dtlink5',
                  'dtrun3', 'dtbuild3']
-        traversal = dag.traverse(deptype_query=spack.alldeps)
+        traversal = dag.traverse(deptype=spack.alldeps)
         self.assertEqual([x.name for x in traversal], names)

-    def test_deptype_traversal_pythonpath(self):
+    def test_deptype_traversal_run(self):
         dag = Spec('dttop')
         dag.normalize()

-        names = ['dttop', 'dtbuild1', 'dtrun2', 'dtlink1', 'dtrun1',
-                 'dtrun3']
-        traversal = dag.traverse(deptype=spack.nolink, deptype_query='run')
+        names = ['dttop', 'dtrun1', 'dtrun3']
+        traversal = dag.traverse(deptype='run')
         self.assertEqual([x.name for x in traversal], names)
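
These last hunks switch the traversal tests to Spec.traverse()'s deptype filter, which selects which kinds of dependency edges (build, link, run) are followed. A small sketch of the same calls outside the test class (it assumes the mock dttop/dt* packages from Spack's test repository are on the package path; with real packages any normalized spec behaves the same way):

    import spack
    from spack.spec import Spec

    dag = Spec('dttop')
    dag.normalize()

    # Follow only build and link edges, only run edges, or every edge type.
    build_link = [s.name for s in dag.traverse(deptype=('build', 'link'))]
    run_only = [s.name for s in dag.traverse(deptype='run')]
    everything = [s.name for s in dag.traverse(deptype=spack.alldeps)]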