More spec improvements

- Spec.copy() does not create superfluous nodes and preserves DAG
  connections.

- Spec.normalize() doesn't create extra dependency nodes or throw out
  old ones like before.

- Added better test cases for above changes.

Minor things:
- Fixed bug waiting to happen in PackageDB.get()
  - instances was keyed by name, not by spec, so caching wasn't really
    working at all.
- Removed unused PackageDB.compute_dependents function.
- Fixed PackageDB.graph_dependencies() so that spack graph works again.
This commit is contained in:
Todd Gamblin 2014-08-09 16:17:40 -07:00
parent 63f8af8078
commit 5f073ae220
5 changed files with 131 additions and 63 deletions

View file

@ -29,7 +29,7 @@
import spack
import spack.cmd
description = "Show dependent packages."
description = "Show installed packages that depend on another."
def setup_parser(subparser):
subparser.add_argument(
@ -42,5 +42,5 @@ def dependents(parser, args):
tty.die("spack dependents takes only one spec.")
fmt = '$_$@$%@$+$=$#'
deps = [d.format(fmt) for d in specs[0].package.installed_dependents]
tty.msg("Dependents of %s" % specs[0].format(fmt), *deps)
deps = [d.format(fmt, color=True) for d in specs[0].package.installed_dependents]
tty.msg("Dependents of %s" % specs[0].format(fmt, color=True), *deps)

View file

@ -69,9 +69,9 @@ def get(self, spec):
if not spec in self.instances:
package_class = self.get_class_for_package_name(spec.name)
self.instances[spec.name] = package_class(spec)
self.instances[spec.copy()] = package_class(spec)
return self.instances[spec.name]
return self.instances[spec]
@_autospec
@ -179,24 +179,6 @@ def get_class_for_package_name(self, pkg_name):
return cls
def compute_dependents(self):
"""Reads in all package files and sets dependence information on
Package objects in memory.
"""
if not hasattr(compute_dependents, index):
compute_dependents.index = {}
for pkg in all_packages():
if pkg._dependents is None:
pkg._dependents = []
for name, dep in pkg.dependencies.iteritems():
dpkg = self.get(name)
if dpkg._dependents is None:
dpkg._dependents = []
dpkg._dependents.append(pkg.name)
def graph_dependencies(self, out=sys.stdout):
"""Print out a graph of all the dependencies between packages.
Graph is in dot format."""
@ -211,10 +193,17 @@ def quote(string):
return '"%s"' % string
deps = []
for pkg in all_packages():
for pkg in self.all_packages():
out.write(' %-30s [label="%s"]\n' % (quote(pkg.name), pkg.name))
# Add edges for each depends_on in the package.
for dep_name, dep in pkg.dependencies.iteritems():
deps.append((pkg.name, dep_name))
# If the package provides something, add an edge for that.
for provider in set(p.name for p in pkg.provided):
deps.append((provider, pkg.name))
out.write('\n')
for pair in deps:

View file

@ -310,9 +310,8 @@ def concrete(self):
def __str__(self):
sorted_dep_names = sorted(self.keys())
return ''.join(
["^" + str(self[name]) for name in sorted_dep_names])
["^" + str(self[name]) for name in sorted(self.keys())])
@key_ordering
@ -562,10 +561,9 @@ def dep_hash(self, length=None):
"""Return a hash representing all dependencies of this spec
(direct and indirect).
This always normalizes first so that hash is consistent.
If you want this hash to be consistent, you should
concretize the spec first so that it is not ambiguous.
"""
self.normalize()
sha = hashlib.sha1()
sha.update(self.dep_string())
full_hash = sha.hexdigest()
@ -641,7 +639,7 @@ def _expand_virtual_packages(self):
spec._replace_with(concrete)
# If there are duplicate providers or duplicate provider deps, this
# consolidates them and merges constraints.
# consolidates them and merges constraints.
self.normalize(force=True)
@ -675,43 +673,51 @@ def concretized(self):
return clone
def flat_dependencies(self):
"""Return a DependencyMap containing all of this spec's dependencies
with their constraints merged. If there are any conflicts, throw
an exception.
def flat_dependencies(self, **kwargs):
"""Return a DependencyMap containing all of this spec's
dependencies with their constraints merged.
This will work even on specs that are not normalized; i.e. specs
that have two instances of the same dependency in the DAG.
This is the first step of normalization.
If copy is True, returns merged copies of its dependencies
without modifying the spec it's called on.
If copy is False, clears this spec's dependencies and
returns them.
"""
# Once that is guaranteed, we know any constraint violations are due
# to the spec -- so they're the user's fault, not Spack's.
copy = kwargs.get('copy', True)
flat_deps = DependencyMap()
try:
for spec in self.traverse(root=False):
if spec.name not in flat_deps:
new_spec = spec.copy(deps=False)
flat_deps[spec.name] = new_spec
if copy:
flat_deps[spec.name] = spec.copy(deps=False)
else:
flat_deps[spec.name] = spec
else:
flat_deps[spec.name].constrain(spec)
except UnsatisfiableSpecError, e:
# This REALLY shouldn't happen unless something is wrong in spack.
# It means we got a spec DAG with two instances of the same package
# that had inconsistent constraints. There's no way for a user to
# produce a spec like this (the parser adds all deps to the root),
# so this means OUR code is not sane!
raise InconsistentSpecError("Invalid Spec DAG: %s" % e.message)
if not copy:
for dep in flat_deps.values():
dep.dependencies.clear()
dep.dependents.clear()
self.dependencies.clear()
return flat_deps
return flat_deps
except UnsatisfiableSpecError, e:
# Here, the DAG contains two instances of the same package
# with inconsistent constraints. Users cannot produce
# inconsistent specs like this on the command line: the
# parser doesn't allow it. Spack must be broken!
raise InconsistentSpecError("Invalid Spec DAG: %s" % e.message)
def flatten(self):
"""Pull all dependencies up to the root (this spec).
Merge constraints for dependencies with the same name, and if they
conflict, throw an exception. """
self.dependencies = self.flat_dependencies()
for dep in self.flat_dependencies(copy=False):
self._add_dependency(dep)
def _normalize_helper(self, visited, spec_deps, provider_index):
@ -819,8 +825,7 @@ def normalize(self, **kwargs):
self.package.validate_dependencies()
# Get all the dependencies into one DependencyMap
spec_deps = self.flat_dependencies()
self.dependencies.clear()
spec_deps = self.flat_dependencies(copy=False)
# Figure out which of the user-provided deps provide virtual deps.
# Remove virtual deps that are already provided by something in the spec
@ -1037,22 +1042,29 @@ def _dup(self, other, **kwargs):
Whether deps should be copied too. Set to false to copy a
spec but not its dependencies.
"""
# TODO: this needs to handle DAGs.
# Local node attributes get copied first.
self.name = other.name
self.versions = other.versions.copy()
self.variants = other.variants.copy()
self.architecture = other.architecture
self.compiler = None
if other.compiler:
self.compiler = other.compiler.copy()
self.compiler = other.compiler.copy() if other.compiler else None
self.dependents = DependencyMap()
copy_deps = kwargs.get('deps', True)
if copy_deps:
self.dependencies = other.dependencies.copy()
else:
self.dependencies = DependencyMap()
self.dependencies = DependencyMap()
# If we copy dependencies, preserve DAG structure in the new spec
if kwargs.get('deps', True):
# This copies the deps from other using _dup(deps=False)
new_nodes = other.flat_dependencies()
new_nodes[self.name] = self
# Hook everything up properly here by traversing.
for spec in other.traverse(cover='nodes'):
parent = new_nodes[spec.name]
for child in spec.dependencies:
if child not in parent.dependencies:
parent._add_dependency(new_nodes[child])
# Since we preserved structure, we can copy _normal safely.
self._normal = other._normal
self._concrete = other._concrete

View file

@ -29,10 +29,12 @@
import tempfile
import shutil
import os
from contextlib import closing
from llnl.util.filesystem import *
import spack
from spack.spec import Spec
from spack.packages import PackageDB
from spack.directory_layout import SpecHashDirectoryLayout
@ -83,6 +85,18 @@ def test_read_and_write_spec(self):
# Make sure spec file can be read back in to get the original spec
spec_from_file = self.layout.read_spec(spec_path)
self.assertEqual(spec, spec_from_file)
self.assertTrue(spec.eq_dag(spec_from_file))
self.assertTrue(spec_from_file.concrete)
# Ensure that specs that come out "normal" are really normal.
with closing(open(spec_path)) as spec_file:
read_separately = Spec(spec_file.read())
read_separately.normalize()
self.assertEqual(read_separately, spec_from_file)
read_separately.concretize()
self.assertEqual(read_separately, spec_from_file)
# Make sure the dep hash of the read-in spec is the same
self.assertEqual(spec.dep_hash(), spec_from_file.dep_hash())

View file

@ -392,3 +392,56 @@ def test_contains(self):
self.assertIn(Spec('libdwarf'), spec)
self.assertNotIn(Spec('libgoblin'), spec)
self.assertIn(Spec('mpileaks'), spec)
def _check_copy(self, orig):
    """Shared checks for Spec.copy(): the copy must compare equal to the
    original, have an identical DAG shape and normal/concrete flags, and
    share no node objects with the original."""
    copy = orig.copy()
    self.check_links(copy)

    self.assertEqual(orig, copy)
    self.assertTrue(orig.eq_dag(copy))
    self.assertEqual(orig._normal, copy._normal)
    self.assertEqual(orig._concrete, copy._concrete)

    # ensure no shared nodes bt/w orig and copy.
    orig_ids = set(id(s) for s in orig.traverse())
    copy_ids = set(id(s) for s in copy.traverse())
    self.assertFalse(orig_ids.intersection(copy_ids))

def test_copy_simple(self):
    # un-normalized, un-concretized spec
    self._check_copy(Spec('mpileaks'))

def test_copy_normalized(self):
    orig = Spec('mpileaks')
    orig.normalize()
    self._check_copy(orig)

def test_copy_concretized(self):
    orig = Spec('mpileaks')
    orig.concretize()
    self._check_copy(orig)