Merge pull request #21 in SCALE/spack from features/directory-layout-test to develop
# By Todd Gamblin
# Via Todd Gamblin

* commit '98797459f343c400f4f6fe988bae47d4bab9116b':
  Minor tweaks after spec update.
  More spec improvements
  Add postorder traversal to specs
  Clean up specs, spec comparison, and spec hashing.
commit 884a4fecd1
11 changed files with 595 additions and 177 deletions
@@ -81,7 +81,7 @@
 # stage directories.
 #
 from spack.directory_layout import SpecHashDirectoryLayout
-install_layout = SpecHashDirectoryLayout(install_path, prefix_size=6)
+install_layout = SpecHashDirectoryLayout(install_path)

 #
 # This controls how things are concretized in spack.
@@ -29,7 +29,7 @@
 import spack
 import spack.cmd

-description = "Show dependent packages."
+description = "Show installed packages that depend on another."

 def setup_parser(subparser):
     subparser.add_argument(
@@ -42,5 +42,5 @@ def dependents(parser, args):
         tty.die("spack dependents takes only one spec.")

     fmt = '$_$@$%@$+$=$#'
-    deps = [d.format(fmt) for d in specs[0].package.installed_dependents]
-    tty.msg("Dependents of %s" % specs[0].format(fmt), *deps)
+    deps = [d.format(fmt, color=True) for d in specs[0].package.installed_dependents]
+    tty.msg("Dependents of %s" % specs[0].format(fmt, color=True), *deps)
@@ -89,7 +89,7 @@ def find(parser, args):
             format = " %-{}s%s".format(width)

             for abbrv, spec in zip(abbreviated, specs):
-                print format % (abbrv, spec.package.prefix)
+                print format % (abbrv, spec.prefix)

         elif args.full_specs:
             for spec in specs:
@@ -118,7 +118,7 @@ def concretize_compiler(self, spec):
             return

         try:
-            nearest = next(p for p in spec.preorder_traversal(direction='parents')
+            nearest = next(p for p in spec.traverse(direction='parents')
                            if p.compiler is not None).compiler

             if not nearest in all_compilers:
@@ -29,7 +29,10 @@
 import shutil
 from contextlib import closing

+import llnl.util.tty as tty
 from llnl.util.filesystem import join_path, mkdirp

+import spack
 from spack.spec import Spec
 from spack.error import SpackError
+
@@ -131,12 +134,9 @@ def __init__(self, root, **kwargs):
         """Prefix size is number of characters in the SHA-1 prefix to use
            to make each hash unique.
         """
-        prefix_size = kwargs.get('prefix_size', 8)
-        spec_file = kwargs.get('spec_file', '.spec')
-
+        spec_file_name = kwargs.get('spec_file_name', '.spec')
         super(SpecHashDirectoryLayout, self).__init__(root)
-        self.prefix_size = prefix_size
-        self.spec_file = spec_file
+        self.spec_file_name = spec_file_name


     def relative_path_for_spec(self, spec):
@@ -154,16 +154,36 @@ def write_spec(self, spec, path):
     def read_spec(self, path):
         """Read the contents of a file and parse them as a spec"""
         with closing(open(path)) as spec_file:
-            string = spec_file.read().replace('\n', '')
-            # Specs from files are assumed normal and concrete
-            return Spec(string, concrete=True)
+            spec = Spec(spec_file.read().replace('\n', ''))
+
+        # If we do not have a package on hand for this spec, we know
+        # it is concrete, and we *assume* that it is normal. This
+        # prevents us from trying to fetch a non-existing package, and
+        # allows best effort for commands like spack find.
+        if not spack.db.exists(spec.name):
+            spec._normal = True
+            spec._concrete = True
+        else:
+            spec.normalize()
+            if not spec.concrete:
+                tty.warn("Spec read from installed package is not concrete:",
+                         path, spec)
+
+        return spec
+
+
+    def spec_file_path(self, spec):
+        """Gets full path to spec file"""
+        _check_concrete(spec)
+        return join_path(self.path_for_spec(spec), self.spec_file_name)
+
+
     def make_path_for_spec(self, spec):
         _check_concrete(spec)

         path = self.path_for_spec(spec)
-        spec_file_path = join_path(path, self.spec_file)
+        spec_file_path = self.spec_file_path(spec)

         if os.path.isdir(path):
             if not os.path.isfile(spec_file_path):
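A minimal sketch of the best-effort behavior the new read_spec() enables; the install root path here is illustrative, not from the commit:

    # Illustrative only: iterate every spec the layout knows about, even for
    # packages whose package files no longer exist in the repository.
    layout = SpecHashDirectoryLayout('/tmp/spack-install-root')
    for spec in layout.all_specs():
        # read_spec() marked unknown packages normal and concrete above,
        # so formatting and querying them still works.
        print spec.format('$_$@')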
@@ -177,8 +197,7 @@ def make_path_for_spec(self, spec):
                 spec_hash = self.hash_spec(spec)
                 installed_hash = self.hash_spec(installed_spec)
                 if installed_spec == spec_hash:
-                    raise SpecHashCollisionError(
-                        installed_hash, spec_hash, self.prefix_size)
+                    raise SpecHashCollisionError(installed_hash, spec_hash)
                 else:
                     raise InconsistentInstallDirectoryError(
                         'Spec file in %s does not match SHA-1 hash!'
@@ -195,7 +214,7 @@ def all_specs(self):
         for path in traverse_dirs_at_depth(self.root, 3):
             arch, compiler, last_dir = path
             spec_file_path = join_path(
-                self.root, arch, compiler, last_dir, self.spec_file)
+                self.root, arch, compiler, last_dir, self.spec_file_name)
             if os.path.exists(spec_file_path):
                 spec = self.read_spec(spec_file_path)
                 yield spec
@@ -209,10 +228,10 @@ def __init__(self, message):

 class SpecHashCollisionError(DirectoryLayoutError):
     """Raised when there is a hash collision in an SpecHashDirectoryLayout."""
-    def __init__(self, installed_spec, new_spec, prefix_size):
+    def __init__(self, installed_spec, new_spec):
         super(SpecHashDirectoryLayout, self).__init__(
-            'Specs %s and %s have the same %d character SHA-1 prefix!'
-            % prefix_size, installed_spec, new_spec)
+            'Specs %s and %s have the same SHA-1 prefix!'
+            % installed_spec, new_spec)


 class InconsistentInstallDirectoryError(DirectoryLayoutError):
@@ -69,9 +69,9 @@ def get(self, spec):

         if not spec in self.instances:
             package_class = self.get_class_for_package_name(spec.name)
-            self.instances[spec.name] = package_class(spec)
+            self.instances[spec.copy()] = package_class(spec)

-        return self.instances[spec.name]
+        return self.instances[spec]


     @_autospec
@@ -115,7 +115,13 @@ def installed_package_specs(self):
         """Read installed package names straight from the install directory
            layout.
         """
-        return spack.install_layout.all_specs()
+        # Get specs from the directory layout but ensure that they're
+        # all normalized properly.
+        installed = []
+        for spec in spack.install_layout.all_specs():
+            spec.normalize()
+            installed.append(spec)
+        return installed


     @memoized
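A sketch of what callers now get back: normalized Spec objects rather than specs straight off disk (assuming the PackageDB instance is reachable as spack.db, as elsewhere in this diff):

    # Every spec from the install layout has been normalize()d, so its
    # DAG shape matches what the rest of spack expects.
    for spec in spack.db.installed_package_specs():
        print spec.format('$_$@$%@')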
@@ -179,24 +185,6 @@ def get_class_for_package_name(self, pkg_name):
         return cls


-    def compute_dependents(self):
-        """Reads in all package files and sets dependence information on
-           Package objects in memory.
-        """
-        if not hasattr(compute_dependents, index):
-            compute_dependents.index = {}
-
-        for pkg in all_packages():
-            if pkg._dependents is None:
-                pkg._dependents = []
-
-            for name, dep in pkg.dependencies.iteritems():
-                dpkg = self.get(name)
-                if dpkg._dependents is None:
-                    dpkg._dependents = []
-                dpkg._dependents.append(pkg.name)
-
-
     def graph_dependencies(self, out=sys.stdout):
         """Print out a graph of all the dependencies between packages.
            Graph is in dot format."""
@@ -211,10 +199,17 @@ def quote(string):
             return '"%s"' % string

-        for pkg in all_packages():
+        deps = []
+        for pkg in self.all_packages():
             out.write('  %-30s [label="%s"]\n' % (quote(pkg.name), pkg.name))

+            # Add edges for each depends_on in the package.
+            for dep_name, dep in pkg.dependencies.iteritems():
+                deps.append((pkg.name, dep_name))
+
+            # If the package provides something, add an edge for that.
+            for provider in set(p.name for p in pkg.provided):
+                deps.append((provider, pkg.name))

         out.write('\n')

         for pair in deps:
@@ -94,6 +94,7 @@
 import itertools
 import hashlib
 from StringIO import StringIO
+from operator import attrgetter

 import llnl.util.tty as tty
 from llnl.util.lang import *
@@ -309,9 +310,8 @@ def concrete(self):


     def __str__(self):
-        sorted_dep_names = sorted(self.keys())
         return ''.join(
-            ["^" + str(self[name]) for name in sorted_dep_names])
+            ["^" + str(self[name]) for name in sorted(self.keys())])


 @key_ordering
@@ -352,10 +352,6 @@ def __init__(self, spec_like, *dep_like, **kwargs):
         self._normal = kwargs.get('normal', False)
         self._concrete = kwargs.get('concrete', False)

-        # Specs cannot be concrete and non-normal.
-        if self._concrete:
-            self._normal = True
-
         # This allows users to construct a spec DAG with literals.
         # Note that given two specs a and b, Spec(a) copies a, but
         # Spec(a, b) will copy a but just add b as a dep.
@@ -454,10 +450,19 @@ def concrete(self):
         return self._concrete


-    def preorder_traversal(self, visited=None, d=0, **kwargs):
-        """Generic preorder traversal of the DAG represented by this spec.
+    def traverse(self, visited=None, d=0, **kwargs):
+        """Generic traversal of the DAG represented by this spec.
            This will yield each node in the spec.  Options:

+           order    [=pre|post]
+               Order to traverse spec nodes.  Defaults to preorder traversal.
+               Options are:
+
+               'pre':  Pre-order traversal; each node is yielded before its
+                       children in the dependency DAG.
+               'post': Post-order traversal; each node is yielded after its
+                       children in the dependency DAG.
+
            cover    [=nodes|edges|paths]
                Determines how extensively to cover the dag.  Possible values:

@@ -475,7 +480,7 @@ def preorder_traversal(self, visited=None, d=0, **kwargs):
            spec, but also their depth from the root in a (depth, node)
            tuple.

-           keyfun   [=id]
+           key      [=id]
               Allow a custom key function to track the identity of nodes
               in the traversal.

@@ -487,44 +492,57 @@ def preorder_traversal(self, visited=None, d=0, **kwargs):
            'parents', traverses upwards in the DAG towards the root.

        """
         # get initial values for kwargs
         depth = kwargs.get('depth', False)
-        key_fun = kwargs.get('keyfun', id)
+        key_fun = kwargs.get('key', id)
+        if isinstance(key_fun, basestring):
+            key_fun = attrgetter(key_fun)
         yield_root = kwargs.get('root', True)
         cover = kwargs.get('cover', 'nodes')
         direction = kwargs.get('direction', 'children')
+        order = kwargs.get('order', 'pre')

-        cover_values = ('nodes', 'edges', 'paths')
-        if cover not in cover_values:
-            raise ValueError("Invalid value for cover: %s. Choices are %s"
-                             % (cover, ",".join(cover_values)))
-
-        direction_values = ('children', 'parents')
-        if direction not in direction_values:
-            raise ValueError("Invalid value for direction: %s. Choices are %s"
-                             % (direction, ",".join(direction_values)))
+        # Make sure kwargs have legal values; raise ValueError if not.
+        def validate(name, val, allowed_values):
+            if val not in allowed_values:
+                raise ValueError("Invalid value for %s: %s. Choices are %s"
+                                 % (name, val, ",".join(allowed_values)))
+        validate('cover', cover, ('nodes', 'edges', 'paths'))
+        validate('direction', direction, ('children', 'parents'))
+        validate('order', order, ('pre', 'post'))

         if visited is None:
             visited = set()

-        result = (d, self) if depth else self
         key = key_fun(self)

-        if key in visited:
-            if cover == 'nodes': return
-            if yield_root or d > 0: yield result
-            if cover == 'edges': return
-        else:
-            if yield_root or d > 0: yield result
+        # Node traversal does not yield visited nodes.
+        if key in visited and cover == 'nodes':
+            return

-        successors = self.dependencies
-        if direction == 'parents':
-            successors = self.dependents
+        # Determine whether and what to yield for this node.
+        yield_me = yield_root or d > 0
+        result = (d, self) if depth else self

-        visited.add(key)
-        for name in sorted(successors):
-            child = successors[name]
-            for elt in child.preorder_traversal(visited, d+1, **kwargs):
-                yield elt
+        # Preorder traversal yields before successors
+        if yield_me and order == 'pre':
+            yield result
+
+        # Edge traversal yields but skips children of visited nodes
+        if not (key in visited and cover == 'edges'):
+            # This code determines direction and yields the children/parents
+            successors = self.dependencies
+            if direction == 'parents':
+                successors = self.dependents
+
+            visited.add(key)
+            for name in sorted(successors):
+                child = successors[name]
+                for elt in child.traverse(visited, d+1, **kwargs):
+                    yield elt
+
+        # Postorder traversal yields after successors
+        if yield_me and order == 'post':
+            yield result


     @property
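A short usage sketch of the options the new traverse() accepts; the spec here is hypothetical and the yielded names depend on the actual package DAG:

    s = Spec('mpileaks ^zmpi')
    s.normalize()

    # Default: pre-order over unique nodes.
    pre = [node.name for node in s.traverse()]

    # New in this commit: post-order, where children come before parents.
    post = [node.name for node in s.traverse(order='post')]

    # cover='edges' revisits a node once per incoming edge;
    # depth=True yields (depth, node) tuples instead of bare nodes.
    for d, node in s.traverse(cover='edges', depth=True):
        print "%d %s" % (d, node.name)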
@@ -540,13 +558,14 @@ def prefix(self):


     def dep_hash(self, length=None):
-        """Return a hash representing the dependencies of this spec
-           This will always normalize first so that the hash is consistent.
-        """
-        self.normalize()
-
+        """Return a hash representing all dependencies of this spec
+           (direct and indirect).
+
+           If you want this hash to be consistent, you should
+           concretize the spec first so that it is not ambiguous.
+        """
         sha = hashlib.sha1()
-        sha.update(str(self.dependencies))
+        sha.update(self.dep_string())
         full_hash = sha.hexdigest()

         return full_hash[:length]
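Since dep_string() sorts dependencies by name, the hash no longer depends on construction order; only the dependency set matters. A sketch, assuming a spec that concretizes:

    spec = Spec('mpileaks')
    spec.concretize()

    print spec.dep_hash()    # full 40-character SHA-1 hex digest
    print spec.dep_hash(8)   # truncated prefix (the length arg is optional)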
@@ -609,7 +628,7 @@ def _expand_virtual_packages(self):
            a problem.
         """
         while True:
-            virtuals =[v for v in self.preorder_traversal() if v.virtual]
+            virtuals =[v for v in self.traverse() if v.virtual]
             if not virtuals:
                 return

@@ -620,7 +639,7 @@ def _expand_virtual_packages(self):
             spec._replace_with(concrete)

         # If there are duplicate providers or duplicate provider deps, this
-        # consolidates them and merges constraints.
+        # consolidates them and merge constraints.
         self.normalize(force=True)

@@ -654,47 +673,51 @@ def concretized(self):
         return clone


-    def flat_dependencies(self):
-        """Return a DependencyMap containing all of this spec's dependencies
-           with their constraints merged.  If there are any conflicts, throw
-           an exception.
+    def flat_dependencies(self, **kwargs):
+        """Return a DependencyMap containing all of this spec's
+           dependencies with their constraints merged.

-           This will work even on specs that are not normalized; i.e. specs
-           that have two instances of the same dependency in the DAG.
-           This is used as the first step of normalization.
+           If copy is True, returns merged copies of its dependencies
+           without modifying the spec it's called on.
+
+           If copy is False, clears this spec's dependencies and
+           returns them.
         """
-        # This ensures that the package descriptions themselves are consistent
-        if not self.virtual:
-            self.package.validate_dependencies()
+        copy = kwargs.get('copy', True)

-        # Once that is guaranteed, we know any constraint violations are due
-        # to the spec -- so they're the user's fault, not Spack's.
         flat_deps = DependencyMap()
         try:
-            for spec in self.preorder_traversal():
+            for spec in self.traverse(root=False):
                 if spec.name not in flat_deps:
-                    new_spec = spec.copy(dependencies=False)
-                    flat_deps[spec.name] = new_spec
+                    if copy:
+                        flat_deps[spec.name] = spec.copy(deps=False)
+                    else:
+                        flat_deps[spec.name] = spec
                 else:
                     flat_deps[spec.name].constrain(spec)

-        except UnsatisfiableSpecError, e:
-            # This REALLY shouldn't happen unless something is wrong in spack.
-            # It means we got a spec DAG with two instances of the same package
-            # that had inconsistent constraints.  There's no way for a user to
-            # produce a spec like this (the parser adds all deps to the root),
-            # so this means OUR code is not sane!
-            raise InconsistentSpecError("Invalid Spec DAG: %s" % e.message)
+            if not copy:
+                for dep in flat_deps.values():
+                    dep.dependencies.clear()
+                    dep.dependents.clear()
+                self.dependencies.clear()

-        return flat_deps
+            return flat_deps
+
+        except UnsatisfiableSpecError, e:
+            # Here, the DAG contains two instances of the same package
+            # with inconsistent constraints.  Users cannot produce
+            # inconsistent specs like this on the command line: the
+            # parser doesn't allow it. Spack must be broken!
+            raise InconsistentSpecError("Invalid Spec DAG: %s" % e.message)


     def flatten(self):
         """Pull all dependencies up to the root (this spec).
            Merge constraints for dependencies with the same name, and if they
            conflict, throw an exception. """
-        self.dependencies = self.flat_dependencies()
+        for dep in self.flat_dependencies(copy=False):
+            self._add_dependency(dep)


     def _normalize_helper(self, visited, spec_deps, provider_index):
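A sketch of the two modes of the new copy kwarg:

    spec = Spec('mpileaks ^callpath ^libelf')

    # copy=True (the default): merged copies; `spec` is left untouched.
    deps = spec.flat_dependencies()

    # copy=False: destructive; the dependencies are stripped off of `spec`
    # and returned.  normalize() uses this mode as its first step.
    deps = spec.flat_dependencies(copy=False)
    assert not spec.dependencies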
@@ -797,11 +820,12 @@ def normalize(self, **kwargs):
         # Ensure first that all packages & compilers in the DAG exist.
         self.validate_names()

-        # Then ensure that the packages referenced are sane, that the
-        # provided spec is sane, and that all dependency specs are in the
-        # root node of the spec.  flat_dependencies will do this for us.
-        spec_deps = self.flat_dependencies()
-        self.dependencies.clear()
+        # Ensure that the package & dep descriptions are consistent & sane
+        if not self.virtual:
+            self.package.validate_dependencies()
+
+        # Get all the dependencies into one DependencyMap
+        spec_deps = self.flat_dependencies(copy=False)

         # Figure out which of the user-provided deps provide virtual deps.
         # Remove virtual deps that are already provided by something in the spec
@@ -843,7 +867,7 @@ def validate_names(self):
            If they're not, it will raise either UnknownPackageError or
            UnsupportedCompilerError.
         """
-        for spec in self.preorder_traversal():
+        for spec in self.traverse():
             # Don't get a package for a virtual name.
             if not spec.virtual:
                 spack.db.get(spec.name)
@@ -911,17 +935,17 @@ def _constrain_dependencies(self, other):
     def common_dependencies(self, other):
         """Return names of dependencies that self and other have in common."""
         common = set(
-            s.name for s in self.preorder_traversal(root=False))
+            s.name for s in self.traverse(root=False))
         common.intersection_update(
-            s.name for s in other.preorder_traversal(root=False))
+            s.name for s in other.traverse(root=False))
         return common


     def dep_difference(self, other):
         """Returns dependencies in self that are not in other."""
-        mine = set(s.name for s in self.preorder_traversal(root=False))
+        mine = set(s.name for s in self.traverse(root=False))
         mine.difference_update(
-            s.name for s in other.preorder_traversal(root=False))
+            s.name for s in other.traverse(root=False))
         return mine

@@ -980,8 +1004,8 @@ def satisfies_dependencies(self, other):
             return False

         # For virtual dependencies, we need to dig a little deeper.
-        self_index = ProviderIndex(self.preorder_traversal(), restrict=True)
-        other_index = ProviderIndex(other.preorder_traversal(), restrict=True)
+        self_index = ProviderIndex(self.traverse(), restrict=True)
+        other_index = ProviderIndex(other.traverse(), restrict=True)

         # This handles cases where there are already providers for both vpkgs
         if not self_index.satisfies(other_index):
@@ -1003,7 +1027,7 @@ def satisfies_dependencies(self, other):

     def virtual_dependencies(self):
         """Return list of any virtual deps in this spec."""
-        return [spec for spec in self.preorder_traversal() if spec.virtual]
+        return [spec for spec in self.traverse() if spec.virtual]


     def _dup(self, other, **kwargs):
@@ -1018,22 +1042,29 @@ def _dup(self, other, **kwargs):
            Whether deps should be copied too.  Set to false to copy a
            spec but not its dependencies.
         """
-        # TODO: this needs to handle DAGs.
+        # Local node attributes get copied first.
         self.name = other.name
         self.versions = other.versions.copy()
         self.variants = other.variants.copy()
         self.architecture = other.architecture
-        self.compiler = None
-        if other.compiler:
-            self.compiler = other.compiler.copy()
-
+        self.compiler = other.compiler.copy() if other.compiler else None
         self.dependents = DependencyMap()
-        copy_deps = kwargs.get('dependencies', True)
-        if copy_deps:
-            self.dependencies = other.dependencies.copy()
-        else:
-            self.dependencies = DependencyMap()
+        self.dependencies = DependencyMap()
+
+        # If we copy dependencies, preserve DAG structure in the new spec
+        if kwargs.get('deps', True):
+            # This copies the deps from other using _dup(deps=False)
+            new_nodes = other.flat_dependencies()
+            new_nodes[self.name] = self
+
+            # Hook everything up properly here by traversing.
+            for spec in other.traverse(cover='nodes'):
+                parent = new_nodes[spec.name]
+                for child in spec.dependencies:
+                    if child not in parent.dependencies:
+                        parent._add_dependency(new_nodes[child])
+
+        # Since we preserved structure, we can copy _normal safely.
+        self._normal = other._normal
+        self._concrete = other._concrete
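The copy tests added at the end of this change exercise exactly this property; a sketch of the invariant the new _dup() maintains:

    orig = Spec('mpileaks')
    orig.normalize()
    copy = orig.copy()

    # Same values, same DAG structure, but physically distinct nodes.
    assert orig == copy and orig.eq_dag(copy)
    orig_ids = set(id(s) for s in orig.traverse())
    copy_ids = set(id(s) for s in copy.traverse())
    assert not orig_ids.intersection(copy_ids)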
@@ -1057,7 +1088,7 @@ def version(self):

     def __getitem__(self, name):
         """TODO: reconcile __getitem__, _add_dependency, __contains__"""
-        for spec in self.preorder_traversal():
+        for spec in self.traverse():
             if spec.name == name:
                 return spec

@@ -1068,15 +1099,82 @@ def __contains__(self, spec):
         """True if this spec has any dependency that satisfies the supplied
            spec."""
         spec = self._autospec(spec)
-        for s in self.preorder_traversal():
+        for s in self.traverse():
             if s.satisfies(spec):
                 return True
         return False


-    def _cmp_key(self):
+    def sorted_deps(self):
+        """Return a list of all dependencies sorted by name."""
+        deps = self.flat_dependencies()
+        return tuple(deps[name] for name in sorted(deps))
+
+
+    def _eq_dag(self, other, vs, vo):
+        """Recursive helper for eq_dag and ne_dag.  Does the actual DAG
+           traversal."""
+        vs.add(id(self))
+        vo.add(id(other))
+
+        if self.ne_node(other):
+            return False
+
+        if len(self.dependencies) != len(other.dependencies):
+            return False
+
+        ssorted = [self.dependencies[name] for name in sorted(self.dependencies)]
+        osorted = [other.dependencies[name] for name in sorted(other.dependencies)]
+
+        for s, o in zip(ssorted, osorted):
+            visited_s = id(s) in vs
+            visited_o = id(o) in vo
+
+            # Check for duplicate or non-equal dependencies
+            if visited_s != visited_o: return False
+
+            # Skip visited nodes
+            if visited_s or visited_o: continue
+
+            # Recursive check for equality
+            if not s._eq_dag(o, vs, vo):
+                return False
+
+        return True
+
+
+    def eq_dag(self, other):
+        """True if the full dependency DAGs of specs are equal"""
+        return self._eq_dag(other, set(), set())
+
+
+    def ne_dag(self, other):
+        """True if the full dependency DAGs of specs are not equal"""
+        return not self.eq_dag(other)
+
+
+    def _cmp_node(self):
+        """Comparison key for just *this node* and not its deps."""
         return (self.name, self.versions, self.variants,
-                self.architecture, self.compiler, self.dependencies)
+                self.architecture, self.compiler)
+
+
+    def eq_node(self, other):
+        """Equality with another spec, not including dependencies."""
+        return self._cmp_node() == other._cmp_node()
+
+
+    def ne_node(self, other):
+        """Inequality with another spec, not including dependencies."""
+        return self._cmp_node() != other._cmp_node()
+
+
+    def _cmp_key(self):
+        """Comparison key for this node and all dependencies *without*
+           considering structure.  This is the default, as
+           normalization will restore structure.
+        """
+        return self._cmp_node() + (self.sorted_deps(),)


     def colorized(self):
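A sketch of the distinction these new methods draw, mirroring the test_equal changes below: == compares values through _cmp_key() and ignores nesting, while eq_dag() compares structure:

    flat = Spec('mpileaks ^callpath ^libdwarf ^libelf')
    dag = Spec('mpileaks', Spec('callpath',
                                Spec('libdwarf',
                                     Spec('libelf'))))

    assert flat == dag           # same nodes and constraints, equal values
    assert not flat.eq_dag(dag)  # different nesting, different DAGs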
@@ -1179,12 +1277,12 @@ def write(s, c):
         return result


+    def dep_string(self):
+        return ''.join("^" + dep.format() for dep in self.sorted_deps())
+
+
     def __str__(self):
-        by_name = lambda d: d.name
-        deps = self.preorder_traversal(key=by_name, root=False)
-        sorted_deps = sorted(deps, key=by_name)
-        dep_string = ''.join("^" + dep.format() for dep in sorted_deps)
-        return self.format() + dep_string
+        return self.format() + self.dep_string()


     def tree(self, **kwargs):
@@ -1200,7 +1298,7 @@ def tree(self, **kwargs):
         out = ""
         cur_id = 0
         ids = {}
-        for d, node in self.preorder_traversal(cover=cover, depth=True):
+        for d, node in self.traverse(order='pre', cover=cover, depth=True):
             out += " " * indent
             if depth:
                 out += "%-4d" % d
@@ -45,7 +45,8 @@
           'multimethod',
           'install',
           'package_sanity',
-          'config']
+          'config',
+          'directory_layout']


 def list_tests():
lib/spack/spack/test/directory_layout.py (new file, 155 lines)
@@ -0,0 +1,155 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""\
+This test verifies that the Spack directory layout works properly.
+"""
+import unittest
+import tempfile
+import shutil
+import os
+from contextlib import closing
+
+from llnl.util.filesystem import *
+
+import spack
+from spack.spec import Spec
+from spack.packages import PackageDB
+from spack.directory_layout import SpecHashDirectoryLayout
+
+class DirectoryLayoutTest(unittest.TestCase):
+    """Tests that a directory layout works correctly and produces a
+       consistent install path."""
+
+    def setUp(self):
+        self.tmpdir = tempfile.mkdtemp()
+        self.layout = SpecHashDirectoryLayout(self.tmpdir)
+
+
+    def tearDown(self):
+        shutil.rmtree(self.tmpdir, ignore_errors=True)
+        self.layout = None
+
+
+    def test_read_and_write_spec(self):
+        """This goes through each package in spack and creates a directory for
+           it.  It then ensures that the spec for the directory's
+           installed package can be read back in consistently, and
+           finally that the directory can be removed by the directory
+           layout.
+        """
+        for pkg in spack.db.all_packages():
+            spec = pkg.spec
+
+            # If a spec fails to concretize, just skip it.  If it is a
+            # real error, it will be caught by concretization tests.
+            try:
+                spec.concretize()
+            except:
+                continue
+
+            self.layout.make_path_for_spec(spec)
+
+            install_dir = self.layout.path_for_spec(spec)
+            spec_path = self.layout.spec_file_path(spec)
+
+            # Ensure directory has been created in right place.
+            self.assertTrue(os.path.isdir(install_dir))
+            self.assertTrue(install_dir.startswith(self.tmpdir))
+
+            # Ensure spec file exists when directory is created
+            self.assertTrue(os.path.isfile(spec_path))
+            self.assertTrue(spec_path.startswith(install_dir))
+
+            # Make sure spec file can be read back in to get the original spec
+            spec_from_file = self.layout.read_spec(spec_path)
+            self.assertEqual(spec, spec_from_file)
+            self.assertTrue(spec.eq_dag, spec_from_file)
+            self.assertTrue(spec_from_file.concrete)
+
+            # Ensure that specs that come out "normal" are really normal.
+            with closing(open(spec_path)) as spec_file:
+                read_separately = Spec(spec_file.read())
+
+                read_separately.normalize()
+                self.assertEqual(read_separately, spec_from_file)
+
+                read_separately.concretize()
+                self.assertEqual(read_separately, spec_from_file)
+
+            # Make sure the dep hash of the read-in spec is the same
+            self.assertEqual(spec.dep_hash(), spec_from_file.dep_hash())
+
+            # Ensure directories are properly removed
+            self.layout.remove_path_for_spec(spec)
+            self.assertFalse(os.path.isdir(install_dir))
+            self.assertFalse(os.path.exists(install_dir))
+
+
+    def test_handle_unknown_package(self):
+        """This test ensures that spack can at least do *some*
+           operations with packages that are installed but that it
+           does not know about.  This is actually not such an uncommon
+           scenario with spack; it can happen when you switch from a
+           git branch where you're working on a new package.
+
+           This test ensures that the directory layout stores enough
+           information about installed packages' specs to uninstall
+           or query them again if the package goes away.
+        """
+        mock_db = PackageDB(spack.mock_packages_path)
+
+        not_in_mock = set(spack.db.all_package_names()).difference(
+            set(mock_db.all_package_names()))
+
+        # Create all the packages that are not in mock.
+        installed_specs = {}
+        for pkg_name in not_in_mock:
+            spec = spack.db.get(pkg_name).spec
+
+            # If a spec fails to concretize, just skip it.  If it is a
+            # real error, it will be caught by concretization tests.
+            try:
+                spec.concretize()
+            except:
+                continue
+
+            self.layout.make_path_for_spec(spec)
+            installed_specs[spec] = self.layout.path_for_spec(spec)
+
+        tmp = spack.db
+        spack.db = mock_db
+
+        # Now check that even without the package files, we know
+        # enough to read a spec from the spec file.
+        for spec, path in installed_specs.items():
+            spec_from_file = self.layout.read_spec(join_path(path, '.spec'))
+
+            # To satisfy these conditions, directory layouts need to
+            # read in concrete specs from their install dirs somehow.
+            self.assertEqual(path, self.layout.path_for_spec(spec_from_file))
+            self.assertEqual(spec, spec_from_file)
+            self.assertEqual(spec.dep_hash(), spec_from_file.dep_hash())
+
+        spack.db = tmp
@@ -25,15 +25,18 @@
 import os
 import unittest
+import shutil
+import tempfile
 from contextlib import closing

 from llnl.util.filesystem import *

 import spack
 from spack.stage import Stage
+from spack.directory_layout import SpecHashDirectoryLayout
 from spack.util.executable import which
 from spack.test.mock_packages_test import *


 dir_name = 'trivial-1.0'
 archive_name = 'trivial-1.0.tar.gz'
 install_test_package = 'trivial_install_test_package'
@@ -66,9 +69,16 @@ def setUp(self):
         tar = which('tar')
         tar('-czf', archive_name, dir_name)

-        # We use a fake pacakge, so skip the checksum.
+        # We use a fake package, so skip the checksum.
         spack.do_checksum = False

+        # Use a fake install directory to avoid conflicts bt/w
+        # installed pkgs and mock packages.
+        self.tmpdir = tempfile.mkdtemp()
+        self.orig_layout = spack.install_layout
+        spack.install_layout = SpecHashDirectoryLayout(self.tmpdir)
+

     def tearDown(self):
         super(InstallTest, self).tearDown()
@@ -78,6 +88,10 @@ def tearDown(self):
         # Turn checksumming back on
         spack.do_checksum = True

+        # restore spack's layout.
+        spack.install_layout = self.orig_layout
+        shutil.rmtree(self.tmpdir, ignore_errors=True)
+

     def test_install_and_uninstall(self):
         # Get a basic concrete spec for the trivial install package.
@@ -48,7 +48,7 @@ def test_conflicting_package_constraints(self):
                           spec.package.validate_dependencies)


-    def test_unique_node_traversal(self):
+    def test_preorder_node_traversal(self):
         dag = Spec('mpileaks ^zmpi')
         dag.normalize()

@@ -56,14 +56,14 @@ def test_unique_node_traversal(self):
                  'zmpi', 'fake']
         pairs = zip([0,1,2,3,4,2,3], names)

-        traversal = dag.preorder_traversal()
+        traversal = dag.traverse()
         self.assertListEqual([x.name for x in traversal], names)

-        traversal = dag.preorder_traversal(depth=True)
+        traversal = dag.traverse(depth=True)
         self.assertListEqual([(x, y.name) for x,y in traversal], pairs)


-    def test_unique_edge_traversal(self):
+    def test_preorder_edge_traversal(self):
         dag = Spec('mpileaks ^zmpi')
         dag.normalize()

@@ -71,14 +71,14 @@ def test_unique_edge_traversal(self):
                  'libelf', 'zmpi', 'fake', 'zmpi']
         pairs = zip([0,1,2,3,4,3,2,3,1], names)

-        traversal = dag.preorder_traversal(cover='edges')
+        traversal = dag.traverse(cover='edges')
         self.assertListEqual([x.name for x in traversal], names)

-        traversal = dag.preorder_traversal(cover='edges', depth=True)
+        traversal = dag.traverse(cover='edges', depth=True)
         self.assertListEqual([(x, y.name) for x,y in traversal], pairs)


-    def test_unique_path_traversal(self):
+    def test_preorder_path_traversal(self):
         dag = Spec('mpileaks ^zmpi')
         dag.normalize()

@@ -86,10 +86,55 @@ def test_unique_path_traversal(self):
                  'libelf', 'zmpi', 'fake', 'zmpi', 'fake']
         pairs = zip([0,1,2,3,4,3,2,3,1,2], names)

-        traversal = dag.preorder_traversal(cover='paths')
+        traversal = dag.traverse(cover='paths')
         self.assertListEqual([x.name for x in traversal], names)

-        traversal = dag.preorder_traversal(cover='paths', depth=True)
+        traversal = dag.traverse(cover='paths', depth=True)
         self.assertListEqual([(x, y.name) for x,y in traversal], pairs)


+    def test_postorder_node_traversal(self):
+        dag = Spec('mpileaks ^zmpi')
+        dag.normalize()
+
+        names = ['libelf', 'libdwarf', 'dyninst', 'fake', 'zmpi',
+                 'callpath', 'mpileaks']
+        pairs = zip([4,3,2,3,2,1,0], names)
+
+        traversal = dag.traverse(order='post')
+        self.assertListEqual([x.name for x in traversal], names)
+
+        traversal = dag.traverse(depth=True, order='post')
+        self.assertListEqual([(x, y.name) for x,y in traversal], pairs)
+
+
+    def test_postorder_edge_traversal(self):
+        dag = Spec('mpileaks ^zmpi')
+        dag.normalize()
+
+        names = ['libelf', 'libdwarf', 'libelf', 'dyninst', 'fake', 'zmpi',
+                 'callpath', 'zmpi', 'mpileaks']
+        pairs = zip([4,3,3,2,3,2,1,1,0], names)
+
+        traversal = dag.traverse(cover='edges', order='post')
+        self.assertListEqual([x.name for x in traversal], names)
+
+        traversal = dag.traverse(cover='edges', depth=True, order='post')
+        self.assertListEqual([(x, y.name) for x,y in traversal], pairs)
+
+
+    def test_postorder_path_traversal(self):
+        dag = Spec('mpileaks ^zmpi')
+        dag.normalize()
+
+        names = ['libelf', 'libdwarf', 'libelf', 'dyninst', 'fake', 'zmpi',
+                 'callpath', 'fake', 'zmpi', 'mpileaks']
+        pairs = zip([4,3,3,2,3,2,1,2,1,0], names)
+
+        traversal = dag.traverse(cover='paths', order='post')
+        self.assertListEqual([x.name for x in traversal], names)
+
+        traversal = dag.traverse(cover='paths', depth=True, order='post')
+        self.assertListEqual([(x, y.name) for x,y in traversal], pairs)
+
@@ -142,7 +187,7 @@ def test_normalize_with_virtual_spec(self):

         # make sure nothing with the same name occurs twice
         counts = {}
-        for spec in dag.preorder_traversal(keyfun=id):
+        for spec in dag.traverse(key=id):
             if not spec.name in counts:
                 counts[spec.name] = 0
             counts[spec.name] += 1
@@ -152,7 +197,7 @@ def test_normalize_with_virtual_spec(self):


     def check_links(self, spec_to_check):
-        for spec in spec_to_check.preorder_traversal():
+        for spec in spec_to_check.traverse():
             for dependent in spec.dependents.values():
                 self.assertIn(
                     spec.name, dependent.dependencies,
@@ -221,30 +266,53 @@ def test_invalid_dep(self):


     def test_equal(self):
-        spec = Spec('mpileaks ^callpath ^libelf ^libdwarf')
-        self.assertNotEqual(spec, Spec(
-            'mpileaks', Spec('callpath',
-                             Spec('libdwarf',
-                                  Spec('libelf')))))
-        self.assertNotEqual(spec, Spec(
-            'mpileaks', Spec('callpath',
-                             Spec('libelf',
-                                  Spec('libdwarf')))))
+        # Different spec structures to test for equality
+        flat = Spec('mpileaks ^callpath ^libelf ^libdwarf')

-        self.assertEqual(spec, Spec(
-            'mpileaks', Spec('callpath'), Spec('libdwarf'), Spec('libelf')))
+        flat_init = Spec(
+            'mpileaks', Spec('callpath'), Spec('libdwarf'), Spec('libelf'))

-        self.assertEqual(spec, Spec(
-            'mpileaks', Spec('libelf'), Spec('libdwarf'), Spec('callpath')))
+        flip_flat = Spec(
+            'mpileaks', Spec('libelf'), Spec('libdwarf'), Spec('callpath'))
+
+        dag = Spec('mpileaks', Spec('callpath',
+                                    Spec('libdwarf',
+                                         Spec('libelf'))))
+
+        flip_dag = Spec('mpileaks', Spec('callpath',
+                                         Spec('libelf',
+                                              Spec('libdwarf'))))
+
+        # All these are equal to each other with regular ==
+        specs = (flat, flat_init, flip_flat, dag, flip_dag)
+        for lhs, rhs in zip(specs, specs):
+            self.assertEqual(lhs, rhs)
+            self.assertEqual(str(lhs), str(rhs))
+
+        # Same DAGs constructed different ways are equal
+        self.assertTrue(flat.eq_dag(flat_init))
+
+        # order at same level does not matter -- (dep on same parent)
+        self.assertTrue(flat.eq_dag(flip_flat))
+
+        # DAGs should be unequal if nesting is different
+        self.assertFalse(flat.eq_dag(dag))
+        self.assertFalse(flat.eq_dag(flip_dag))
+        self.assertFalse(flip_flat.eq_dag(dag))
+        self.assertFalse(flip_flat.eq_dag(flip_dag))
+        self.assertFalse(dag.eq_dag(flip_dag))


     def test_normalize_mpileaks(self):
+        # Spec parsed in from a string
         spec = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf@1.8.11 ^libdwarf')
+
+        # What that spec should look like after parsing
+        expected_flat = Spec(
+            'mpileaks', Spec('mpich'), Spec('callpath'), Spec('dyninst'),
+            Spec('libelf@1.8.11'), Spec('libdwarf'))
+
+        # What it should look like after normalization
         mpich = Spec('mpich')
         libelf = Spec('libelf@1.8.11')
         expected_normalized = Spec(
|
|||
mpich),
|
||||
mpich)
|
||||
|
||||
expected_non_unique_nodes = Spec(
|
||||
# Similar to normalized spec, but now with copies of the same
|
||||
# libelf node. Normalization should result in a single unique
|
||||
# node for each package, so this is the wrong DAG.
|
||||
non_unique_nodes = Spec(
|
||||
'mpileaks',
|
||||
Spec('callpath',
|
||||
Spec('dyninst',
|
||||
|
@@ -267,21 +338,33 @@ def test_normalize_mpileaks(self):
                     mpich),
             Spec('mpich'))

-        self.assertEqual(expected_normalized, expected_non_unique_nodes)
-
-        self.assertEqual(str(expected_normalized), str(expected_non_unique_nodes))
-        self.assertEqual(str(spec), str(expected_non_unique_nodes))
-        self.assertEqual(str(expected_normalized), str(spec))
+        # All specs here should be equal under regular equality
+        specs = (spec, expected_flat, expected_normalized, non_unique_nodes)
+        for lhs, rhs in zip(specs, specs):
+            self.assertEqual(lhs, rhs)
+            self.assertEqual(str(lhs), str(rhs))

-        self.assertNotEqual(spec, expected_normalized)
-        self.assertNotEqual(spec, expected_non_unique_nodes)
+        # Test that equal and equal_dag are doing the right thing
+        self.assertEqual(spec, expected_flat)
+        self.assertTrue(spec.eq_dag(expected_flat))
+
+        self.assertEqual(spec, expected_normalized)
+        self.assertFalse(spec.eq_dag(expected_normalized))
+
+        self.assertEqual(spec, non_unique_nodes)
+        self.assertFalse(spec.eq_dag(non_unique_nodes))

         spec.normalize()

-        self.assertNotEqual(spec, expected_flat)
+        # After normalizing, spec_dag_equal should match the normalized spec.
+        self.assertEqual(spec, expected_flat)
+        self.assertFalse(spec.eq_dag(expected_flat))

         self.assertEqual(spec, expected_normalized)
-        self.assertEqual(spec, expected_non_unique_nodes)
+        self.assertTrue(spec.eq_dag(expected_normalized))
+
+        self.assertEqual(spec, non_unique_nodes)
+        self.assertFalse(spec.eq_dag(non_unique_nodes))


     def test_normalize_with_virtual_package(self):
@@ -309,3 +392,56 @@ def test_contains(self):
         self.assertIn(Spec('libdwarf'), spec)
         self.assertNotIn(Spec('libgoblin'), spec)
         self.assertIn(Spec('mpileaks'), spec)
+
+
+    def test_copy_simple(self):
+        orig = Spec('mpileaks')
+        copy = orig.copy()
+
+        self.check_links(copy)
+
+        self.assertEqual(orig, copy)
+        self.assertTrue(orig.eq_dag(copy))
+        self.assertEqual(orig._normal, copy._normal)
+        self.assertEqual(orig._concrete, copy._concrete)
+
+        # ensure no shared nodes bt/w orig and copy.
+        orig_ids = set(id(s) for s in orig.traverse())
+        copy_ids = set(id(s) for s in copy.traverse())
+        self.assertFalse(orig_ids.intersection(copy_ids))
+
+
+    def test_copy_normalized(self):
+        orig = Spec('mpileaks')
+        orig.normalize()
+        copy = orig.copy()
+
+        self.check_links(copy)
+
+        self.assertEqual(orig, copy)
+        self.assertTrue(orig.eq_dag(copy))
+        self.assertEqual(orig._normal, copy._normal)
+        self.assertEqual(orig._concrete, copy._concrete)
+
+        # ensure no shared nodes bt/w orig and copy.
+        orig_ids = set(id(s) for s in orig.traverse())
+        copy_ids = set(id(s) for s in copy.traverse())
+        self.assertFalse(orig_ids.intersection(copy_ids))
+
+
+    def test_copy_concretized(self):
+        orig = Spec('mpileaks')
+        orig.concretize()
+        copy = orig.copy()
+
+        self.check_links(copy)
+
+        self.assertEqual(orig, copy)
+        self.assertTrue(orig.eq_dag(copy))
+        self.assertEqual(orig._normal, copy._normal)
+        self.assertEqual(orig._concrete, copy._concrete)
+
+        # ensure no shared nodes bt/w orig and copy.
+        orig_ids = set(id(s) for s in orig.traverse())
+        copy_ids = set(id(s) for s in copy.traverse())
+        self.assertFalse(orig_ids.intersection(copy_ids))