spec flatten, normalize, validate; package validate

New operations for manipulating spec and package DAGs.

For specs:
	flatten:   gather all deps to the root
	normalize: Merge constraints and make spec match package DAG

For packages:
	validate_dependencies: Make sure spec constraints in package DAG are sane.

Added tests for above methods.  Also added beginnings of concretization logic,
to turn abstract spec into a concrete one.

Still need proper tests for normalize().
This commit is contained in:
Todd Gamblin 2013-10-17 14:46:00 -07:00
parent db07c7f611
commit 558cf7e406
21 changed files with 641 additions and 167 deletions

View file

@ -19,14 +19,13 @@ def setup_parser(subparser):
def test(parser, args): def test(parser, args):
if args.all: if args.all:
for name in list_modules(spack.test_path): for name in list_modules(spack.test_path, directories=False):
print "Running Tests: %s" % name print "Running Tests: %s" % name
spack.test.run(name, verbose=args.verbose) spack.test.run(name, verbose=args.verbose)
elif not args.names: elif not args.names:
print "Available tests:" print "Available tests:"
colify(list_modules(spack.test_path)) colify(list_modules(spack.test_path, directories=False))
else: else:
for name in args.names: for name in args.names:

View file

@ -11,5 +11,7 @@ def supported_compilers():
return [c for c in list_modules(spack.compilers_path)] return [c for c in list_modules(spack.compilers_path)]
def get_compiler(): @memoized
return Compiler('gcc', spack.compilers.gcc.get_version()) def default_compiler():
from spack.spec import Compiler
return Compiler('gcc', gcc.get_version())

View file

@ -0,0 +1,80 @@
"""
Functions here are used to take abstract specs and make them concrete.
For example, if a spec asks for a version between 1.8 and 1.9, these
functions might take the most recent 1.9 version of the
package available. Or, if the user didn't specify a compiler for a
spec, then this will assign a compiler to the spec based on defaults
or user preferences.
TODO: make this customizable and allow users to configure
concretization policies.
"""
import spack.arch
import spack.compilers
from spack.version import *
from spack.spec import *
def concretize_version(spec):
    """If the spec is already concrete, return.  Otherwise take
       the most recent available version, and default to the package's
       version if there are no available versions.
    """
    # Already pinned to exactly one version; nothing to do.
    if spec.versions.concrete:
        return

    # BUGFIX: was 'speck.package' (undefined name).
    pkg = spec.package
    available = pkg.available_versions

    # BUGFIX: was 'if versions:' / 'avaialble[-1]' (undefined names).
    # If there are known available versions, take the most recent;
    # otherwise fall back to the version declared by the package.
    if available:
        spec.versions = ver([available[-1]])
    else:
        spec.versions = ver([pkg.version])
def concretize_architecture(spec):
    """If the spec already has an architecture, leave it alone.
       Otherwise inherit the architecture of the root of the DAG,
       falling back to the system's default architecture.

       Intuition: architectures won't be set a lot, and generally you
       want the host system's architecture.  When architectures are
       mixed in a spec, it is likely because the tool requires a
       cross-compiled component, e.g. for tools that run on BlueGene
       or Cray machines.  These constraints will likely come directly
       from packages, so require the user to be explicit if they want
       to mess with the architecture, and revert to the default when
       they're not explicit.
    """
    # An explicitly constrained architecture is respected as-is.
    if spec.architecture is not None:
        return

    # Prefer the root's architecture; otherwise use the system default.
    root_arch = spec.root.architecture
    spec.architecture = root_arch if root_arch else spack.arch.sys_type()
def concretize_compiler(spec):
    """Currently just sets the compiler to gcc or throws an exception
       if the compiler is set to something else.

       TODO: implement below description.

       If the spec already has a compiler, we're done.  If not, then
       take the compiler used for the nearest ancestor with a concrete
       compiler, or use the system default if there is no ancestor
       with a compiler.

       Intuition: Use the system default if no package that depends on
       this one has a strict compiler requirement.  Otherwise, try to
       build with the compiler that will be used by libraries that
       link to this one, to maximize compatibility.
    """
    default = spack.compilers.default_compiler()
    if not spec.compiler.concrete:
        # No concrete compiler requested; fall back to the default.
        spec.compiler = default
    elif spec.compiler != default:
        # Only the default compiler is supported for now.
        raise spack.spec.UnknownCompilerError(str(spec.compiler))

View file

@ -18,13 +18,13 @@
from spack import * from spack import *
import spack.spec import spack.spec
import spack.error
import packages import packages
import tty import tty
import attr import attr
import validate import validate
import url import url
from spec import Compiler
from version import * from version import *
from multi_function import platform from multi_function import platform
from stage import Stage from stage import Stage
@ -249,7 +249,7 @@ class SomePackage(Package):
# These variables are per-package metadata will be defined by subclasses. # These variables are per-package metadata will be defined by subclasses.
# #
"""By default a package has no dependencies.""" """By default a package has no dependencies."""
dependencies = [] dependencies = {}
# #
# These are default values for instance variables. # These are default values for instance variables.
@ -371,21 +371,51 @@ def dependents(self):
return tuple(self._dependents) return tuple(self._dependents)
def sanity_check(self): def preorder_traversal(self, visited=None):
"""Ensure that this package and its dependencies don't have conflicting if visited is None:
requirements.""" visited = set()
deps = sorted(self.all_dependencies, key=lambda d: d.name)
if self.name in visited:
return
visited.add(self.name)
yield self
for name, spec in self.dependencies.iteritems():
for pkg in packages.get(name).preorder_traversal(visited):
yield pkg
def validate_dependencies(self):
"""Ensure that this package and its dependencies all have consistent
constraints on them.
"""
# This algorithm just attempts to merge all the constraints on the same
# package together, loses information about the source of the conflict.
# What we'd really like to know is exactly which two constraints
# conflict, but that algorithm is more expensive, so we'll do it
# the simple, less informative way for now.
merged = spack.spec.DependencyMap()
try:
for pkg in self.preorder_traversal():
for name, spec in pkg.dependencies.iteritems():
if name not in merged:
merged[name] = spec.copy()
else:
merged[name].constrain(spec)
except spack.spec.UnsatisfiableSpecError, e:
raise InvalidPackageDependencyError(
"Package %s has inconsistent dependency constraints: %s"
% (self.name, e.message))
@property @property
@memoized @memoized
def all_dependencies(self): def all_dependencies(self):
"""Dict(str -> Package) of all transitive dependencies of this package.""" """Dict(str -> Package) of all transitive dependencies of this package."""
all_deps = set(self.dependencies) all_deps = {name : dep for dep in self.preorder_traversal}
for dep in self.dependencies: del all_deps[self.name]
dep_pkg = packages.get(dep.name)
all_deps = all_deps.union(dep_pkg.all_dependencies)
return all_deps return all_deps
@ -533,7 +563,7 @@ def setup_install_environment(self):
# Pass along prefixes of dependencies here # Pass along prefixes of dependencies here
path_set(SPACK_DEPENDENCIES, path_set(SPACK_DEPENDENCIES,
[dep.package.prefix for dep in self.dependencies]) [dep.package.prefix for dep in self.dependencies.values()])
# Install location # Install location
os.environ[SPACK_PREFIX] = self.prefix os.environ[SPACK_PREFIX] = self.prefix
@ -544,7 +574,7 @@ def setup_install_environment(self):
def do_install_dependencies(self): def do_install_dependencies(self):
# Pass along paths of dependencies here # Pass along paths of dependencies here
for dep in self.dependencies: for dep in self.dependencies.values():
dep.package.do_install() dep.package.do_install()
@ -607,7 +637,7 @@ def do_clean_dist(self):
@property @property
def available_versions(self): def available_versions(self):
if not self._available_versions: if not self._available_versions:
self._available_versions = VersionList() self._available_versions = ver([self.version])
try: try:
# Run curl but grab the mime type from the http headers # Run curl but grab the mime type from the http headers
listing = spack.curl('-s', '-L', self.list_url, return_output=True) listing = spack.curl('-s', '-L', self.list_url, return_output=True)
@ -617,18 +647,18 @@ def available_versions(self):
for s in strings: for s in strings:
match = re.search(wildcard, s) match = re.search(wildcard, s)
if match: if match:
self._available_versions.add(ver(match.group(0))) self._available_versions.add(Version(match.group(0)))
except CalledProcessError: if not self._available_versions:
tty.warn("Found no versions for %s" % self.name,
"Packate.available_versions may require adding the list_url attribute",
"to the package to tell Spack where to look for versions.")
except subprocess.CalledProcessError:
tty.warn("Fetching %s failed." % self.list_url, tty.warn("Fetching %s failed." % self.list_url,
"Package.available_versions requires an internet connection.", "Package.available_versions requires an internet connection.",
"Version list may be incomplete.") "Version list may be incomplete.")
if not self._available_versions:
tty.warn("Found no versions for %s" % self.name,
"Packate.available_versions may require adding the list_url attribute",
"to the package to tell Spack where to look for versions.")
self._available_versions = [self.version]
return self._available_versions return self._available_versions
@ -654,3 +684,10 @@ def __call__(self, *args, **kwargs):
args = (jobs,) + args args = (jobs,) + args
super(MakeExecutable, self).__call__(*args, **kwargs) super(MakeExecutable, self).__call__(*args, **kwargs)
class InvalidPackageDependencyError(spack.error.SpackError):
"""Raised when package specification is inconsistent with requirements of
its dependencies."""
def __init__(self, message):
super(InvalidPackageDependencyError, self).__init__(message)

View file

@ -20,6 +20,7 @@
instances = {} instances = {}
def get(pkg_name): def get(pkg_name):
if not pkg_name in instances: if not pkg_name in instances:
package_class = get_class_for_package_name(pkg_name) package_class = get_class_for_package_name(pkg_name)
@ -85,9 +86,18 @@ def get_class_for_package_name(pkg_name):
else: else:
raise UnknownPackageError(pkg_name) raise UnknownPackageError(pkg_name)
# Figure out packages module from spack.packages_path # This allows us to change the module path.
# This allows us to change the module path.
if not re.match(r'%s' % spack.module_path, spack.packages_path):
raise RuntimeError("Packages path is not a submodule of spack.")
# TODO: replace this with a proper package DB class, instead of this hackiness.
packages_path = re.sub(spack.module_path + '\/+', 'spack.', spack.packages_path)
packages_module = re.sub(r'\/', '.', packages_path)
class_name = pkg_name.capitalize() class_name = pkg_name.capitalize()
try: try:
module_name = "%s.%s" % (__name__, pkg_name) module_name = "%s.%s" % (packages_module, pkg_name)
module = __import__(module_name, fromlist=[class_name]) module = __import__(module_name, fromlist=[class_name])
except ImportError, e: except ImportError, e:
tty.die("Error while importing %s.%s:\n%s" % (pkg_name, class_name, e.message)) tty.die("Error while importing %s.%s:\n%s" % (pkg_name, class_name, e.message))
@ -107,8 +117,8 @@ def compute_dependents():
if pkg._dependents is None: if pkg._dependents is None:
pkg._dependents = [] pkg._dependents = []
for dep in pkg.dependencies: for name, dep in pkg.dependencies.iteritems():
dpkg = get(dep.name) dpkg = get(name)
if dpkg._dependents is None: if dpkg._dependents is None:
dpkg._dependents = [] dpkg._dependents = []
dpkg._dependents.append(pkg.name) dpkg._dependents.append(pkg.name)
@ -130,8 +140,8 @@ def quote(string):
deps = [] deps = []
for pkg in all_packages(): for pkg in all_packages():
out.write(' %-30s [label="%s"]\n' % (quote(pkg.name), pkg.name)) out.write(' %-30s [label="%s"]\n' % (quote(pkg.name), pkg.name))
for dep in pkg.dependencies: for dep_name, dep in pkg.dependencies.iteritems():
deps.append((pkg.name, dep.name)) deps.append((pkg.name, dep_name))
out.write('\n') out.write('\n')
for pair in deps: for pair in deps:

View file

@ -11,7 +11,7 @@ class Libdwarf(Package):
list_url = "http://reality.sgiweb.org/davea/dwarf.html" list_url = "http://reality.sgiweb.org/davea/dwarf.html"
depends_on("libelf@0:1") depends_on("libelf")
def clean(self): def clean(self):

View file

@ -91,12 +91,16 @@ def expect(self, id):
self.next_token_error("Unexpected end of input") self.next_token_error("Unexpected end of input")
sys.exit(1) sys.exit(1)
def parse(self, text): def setup(self, text):
self.text = text self.text = text
self.push_tokens(self.lexer.lex(text)) self.push_tokens(self.lexer.lex(text))
def parse(self, text):
self.setup(text)
return self.do_parse() return self.do_parse()
class ParseError(spack.error.SpackError): class ParseError(spack.error.SpackError):
"""Raised when we don't hit an error while parsing.""" """Raised when we don't hit an error while parsing."""
def __init__(self, message, string, pos): def __init__(self, message, string, pos):

View file

@ -54,10 +54,10 @@ def depends_on(*specs):
""" """
# Get the enclosing package's scope and add deps to it. # Get the enclosing package's scope and add deps to it.
locals = sys._getframe(1).f_locals locals = sys._getframe(1).f_locals
dependencies = locals.setdefault("dependencies", []) dependencies = locals.setdefault("dependencies", {})
for string in specs: for string in specs:
for spec in spack.spec.parse(string): for spec in spack.spec.parse(string):
dependencies.append(spec) dependencies[spec.name] = spec
def provides(*args): def provides(*args):

View file

@ -67,6 +67,7 @@
import tty import tty
import spack.parse import spack.parse
import spack.error import spack.error
import spack.concretize
import spack.compilers import spack.compilers
import spack.compilers.gcc import spack.compilers.gcc
import spack.packages as packages import spack.packages as packages
@ -137,9 +138,8 @@ def satisfies(self, other):
def constrain(self, other): def constrain(self, other):
if not self.satisfies(other.compiler): if not self.satisfies(other):
raise UnsatisfiableCompilerSpecError( raise UnsatisfiableCompilerSpecError(self, other)
"%s does not satisfy %s" % (self.compiler, other.compiler))
self.versions.intersect(other.versions) self.versions.intersect(other.versions)
@ -149,23 +149,6 @@ def concrete(self):
return self.versions.concrete return self.versions.concrete
def _concretize(self):
"""If this spec could describe more than one version, variant, or build
of a package, this will resolve it to be concrete.
"""
# TODO: support compilers other than GCC.
if self.concrete:
return
gcc_version = spack.compilers.gcc.get_version()
self.versions = VersionList([gcc_version])
def concretized(self):
clone = self.copy()
clone._concretize()
return clone
@property @property
def version(self): def version(self):
if not self.concrete: if not self.concrete:
@ -243,13 +226,34 @@ def __str__(self):
@key_ordering @key_ordering
class Spec(object): class Spec(object):
def __init__(self, name): def __init__(self, spec_like):
self.name = name # Copy if spec_like is a Spec.
self.versions = VersionList() if type(spec_like) == Spec:
self.variants = VariantMap() self._dup(spec_like)
self.architecture = None return
self.compiler = None
self.dependencies = DependencyMap() # Parse if the spec_like is a string.
if type(spec_like) != str:
raise TypeError("Can't make spec out of %s" % type(spec_like))
spec_list = SpecParser().parse(spec_like)
if len(spec_list) > 1:
raise ValueError("More than one spec in string: " + spec_like)
if len(spec_list) < 1:
raise ValueError("String contains no specs: " + spec_like)
# Take all the attributes from the first parsed spec without copying
# This is a little bit nasty, but it's nastier to make the parser
# write directly into this Spec object.
other = spec_list[0]
self.name = other.name
self.parent = other.parent
self.versions = other.versions
self.variants = other.variants
self.architecture = other.architecture
self.compiler = other.compiler
self.dependencies = other.dependencies
# #
# Private routines here are called by the parser when building a spec. # Private routines here are called by the parser when building a spec.
@ -285,6 +289,21 @@ def _add_dependency(self, dep):
if dep.name in self.dependencies: if dep.name in self.dependencies:
raise DuplicateDependencyError("Cannot depend on '%s' twice" % dep) raise DuplicateDependencyError("Cannot depend on '%s' twice" % dep)
self.dependencies[dep.name] = dep self.dependencies[dep.name] = dep
dep.parent = self
@property
def root(self):
"""Follow parent links and find the root of this spec's DAG."""
root = self
while root.parent is not None:
root = root.parent
return root
@property
def package(self):
return packages.get(self.name)
@property @property
@ -296,6 +315,20 @@ def concrete(self):
and self.dependencies.concrete) and self.dependencies.concrete)
def preorder_traversal(self, visited=None):
if visited is None:
visited = set()
if id(self) in visited:
return
visited.add(id(self))
yield self
for dep in self.dependencies.itervalues():
for spec in dep.preorder_traversal(visited):
yield spec
def _concretize(self): def _concretize(self):
"""A spec is concrete if it describes one build of a package uniquely. """A spec is concrete if it describes one build of a package uniquely.
This will ensure that this spec is concrete. This will ensure that this spec is concrete.
@ -327,30 +360,40 @@ def _concretize(self):
# TODO: handle variants. # TODO: handle variants.
pkg = packages.get(self.name)
# Take the highest version in a range # Take the highest version in a range
if not self.versions.concrete: if not self.versions.concrete:
preferred = self.versions.highest() or pkg.version preferred = self.versions.highest() or self.package.version
self.versions = VersionList([preferred]) self.versions = VersionList([preferred])
# Ensure dependencies have right versions # Ensure dependencies have right versions
@property def flatten(self):
def traverse_deps(self, visited=None): """Pull all dependencies up to the root (this spec).
"""Yields dependencies in depth-first order""" Merge constraints for dependencies with the same name, and if they
if not visited: conflict, throw an exception. """
visited = set() # This ensures that the package descriptions themselves are consistent
self.package.validate_dependencies()
for name in sorted(self.dependencies.keys()): # Once that is guaranteed, we know any constraint violations are due
dep = dependencies[name] # to the spec -- so they're the user's fault, not Spack's.
if dep in visited: flat_deps = DependencyMap()
continue try:
for spec in self.preorder_traversal():
if spec.name not in flat_deps:
flat_deps[spec.name] = spec
else:
flat_deps[spec.name].constrain(spec)
for d in dep.traverse_deps(seen): except UnsatisfiableSpecError, e:
yield d # This REALLY shouldn't happen unless something is wrong in spack.
yield dep # It means we got a spec DAG with two instances of the same package
# that had inconsistent constraints. There's no way for a user to
# produce a spec like this (the parser adds all deps to the root),
# so this means OUR code is not sane!
raise InconsistentSpecError("Invalid Spec DAG: %s" % e.message)
self.dependencies = flat_deps
def _normalize_helper(self, visited, spec_deps): def _normalize_helper(self, visited, spec_deps):
@ -362,9 +405,7 @@ def _normalize_helper(self, visited, spec_deps):
# Combine constraints from package dependencies with # Combine constraints from package dependencies with
# information in this spec's dependencies. # information in this spec's dependencies.
pkg = packages.get(self.name) pkg = packages.get(self.name)
for pkg_dep in pkg.dependencies: for name, pkg_dep in self.package.dependencies.iteritems():
name = pkg_dep.name
if name not in spec_deps: if name not in spec_deps:
# Clone the spec from the package # Clone the spec from the package
spec_deps[name] = pkg_dep.copy() spec_deps[name] = pkg_dep.copy()
@ -372,23 +413,29 @@ def _normalize_helper(self, visited, spec_deps):
try: try:
# intersect package information with spec info # intersect package information with spec info
spec_deps[name].constrain(pkg_dep) spec_deps[name].constrain(pkg_dep)
except UnsatisfiableSpecError, e: except UnsatisfiableSpecError, e:
error_type = type(e) e.message = "Invalid spec: '%s'. "
raise error_type( e.message += "Package %s requires %s %s, but spec asked for %s"
"Violated depends_on constraint from package %s: %s" e.message %= (spec_deps[name], name, e.constraint_type,
% (self.name, e.message)) e.required, e.provided)
raise e
# Add merged spec to my deps and recurse # Add merged spec to my deps and recurse
self.dependencies[name] = spec_deps[name] self._add_dependency(spec_deps[name])
self.dependencies[name]._normalize_helper(visited, spec_deps) self.dependencies[name]._normalize_helper(visited, spec_deps)
def normalize(self): def normalize(self):
if any(dep.dependencies for dep in self.dependencies.values()): # Ensure first that all packages exist.
raise SpecError("Spec has already been normalized.")
self.validate_package_names() self.validate_package_names()
# Then ensure that the packages mentioned are sane, that the
# provided spec is sane, and that all dependency specs are in the
# root node of the spec. Flatten will do this for us.
self.flatten()
# Now that we're flat we can get all our dependencies at once.
spec_deps = self.dependencies spec_deps = self.dependencies
self.dependencies = DependencyMap() self.dependencies = DependencyMap()
@ -404,29 +451,25 @@ def normalize(self):
def validate_package_names(self): def validate_package_names(self):
for name in self.dependencies: packages.get(self.name)
packages.get(name) for name, dep in self.dependencies.iteritems():
dep.validate_package_names()
def constrain(self, other): def constrain(self, other):
if not self.versions.overlaps(other.versions): if not self.versions.overlaps(other.versions):
raise UnsatisfiableVersionSpecError( raise UnsatisfiableVersionSpecError(self.versions, other.versions)
"%s does not satisfy %s" % (self.versions, other.versions))
conflicting_variants = [ for v in other.variants:
v for v in other.variants if v in self.variants and if (v in self.variants and
self.variants[v].enabled != other.variants[v].enabled] self.variants[v].enabled != other.variants[v].enabled):
raise UnsatisfiableVariantSpecError(self.variants[v],
if conflicting_variants: other.variants[v])
raise UnsatisfiableVariantSpecError(comma_and(
"%s does not satisfy %s" % (self.variants[v], other.variants[v])
for v in conflicting_variants))
if self.architecture is not None and other.architecture is not None: if self.architecture is not None and other.architecture is not None:
if self.architecture != other.architecture: if self.architecture != other.architecture:
raise UnsatisfiableArchitectureSpecError( raise UnsatisfiableArchitectureSpecError(self.architecture,
"Asked for architecture %s, but required %s" other.architecture)
% (self.architecture, other.architecture))
if self.compiler is not None and other.compiler is not None: if self.compiler is not None and other.compiler is not None:
self.compiler.constrain(other.compiler) self.compiler.constrain(other.compiler)
@ -457,16 +500,23 @@ def concretized(self):
return clone return clone
def _dup(self, other):
"""Copy the spec other into self. This is a
first-party, overwriting copy."""
# TODO: this needs to handle DAGs.
self.name = other.name
self.versions = other.versions.copy()
self.variants = other.variants.copy()
self.architecture = other.architecture
self.compiler = None
if other.compiler:
self.compiler = other.compiler.copy()
self.dependencies = other.dependencies.copy()
def copy(self): def copy(self):
clone = Spec(self.name) """Return a deep copy of this spec."""
clone.versions = self.versions.copy() return Spec(self)
clone.variants = self.variants.copy()
clone.architecture = self.architecture
clone.compiler = None
if self.compiler:
clone.compiler = self.compiler.copy()
clone.dependencies = self.dependencies.copy()
return clone
@property @property
@ -478,7 +528,7 @@ def version(self):
def _cmp_key(self): def _cmp_key(self):
return (self.name, self.versions, self.variants, return (self.name, self.versions, self.variants,
self.architecture, self.compiler) self.architecture, self.compiler, self.dependencies)
def colorized(self): def colorized(self):
@ -505,7 +555,7 @@ def str_without_deps(self):
def tree(self, indent=""): def tree(self, indent=""):
"""Prints out this spec and its dependencies, tree-formatted """Prints out this spec and its dependencies, tree-formatted
with indentation.""" with indentation. Each node also has an id."""
out = indent + self.str_without_deps() out = indent + self.str_without_deps()
for dep in sorted(self.dependencies.keys()): for dep in sorted(self.dependencies.keys()):
out += "\n" + self.dependencies[dep].tree(indent + " ") out += "\n" + self.dependencies[dep].tree(indent + " ")
@ -566,8 +616,22 @@ def do_parse(self):
def spec(self): def spec(self):
"""Parse a spec out of the input. If a spec is supplied, then initialize
and return it instead of creating a new one."""
self.check_identifier() self.check_identifier()
spec = Spec(self.token.value)
# This will init the spec without calling __init__.
spec = Spec.__new__(Spec)
spec.name = self.token.value
spec.parent = None
spec.versions = VersionList()
spec.variants = VariantMap()
spec.architecture = None
spec.compiler = None
spec.dependencies = DependencyMap()
# record this so that we know whether version is
# unspecified or not.
added_version = False added_version = False
while self.next: while self.next:
@ -661,34 +725,10 @@ def check_identifier(self):
def parse(string): def parse(string):
"""Returns a list of specs from an input string.""" """Returns a list of specs from an input string.
return SpecParser().parse(string) For creating one spec, see Spec() constructor.
def parse_one(string):
"""Parses a string containing only one spec, then returns that
spec. If more than one spec is found, raises a ValueError.
""" """
spec_list = parse(string) return SpecParser().parse(string)
if len(spec_list) > 1:
raise ValueError("string contains more than one spec!")
elif len(spec_list) < 1:
raise ValueError("string contains no specs!")
return spec_list[0]
def make_spec(spec_like):
if type(spec_like) == str:
specs = parse(spec_like)
if len(specs) != 1:
raise ValueError("String contains multiple specs: '%s'" % spec_like)
return specs[0]
elif type(spec_like) == Spec:
return spec_like
else:
raise TypeError("Can't make spec out of %s" % type(spec_like))
class SpecError(spack.error.SpackError): class SpecError(spack.error.SpackError):
@ -728,6 +768,13 @@ def __init__(self, message):
super(DuplicateArchitectureError, self).__init__(message) super(DuplicateArchitectureError, self).__init__(message)
class InconsistentSpecError(SpecError):
"""Raised when two nodes in the same spec DAG have inconsistent
constraints."""
def __init__(self, message):
super(InconsistentSpecError, self).__init__(message)
class InvalidDependencyException(SpecError): class InvalidDependencyException(SpecError):
"""Raised when a dependency in a spec is not actually a dependency """Raised when a dependency in a spec is not actually a dependency
of the package.""" of the package."""
@ -736,30 +783,39 @@ def __init__(self, message):
class UnsatisfiableSpecError(SpecError): class UnsatisfiableSpecError(SpecError):
"""Raised when a spec conflicts with package constraints.""" """Raised when a spec conflicts with package constraints.
def __init__(self, message): Provide the requirement that was violated when raising."""
super(UnsatisfiableSpecError, self).__init__(message) def __init__(self, provided, required, constraint_type):
super(UnsatisfiableSpecError, self).__init__(
"%s does not satisfy %s" % (provided, required))
self.provided = provided
self.required = required
self.constraint_type = constraint_type
class UnsatisfiableVersionSpecError(UnsatisfiableSpecError): class UnsatisfiableVersionSpecError(UnsatisfiableSpecError):
"""Raised when a spec version conflicts with package constraints.""" """Raised when a spec version conflicts with package constraints."""
def __init__(self, message): def __init__(self, provided, required):
super(UnsatisfiableVersionSpecError, self).__init__(message) super(UnsatisfiableVersionSpecError, self).__init__(
provided, required, "version")
class UnsatisfiableCompilerSpecError(UnsatisfiableSpecError): class UnsatisfiableCompilerSpecError(UnsatisfiableSpecError):
"""Raised when a spec comiler conflicts with package constraints.""" """Raised when a spec comiler conflicts with package constraints."""
def __init__(self, message): def __init__(self, provided, required):
super(UnsatisfiableCompilerSpecError, self).__init__(message) super(UnsatisfiableCompilerSpecError, self).__init__(
provided, required, "compiler")
class UnsatisfiableVariantSpecError(UnsatisfiableSpecError): class UnsatisfiableVariantSpecError(UnsatisfiableSpecError):
"""Raised when a spec variant conflicts with package constraints.""" """Raised when a spec variant conflicts with package constraints."""
def __init__(self, message): def __init__(self, provided, required):
super(UnsatisfiableVariantSpecError, self).__init__(message) super(UnsatisfiableVariantSpecError, self).__init__(
provided, required, "variant")
class UnsatisfiableArchitectureSpecError(UnsatisfiableSpecError): class UnsatisfiableArchitectureSpecError(UnsatisfiableSpecError):
"""Raised when a spec architecture conflicts with package constraints.""" """Raised when a spec architecture conflicts with package constraints."""
def __init__(self, message): def __init__(self, provided, required):
super(UnsatisfiableArchitectureSpecError, self).__init__(message) super(UnsatisfiableArchitectureSpecError, self).__init__(
provided, required, "architecture")

View file

@ -1,11 +1,11 @@
import unittest import unittest
import spack.spec from spack.spec import Spec
class ConcretizeTest(unittest.TestCase): class ConcretizeTest(unittest.TestCase):
def check_concretize(self, abstract_spec): def check_concretize(self, abstract_spec):
abstract = spack.spec.parse_one(abstract_spec) abstract = Spec(abstract_spec)
print abstract print abstract
print abstract.concretized() print abstract.concretized()
print abstract.concretized().concrete print abstract.concretized().concrete

View file

@ -0,0 +1,14 @@
from spack import *
class Callpath(Package):
    """Package recipe for callpath.

    NOTE(review): md5 is the placeholder "foobarbaz", so this looks like
    a mock package used by the test suite -- confirm before real use.
    """
    homepage = "https://github.com/tgamblin/callpath"
    url = "http://github.com/tgamblin/callpath-0.2.tar.gz"
    md5 = "foobarbaz"  # placeholder checksum, not a real digest

    depends_on("dyninst")
    depends_on("mpich")

    def install(self, prefix):
        # Standard autotools sequence: configure, build, install.
        configure("--prefix=%s" % prefix)
        make()
        make("install")

View file

@ -0,0 +1,14 @@
from spack import *
class Dyninst(Package):
    """Package recipe for DyninstAPI 8.1.2, fetched from dyninst.org."""
    homepage = "https://paradyn.org"
    url = "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1.2/DyninstAPI-8.1.2.tgz"
    md5 = "bf03b33375afa66fe0efa46ce3f4b17a"

    depends_on("libelf")
    depends_on("libdwarf")

    def install(self, prefix):
        # Standard autotools sequence: configure, build, install.
        configure("--prefix=%s" % prefix)
        make()
        make("install")

View file

@ -0,0 +1,55 @@
from spack import *
import os
# Only build certain parts of dwarf because the other ones break.
dwarf_dirs = ['libdwarf', 'dwarfdump2']


class Libdwarf(Package):
    """Package recipe for libdwarf (SGI libdwarf distribution).

    Builds only the 'libdwarf' and 'dwarfdump2' subdirectories and
    installs their outputs by hand, since the upstream build has no
    install target.
    """
    homepage = "http://reality.sgiweb.org/davea/dwarf.html"
    url = "http://reality.sgiweb.org/davea/libdwarf-20130207.tar.gz"
    md5 = "64b42692e947d5180e162e46c689dfbf"
    list_url = "http://reality.sgiweb.org/davea/dwarf.html"

    depends_on("libelf")

    def clean(self):
        # Run 'make clean' in each built subdirectory, but only where a
        # Makefile exists (i.e. configure has already been run there).
        for dir in dwarf_dirs:
            with working_dir(dir):
                if os.path.exists('Makefile'):
                    make('clean')

    def install(self, prefix):
        # dwarf build does not set arguments for ar properly
        make.add_default_arg('ARFLAGS=rcs')

        # Dwarf doesn't provide an install, so we have to do it.
        mkdirp(bin, include, lib, man1)

        with working_dir('libdwarf'):
            configure("--prefix=%s" % prefix, '--enable-shared')
            make()

            # Copy build products into place by hand (no install target).
            install('libdwarf.a', lib)
            install('libdwarf.so', lib)
            install('libdwarf.h', include)
            install('dwarf.h', include)

        with working_dir('dwarfdump2'):
            configure("--prefix=%s" % prefix)

            # This makefile has strings of copy commands that
            # cause a race in parallel
            make(parallel=False)

            install('dwarfdump', bin)
            install('dwarfdump.conf', lib)
            install('dwarfdump.1', man1)

    # Platform-specific override selected by the @platform multimethod
    # decorator (from multi_function): this variant runs on the named
    # Mac OS X platform instead of the generic install above.
    @platform('macosx_10.8_x86_64')
    def install(self, prefix):
        raise UnsupportedPlatformError(
            "libdwarf doesn't currently build on Mac OS X.")

View file

@ -0,0 +1,16 @@
from spack import *
class Libelf(Package):
    """Package definition for libelf (ELF object-file access library).

    Autotools build with shared libraries enabled and dependency
    tracking/debug disabled.
    """
    homepage = "http://www.mr511.de/software/english.html"
    url      = "http://www.mr511.de/software/libelf-0.8.13.tar.gz"
    md5      = "4136d7b4c04df68b686570afa26988ac"

    def install(self, prefix):
        """Configure with shared libs, build, and serially install."""
        configure("--prefix=%s" % prefix,
                  "--enable-shared",
                  "--disable-dependency-tracking",
                  "--disable-debug")
        make()

        # The mkdir commands in libelf's install can fail in parallel,
        # so run the install target serially.
        make("install", parallel=False)

View file

@ -0,0 +1,11 @@
from spack import *
class Mpich(Package):
    """Package definition for the MPICH MPI implementation.

    Standard autotools configure/make/make-install build.
    """
    homepage = "http://www.mpich.org"
    url      = "http://www.mpich.org/static/downloads/3.0.4/mpich-3.0.4.tar.gz"
    md5      = "9c5d5d4fe1e17dd12153f40bc5b6dbc0"

    def install(self, prefix):
        """Configure into the install prefix, then build and install."""
        prefix_arg = "--prefix=%s" % prefix
        configure(prefix_arg)
        make()
        make("install")

View file

@ -0,0 +1,14 @@
from spack import *
class Mpileaks(Package):
    """Package definition for mpileaks.

    Standard autotools configure/make/make-install build.
    """
    homepage = "http://www.llnl.gov"
    url      = "http://www.llnl.gov/mpileaks-1.0.tar.gz"
    # NOTE(review): placeholder checksum — presumably a mock/test package;
    # confirm before using for a real install.
    md5      = "foobarbaz"

    # mpileaks needs an MPI implementation and callpath for stack traces.
    depends_on("mpich")
    depends_on("callpath")

    def install(self, prefix):
        """Configure into the install prefix, then build and install."""
        prefix_arg = "--prefix=%s" % prefix
        configure(prefix_arg)
        make()
        make("install")

View file

@ -0,0 +1,117 @@
"""
These tests check validation of dummy packages. You can find the dummy
packages directories that these tests use in:
spack/lib/spack/spack/test/mock_packages
Each test validates conditions with the packages in those directories.
"""
import unittest
import spack
import spack.package
import spack.packages as packages
from spack.util.lang import new_path, list_modules
from spack.spec import Spec
# Directory containing the mocked-up package definitions used by these tests.
mock_packages_path = new_path(spack.module_path, 'test', 'mock_packages')


def set_pkg_dep(pkg, spec):
    """Replace a package's recorded dependency with the given constraint.

    Parses *spec* into a Spec and installs it as pkg's dependency entry
    under the spec's name.  Use this to mock up constraints.
    """
    dep = Spec(spec)
    packages.get(pkg).dependencies[dep.name] = dep
class ValidationTest(unittest.TestCase):
    """Validation tests run against the mock packages.

    The class swaps spack.packages_path to the mock_packages directory for
    the duration of the test class, records the original dependency
    relationships once, and restores them before every test so that
    set_pkg_dep() mutations don't leak between tests.
    """

    @classmethod
    def setUpClass(cls):
        # Use a different packages directory for these tests.  We want to use
        # mocked up packages that don't interfere with the real ones.
        cls.real_packages_path = spack.packages_path
        spack.packages_path = mock_packages_path

        # First time through, record original relationships bt/w packages
        cls.original_deps = {}
        for name in list_modules(mock_packages_path):
            pkg = packages.get(name)
            cls.original_deps[name] = [
                spec for spec in pkg.dependencies.values()]

    @classmethod
    def restore(cls):
        # each time through restore original dependencies & constraints
        for pkg_name, deps in cls.original_deps.iteritems():
            packages.get(pkg_name).dependencies.clear()
            for dep in deps:
                set_pkg_dep(pkg_name, dep)

    @classmethod
    def tearDownClass(cls):
        """Restore the real packages path after any test."""
        cls.restore()
        spack.packages_path = cls.real_packages_path

    def setUp(self):
        """Before each test, restore deps between packages to original state."""
        ValidationTest.restore()

    def test_conflicting_package_constraints(self):
        # Two packages in the DAG demand incompatible mpich versions;
        # validate_dependencies() on the root package should reject this.
        set_pkg_dep('mpileaks', 'mpich@1.0')
        set_pkg_dep('callpath', 'mpich@2.0')
        spec = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf ^libdwarf')
        self.assertRaises(spack.package.InvalidPackageDependencyError,
                          spec.package.validate_dependencies)

    def test_conflicting_spec_constraints(self):
        mpileaks = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf ^libdwarf')

        # The unmodified package DAG should validate cleanly.
        try:
            mpileaks.package.validate_dependencies()
        except spack.package.InvalidPackageDependencyError, e:
            self.fail("validate_dependencies raised an exception: %s", e.message)

        # Normalize then add conflicting constraints to the DAG (this is an
        # extremely unlikely scenario, but we test for it anyway)
        mpileaks.normalize()
        mpileaks.dependencies['mpich'] = Spec('mpich@1.0')
        mpileaks.dependencies['callpath'].dependencies['mpich'] = Spec('mpich@2.0')
        self.assertRaises(spack.spec.InconsistentSpecError, mpileaks.flatten)

    def test_unsatisfiable_version(self):
        # Package constraint mpich@1.0 conflicts with the spec's mpich@2.0.
        set_pkg_dep('mpileaks', 'mpich@1.0')
        spec = Spec('mpileaks ^mpich@2.0 ^callpath ^dyninst ^libelf ^libdwarf')
        self.assertRaises(spack.spec.UnsatisfiableVersionSpecError, spec.normalize)

    def test_unsatisfiable_compiler(self):
        # Package requires gcc; spec names a different compiler (intel).
        set_pkg_dep('mpileaks', 'mpich%gcc')
        spec = Spec('mpileaks ^mpich%intel ^callpath ^dyninst ^libelf ^libdwarf')
        self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError, spec.normalize)

    def test_unsatisfiable_compiler_version(self):
        # Same compiler but conflicting compiler versions.
        set_pkg_dep('mpileaks', 'mpich%gcc@4.6')
        spec = Spec('mpileaks ^mpich%gcc@4.5 ^callpath ^dyninst ^libelf ^libdwarf')
        self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError, spec.normalize)

    def test_unsatisfiable_variant(self):
        # Package requires +debug; spec requests ~debug.
        set_pkg_dep('mpileaks', 'mpich+debug')
        spec = Spec('mpileaks ^mpich~debug ^callpath ^dyninst ^libelf ^libdwarf')
        self.assertRaises(spack.spec.UnsatisfiableVariantSpecError, spec.normalize)

    def test_unsatisfiable_architecture(self):
        # Package pins one architecture; spec pins a different one.
        set_pkg_dep('mpileaks', 'mpich=bgqos_0')
        spec = Spec('mpileaks ^mpich=sles_10_ppc64 ^callpath ^dyninst ^libelf ^libdwarf')
        self.assertRaises(spack.spec.UnsatisfiableArchitectureSpecError, spec.normalize)

View file

@ -61,23 +61,40 @@ def check_lex(self, tokens, spec):
def check_satisfies(self, lspec, rspec): def check_satisfies(self, lspec, rspec):
l = spack.spec.parse_one(lspec) l, r = Spec(lspec), Spec(rspec)
r = spack.spec.parse_one(rspec) self.assertTrue(l.satisfies(r))
self.assertTrue(l.satisfies(r) and r.satisfies(l)) self.assertTrue(r.satisfies(l))
# These should not raise try:
l.constrain(r) l.constrain(r)
r.constrain(l) r.constrain(l)
except SpecError, e:
self.fail("Got a SpecError in constrain!", e.message)
def assert_unsatisfiable(lspec, rspec):
l, r = Spec(lspec), Spec(rspec)
self.assertFalse(l.satisfies(r))
self.assertFalse(r.satisfies(l))
self.assertRaises(l.constrain, r)
self.assertRaises(r.constrain, l)
def check_constrain(self, expected, constrained, constraint): def check_constrain(self, expected, constrained, constraint):
exp = spack.spec.parse_one(expected) exp = Spec(expected)
constrained = spack.spec.parse_one(constrained) constrained = Spec(constrained)
constraint = spack.spec.parse_one(constraint) constraint = Spec(constraint)
constrained.constrain(constraint) constrained.constrain(constraint)
self.assertEqual(exp, constrained) self.assertEqual(exp, constrained)
def check_invalid_constraint(self, constrained, constraint):
constrained = Spec(constrained)
constraint = Spec(constraint)
self.assertRaises(UnsatisfiableSpecError, constrained.constrain, constraint)
# ================================================================================ # ================================================================================
# Parse checks # Parse checks
# =============================================================================== # ===============================================================================
@ -145,7 +162,28 @@ def test_satisfies(self):
def test_constrain(self): def test_constrain(self):
self.check_constrain('libelf@0:1', 'libelf', 'libelf@0:1') self.check_constrain('libelf@2.1:2.5', 'libelf@0:2.5', 'libelf@2.1:3')
self.check_constrain('libelf@2.1:2.5%gcc@4.5:4.6',
'libelf@0:2.5%gcc@2:4.6', 'libelf@2.1:3%gcc@4.5:4.7')
self.check_constrain('libelf+debug+foo', 'libelf+debug', 'libelf+foo')
self.check_constrain('libelf+debug+foo', 'libelf+debug', 'libelf+debug+foo')
self.check_constrain('libelf+debug~foo', 'libelf+debug', 'libelf~foo')
self.check_constrain('libelf+debug~foo', 'libelf+debug', 'libelf+debug~foo')
self.check_constrain('libelf=bgqos_0', 'libelf=bgqos_0', 'libelf=bgqos_0')
self.check_constrain('libelf=bgqos_0', 'libelf', 'libelf=bgqos_0')
def test_invalid_constraint(self):
self.check_invalid_constraint('libelf@0:2.0', 'libelf@2.1:3')
self.check_invalid_constraint('libelf@0:2.5%gcc@4.8:4.9', 'libelf@2.1:3%gcc@4.5:4.7')
self.check_invalid_constraint('libelf+debug', 'libelf~debug')
self.check_invalid_constraint('libelf+debug~foo', 'libelf+debug+foo')
self.check_invalid_constraint('libelf=bgqos_0', 'libelf=x86_54')
# ================================================================================ # ================================================================================

View file

@ -1,4 +1,5 @@
import os import os
import re
import subprocess import subprocess
import spack.tty as tty import spack.tty as tty

View file

@ -5,6 +5,9 @@
import inspect import inspect
from spack.util.filesystem import new_path from spack.util.filesystem import new_path
# Ignore emacs backups when listing modules
ignore_modules = [r'^\.#', '~$']
def has_method(cls, name): def has_method(cls, name):
for base in inspect.getmro(cls): for base in inspect.getmro(cls):
@ -27,21 +30,24 @@ def memoizer(*args, **kwargs):
return memoizer return memoizer
def list_modules(directory): def list_modules(directory, **kwargs):
"""Lists all of the modules, excluding __init__.py, in """Lists all of the modules, excluding __init__.py, in
a particular directory.""" a particular directory."""
list_directories = kwargs.setdefault('directories', True)
for name in os.listdir(directory): for name in os.listdir(directory):
if name == '__init__.py': if name == '__init__.py':
continue continue
path = new_path(directory, name) path = new_path(directory, name)
if os.path.isdir(path): if list_directories and os.path.isdir(path):
init_py = new_path(path, '__init__.py') init_py = new_path(path, '__init__.py')
if os.path.isfile(init_py): if os.path.isfile(init_py):
yield name yield name
elif name.endswith('.py'): elif name.endswith('.py'):
yield re.sub('.py$', '', name) if not any(re.search(pattern, name) for pattern in ignore_modules):
yield re.sub('.py$', '', name)
def key_ordering(cls): def key_ordering(cls):