New, more consistent package directory structure.

- Packages now live in <package_name>/package.py.
- spack.packages refactored to use a PackageDB object instead of a monolithic module.
- Implementation of mock_packages_test.py is greatly simplified.
- Added a test to exercise install/uninstall code, because that wasn't covered by existing tests and kept breaking.
parent 74ec74d73c
commit b8b334e86c
64 changed files with 554 additions and 486 deletions
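The diff below mechanically converts every call site from the old module-level spack.packages functions to the new PackageDB instance hung off the spack module. A minimal sketch of the new call pattern, pieced together from the hunks below (the standalone-script framing is illustrative, not part of the commit):

```python
from llnl.util.filesystem import join_path

import spack
from spack.packages import PackageDB

# Point the package database at an alternate root -- here the mock
# packages used by the test suite, mirroring what bin/spack now does
# for its mock-packages option.
mock_path = join_path(spack.module_path, 'test', 'mock_packages')
spack.db = PackageDB(mock_path)

# Lookups that used to go through module-level functions in
# spack.packages now go through the database object.
pkg = spack.db.get('mpich')
pkg_file = spack.db.filename_for_package_name('mpich')  # <root>/mpich/package.py
```

Normal runs get a default database: spack/__init__.py now creates it with db = PackageDB(join_path(module_path, "packages")).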
@@ -74,9 +74,10 @@ args = parser.parse_args()
 spack.verbose = args.verbose
 spack.debug = args.debug
 if args.mock:
-    from spack.util.filesystem import new_path
-    mock_path = new_path(spack.module_path, 'test', 'mock_packages')
-    spack.packages_path = mock_path
+    from llnl.util.filesystem import join_path
+    from spack.packages import PackageDB
+    mock_path = join_path(spack.module_path, 'test', 'mock_packages')
+    spack.db = PackageDB(mock_path)

 # If the user asked for it, don't check ssl certs.
 if args.insecure:

@@ -34,7 +34,6 @@

 import spack
 import spack.cmd
-import spack.packages as packages
 import spack.util.crypto
 from spack.stage import Stage, FailedDownloadError
 from spack.version import *
@@ -81,7 +80,7 @@ def get_checksums(versions, urls, **kwargs):

 def checksum(parser, args):
     # get the package we're going to generate checksums for
-    pkg = packages.get(args.package)
+    pkg = spack.db.get(args.package)

     # If the user asked for specific versions, use those.
     versions = [ver(v) for v in args.versions]

@@ -26,8 +26,8 @@

 import llnl.util.tty as tty

 import spack
+import spack.cmd
-import spack.packages as packages
 import spack.stage as stage

 description = "Remove staged files for packages"
@@ -49,7 +49,7 @@ def clean(parser, args):

     specs = spack.cmd.parse_specs(args.packages, concretize=True)
     for spec in specs:
-        package = packages.get(spec)
+        package = spack.db.get(spec)
         if args.dist:
             package.do_clean_dist()
         elif args.work:

@@ -33,7 +33,6 @@
 import spack
 import spack.cmd
 import spack.package
-import spack.packages as packages
 import spack.url
 import spack.util.crypto as crypto
 import spack.cmd.checksum
@@ -131,7 +130,7 @@ def create(parser, args):
         tty.msg("Couldn't guess a name for this package.")
         while not name:
             new_name = raw_input("Name: ")
-            if packages.valid_name(name):
+            if spack.db.valid_name(name):
                 name = new_name
             else:
                 print "Package name can only contain A-Z, a-z, 0-9, '_' and '-'"
@@ -141,11 +140,11 @@ def create(parser, args):

     tty.msg("Creating template for package %s" % name)

-    pkg_path = packages.filename_for_package_name(name)
+    pkg_path = spack.db.filename_for_package_name(name)
     if os.path.exists(pkg_path) and not args.force:
         tty.die("%s already exists." % pkg_path)

-    class_name = packages.class_name_for_package_name(name)
+    class_name = spack.db.class_name_for_package_name(name)
     versions = list(reversed(spack.package.find_versions_of_archive(url)))

     archives_to_fetch = 1

@@ -29,7 +29,6 @@
 import llnl.util.tty as tty

 import spack
-import spack.packages as packages

 description = "Open package files in $EDITOR"

@@ -67,7 +66,7 @@ def edit(parser, args):
     if not name:
         path = spack.packages_path
     else:
-        path = packages.filename_for_package_name(name)
+        path = spack.db.filename_for_package_name(name)

     if os.path.exists(path):
         if not os.path.isfile(path):
@@ -78,7 +77,7 @@ def edit(parser, args):
             tty.die("No package '%s'. Use spack create, or supply -f/--force "
                     "to edit a new file." % name)
         else:
-            class_name = packages.class_name_for_package_name(name)
+            class_name = spack.db.class_name_for_package_name(name)

     with closing(open(path, "w")) as pkg_file:
         pkg_file.write(

@@ -24,8 +24,8 @@
 ##############################################################################
 import argparse

 import spack
+import spack.cmd
-import spack.packages as packages

 description = "Fetch archives for packages"

@@ -46,5 +46,5 @@ def fetch(parser, args):

     specs = spack.cmd.parse_specs(args.packages, concretize=True)
     for spec in specs:
-        package = packages.get(spec)
+        package = spack.db.get(spec)
         package.do_fetch()

@@ -31,7 +31,7 @@

 import spack
 import spack.spec
-import spack.packages as packages
+import spack


 description ="Find installed spack packages"
@@ -83,7 +83,7 @@ def hasher():

     # Make a dict with specs keyed by architecture and compiler.
     index = hasher()
-    for spec in packages.installed_package_specs():
+    for spec in spack.db.installed_package_specs():
         if query_specs and not any(spec.satisfies(q) for q in query_specs):
             continue

@@ -23,9 +23,8 @@
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 import spack
-import spack.packages as packages

 description = "Write out inter-package dependencies in dot graph format"

 def graph(parser, args):
-    packages.graph_dependencies()
+    spack.db.graph_dependencies()

@@ -26,7 +26,6 @@
 import textwrap
 from llnl.util.tty.colify import colify
 import spack
-import spack.packages as packages

 description = "Get detailed information on a particular package"

@@ -35,7 +34,7 @@ def setup_parser(subparser):


 def info(parser, args):
-    package = packages.get(args.name)
+    package = spack.db.get(args.name)
     print "Package: ", package.name
     print "Homepage: ", package.homepage
     print "Download: ", package.url

@@ -26,7 +26,6 @@
 import argparse

 import spack
-import spack.packages as packages
 import spack.cmd

 description = "Build and install packages"
@@ -56,6 +55,6 @@ def install(parser, args):
     specs = spack.cmd.parse_specs(args.packages, concretize=True)

     for spec in specs:
-        package = packages.get(spec)
+        package = spack.db.get(spec)
         package.dirty = args.dirty
         package.do_install()

@@ -22,8 +22,8 @@
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-import spack.packages as packages
 from llnl.util.tty.colify import colify
+import spack

 description ="List available spack packages"

@@ -33,4 +33,4 @@ def setup_parser(subparser):

 def list(parser, args):
     # Print all the package names in columns
-    colify(packages.all_package_names())
+    colify(spack.db.all_package_names())

@@ -29,9 +29,8 @@
 import llnl.util.tty as tty
 from llnl.util.filesystem import mkdirp, join_path

-import spack.packages as packages
 import spack
 import spack.cmd

 from spack.stage import Stage

@@ -46,7 +45,7 @@ def setup_parser(subparser):

 def mirror(parser, args):
     if not args.packages:
-        args.packages = [p for p in packages.all_package_names()]
+        args.packages = [p for p in spack.db.all_package_names()]

     if os.path.isfile(args.directory):
         tty.error("%s already exists and is a file." % args.directory)
@@ -59,7 +58,7 @@ def mirror(parser, args):

     # Iterate through packages and download all the safe tarballs for each of them
     for pkg_name in args.packages:
-        pkg = packages.get(pkg_name)
+        pkg = spack.db.get(pkg_name)

         # Skip any package that has no checksummed versions.
         if not pkg.versions:

@@ -25,7 +25,7 @@
 import argparse

 import spack.cmd
-import spack.packages as packages
+import spack


 description="Patch expanded archive sources in preparation for install"
@@ -47,5 +47,5 @@ def patch(parser, args):

     specs = spack.cmd.parse_specs(args.packages, concretize=True)
     for spec in specs:
-        package = packages.get(spec)
+        package = spack.db.get(spec)
         package.do_patch()

@@ -27,8 +27,8 @@

 from llnl.util.tty.colify import colify

 import spack
+import spack.cmd
-import spack.packages

 description ="List packages that provide a particular virtual package"

@@ -39,4 +39,4 @@ def setup_parser(subparser):

 def providers(parser, args):
     for spec in spack.cmd.parse_specs(args.vpkg_spec):
-        colify(sorted(spack.packages.providers_for(spec)), indent=4)
+        colify(sorted(spack.db.providers_for(spec)), indent=4)

@@ -24,9 +24,8 @@
 ##############################################################################
 import argparse

 import spack
 import spack.cmd
-import spack.packages as packages


 description="Expand downloaded archive in preparation for install"
@@ -47,5 +46,5 @@ def stage(parser, args):

     specs = spack.cmd.parse_specs(args.packages, concretize=True)
     for spec in specs:
-        package = packages.get(spec)
+        package = spack.db.get(spec)
         package.do_stage()

@@ -28,7 +28,6 @@
 from llnl.util.lang import list_modules

 import spack
-import spack.packages as packages
 import spack.test

 description ="Run unit tests"

@@ -26,8 +26,8 @@

 import llnl.util.tty as tty

 import spack
+import spack.cmd
-import spack.packages as packages

 description="Remove an installed package"
@@ -49,7 +49,7 @@ def uninstall(parser, args):
     # Fail and ask user to be unambiguous if it doesn't
     pkgs = []
     for spec in specs:
-        matching_specs = packages.get_installed(spec)
+        matching_specs = spack.db.get_installed(spec)
         if len(matching_specs) > 1:
             tty.die("%s matches multiple packages. Which one did you mean?"
                     % spec, *matching_specs)
@@ -58,7 +58,7 @@ def uninstall(parser, args):
             tty.die("%s does not match any installed packages." % spec)

         installed_spec = matching_specs[0]
-        pkgs.append(packages.get(installed_spec))
+        pkgs.append(spack.db.get(installed_spec))

     # Sort packages to be uninstalled by the number of installed dependents
     # This ensures we do things in the right order

@@ -24,7 +24,7 @@
 ##############################################################################
 import os
 from llnl.util.tty.colify import colify
-import spack.packages as packages
+import spack

 description ="List available versions of a package"

@@ -33,5 +33,5 @@ def setup_parser(subparser):


 def versions(parser, args):
-    pkg = packages.get(args.package)
+    pkg = spack.db.get(args.package)
     colify(reversed(pkg.fetch_available_versions()))

@@ -35,7 +35,6 @@
 """
 import spack.architecture
 import spack.compilers
-import spack.packages
 import spack.spec
 from spack.version import *

@@ -30,6 +30,7 @@
 from spack.util.executable import *
 from spack.directory_layout import SpecHashDirectoryLayout
 from spack.concretize import DefaultConcretizer
+from spack.packages import PackageDB

 # This lives in $prefix/lib/spac/spack/__file__
 prefix = ancestor(__file__, 4)
@@ -41,7 +42,6 @@
 lib_path = join_path(prefix, "lib", "spack")
 env_path = join_path(lib_path, "env")
 module_path = join_path(lib_path, "spack")
-packages_path = join_path(module_path, "packages")
 compilers_path = join_path(module_path, "compilers")
 test_path = join_path(module_path, "test")
@@ -50,6 +50,12 @@

 install_path = join_path(prefix, "opt")

+#
+# Set up the packages database.
+#
+db = PackageDB(join_path(module_path, "packages"))
+
+
 #
 # This controls how spack lays out install prefixes and
 # stage directories.

@ -44,19 +44,19 @@
|
|||
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.tty.color import cwrite
|
||||
from llnl.util.filesystem import touch
|
||||
from llnl.util.filesystem import *
|
||||
from llnl.util.lang import *
|
||||
|
||||
from spack import *
|
||||
import spack
|
||||
import spack.spec
|
||||
import spack.error
|
||||
import spack.packages as packages
|
||||
import spack.url as url
|
||||
import spack.util.crypto as crypto
|
||||
from spack.version import *
|
||||
from spack.stage import Stage
|
||||
from spack.util.web import get_pages
|
||||
from spack.util.environment import *
|
||||
from spack.util.executable import Executable, which
|
||||
from spack.util.compression import allowed_archive
|
||||
|
||||
"""Allowed URL schemes for spack packages."""
|
||||
|
@ -480,7 +480,7 @@ def preorder_traversal(self, visited=None, **kwargs):
|
|||
yield spec
|
||||
continue
|
||||
|
||||
for pkg in packages.get(name).preorder_traversal(visited, **kwargs):
|
||||
for pkg in spack.db.get(name).preorder_traversal(visited, **kwargs):
|
||||
yield pkg
|
||||
|
||||
|
||||
|
@ -539,7 +539,7 @@ def installed_dependents(self):
|
|||
"""Return a list of the specs of all installed packages that depend
|
||||
on this one."""
|
||||
dependents = []
|
||||
for spec in packages.installed_package_specs():
|
||||
for spec in spack.db.installed_package_specs():
|
||||
if self.name in spec.dependencies:
|
||||
dep_spec = spec.dependencies[self.name]
|
||||
if self.spec == dep_spec:
|
||||
|
@ -594,7 +594,7 @@ def do_fetch(self):
|
|||
|
||||
self.stage.fetch()
|
||||
|
||||
if self.version in self.versions:
|
||||
if spack.do_checksum and self.version in self.versions:
|
||||
digest = self.versions[self.version]
|
||||
checker = crypto.Checker(digest)
|
||||
if checker.check(self.stage.archive_file):
|
||||
|
@ -720,28 +720,28 @@ def setup_install_environment(self):
|
|||
|
||||
# Add spack environment at front of path and pass the
|
||||
# lib location along so the compiler script can find spack
|
||||
os.environ[SPACK_LIB] = lib_path
|
||||
os.environ[spack.SPACK_LIB] = spack.lib_path
|
||||
|
||||
# Fix for case-insensitive file systems. Conflicting links are
|
||||
# in directories called "case*" within the env directory.
|
||||
env_paths = [env_path]
|
||||
for file in os.listdir(env_path):
|
||||
path = join_path(env_path, file)
|
||||
env_paths = [spack.env_path]
|
||||
for file in os.listdir(spack.env_path):
|
||||
path = join_path(spack.env_path, file)
|
||||
if file.startswith("case") and os.path.isdir(path):
|
||||
env_paths.append(path)
|
||||
path_put_first("PATH", env_paths)
|
||||
path_set(SPACK_ENV_PATH, env_paths)
|
||||
path_set(spack.SPACK_ENV_PATH, env_paths)
|
||||
|
||||
# Pass along prefixes of dependencies here
|
||||
path_set(
|
||||
SPACK_DEPENDENCIES,
|
||||
spack.SPACK_DEPENDENCIES,
|
||||
[dep.package.prefix for dep in self.spec.dependencies.values()])
|
||||
|
||||
# Install location
|
||||
os.environ[SPACK_PREFIX] = self.prefix
|
||||
os.environ[spack.SPACK_PREFIX] = self.prefix
|
||||
|
||||
# Build root for logging.
|
||||
os.environ[SPACK_BUILD_ROOT] = self.stage.expanded_archive_path
|
||||
os.environ[spack.SPACK_BUILD_ROOT] = self.stage.expanded_archive_path
|
||||
|
||||
|
||||
def do_install_dependencies(self):
|
||||
|
@ -887,7 +887,7 @@ def __init__(self, name, parallel):
|
|||
|
||||
def __call__(self, *args, **kwargs):
|
||||
parallel = kwargs.get('parallel', self.parallel)
|
||||
disable_parallel = env_flag(SPACK_NO_PARALLEL_MAKE)
|
||||
disable_parallel = env_flag(spack.SPACK_NO_PARALLEL_MAKE)
|
||||
|
||||
if parallel and not disable_parallel:
|
||||
jobs = "-j%d" % multiprocessing.cpu_count()
|
||||
|
|
|
@ -28,14 +28,22 @@
|
|||
import string
|
||||
import inspect
|
||||
import glob
|
||||
import imp
|
||||
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import join_path
|
||||
from llnl.util.lang import list_modules
|
||||
from llnl.util.lang import memoized
|
||||
|
||||
import spack
|
||||
import spack.error
|
||||
import spack.spec
|
||||
from spack.virtual import ProviderIndex
|
||||
|
||||
# Name of module under which packages are imported
|
||||
_imported_packages_module = 'spack.packages'
|
||||
|
||||
# Name of the package file inside a package directory
|
||||
_package_file_name = 'package.py'
|
||||
|
||||
# Valid package names can contain '-' but can't start with it.
|
||||
valid_package_re = r'^\w[\w-]*$'
|
||||
|
@ -43,222 +51,19 @@
|
|||
# Don't allow consecutive [_-] in package names
|
||||
invalid_package_re = r'[_-][_-]+'
|
||||
|
||||
instances = {}
|
||||
|
||||
|
||||
def _autospec(function):
|
||||
"""Decorator that automatically converts the argument of a single-arg
|
||||
function to a Spec."""
|
||||
def converter(arg):
|
||||
if not isinstance(arg, spack.spec.Spec):
|
||||
arg = spack.spec.Spec(arg)
|
||||
return function(arg)
|
||||
return converter
|
||||
|
||||
|
||||
class ProviderIndex(object):
|
||||
"""This is a dict of dicts used for finding providers of particular
|
||||
virtual dependencies. The dict of dicts looks like:
|
||||
|
||||
{ vpkg name :
|
||||
{ full vpkg spec : package providing spec } }
|
||||
|
||||
Callers can use this to first find which packages provide a vpkg,
|
||||
then find a matching full spec. e.g., in this scenario:
|
||||
|
||||
{ 'mpi' :
|
||||
{ mpi@:1.1 : mpich,
|
||||
mpi@:2.3 : mpich2@1.9: } }
|
||||
|
||||
Calling providers_for(spec) will find specs that provide a
|
||||
matching implementation of MPI.
|
||||
"""
|
||||
def __init__(self, specs, **kwargs):
|
||||
# TODO: come up with another name for this. This "restricts" values to
|
||||
# the verbatim impu specs (i.e., it doesn't pre-apply package's constraints, and
|
||||
# keeps things as broad as possible, so it's really the wrong name)
|
||||
self.restrict = kwargs.setdefault('restrict', False)
|
||||
|
||||
self.providers = {}
|
||||
|
||||
for spec in specs:
|
||||
if not isinstance(spec, spack.spec.Spec):
|
||||
spec = spack.spec.Spec(spec)
|
||||
|
||||
if spec.virtual:
|
||||
continue
|
||||
|
||||
self.update(spec)
|
||||
|
||||
|
||||
def update(self, spec):
|
||||
if type(spec) != spack.spec.Spec:
|
||||
spec = spack.spec.Spec(spec)
|
||||
|
||||
assert(not spec.virtual)
|
||||
|
||||
pkg = spec.package
|
||||
for provided_spec, provider_spec in pkg.provided.iteritems():
|
||||
if provider_spec.satisfies(spec, deps=False):
|
||||
provided_name = provided_spec.name
|
||||
if provided_name not in self.providers:
|
||||
self.providers[provided_name] = {}
|
||||
|
||||
if self.restrict:
|
||||
self.providers[provided_name][provided_spec] = spec
|
||||
|
||||
else:
|
||||
# Before putting the spec in the map, constrain it so that
|
||||
# it provides what was asked for.
|
||||
constrained = spec.copy()
|
||||
constrained.constrain(provider_spec)
|
||||
self.providers[provided_name][provided_spec] = constrained
|
||||
|
||||
|
||||
def providers_for(self, *vpkg_specs):
|
||||
"""Gives specs of all packages that provide virtual packages
|
||||
with the supplied specs."""
|
||||
providers = set()
|
||||
for vspec in vpkg_specs:
|
||||
# Allow string names to be passed as input, as well as specs
|
||||
if type(vspec) == str:
|
||||
vspec = spack.spec.Spec(vspec)
|
||||
|
||||
# Add all the providers that satisfy the vpkg spec.
|
||||
if vspec.name in self.providers:
|
||||
for provider_spec, spec in self.providers[vspec.name].items():
|
||||
if provider_spec.satisfies(vspec, deps=False):
|
||||
providers.add(spec)
|
||||
|
||||
# Return providers in order
|
||||
return sorted(providers)
|
||||
|
||||
|
||||
# TODO: this is pretty darned nasty, and inefficient.
|
||||
def _cross_provider_maps(self, lmap, rmap):
|
||||
result = {}
|
||||
for lspec in lmap:
|
||||
for rspec in rmap:
|
||||
try:
|
||||
constrained = lspec.copy().constrain(rspec)
|
||||
if lmap[lspec].name != rmap[rspec].name:
|
||||
continue
|
||||
result[constrained] = lmap[lspec].copy().constrain(
|
||||
rmap[rspec], deps=False)
|
||||
except spack.spec.UnsatisfiableSpecError:
|
||||
continue
|
||||
return result
|
||||
|
||||
|
||||
def __contains__(self, name):
|
||||
"""Whether a particular vpkg name is in the index."""
|
||||
return name in self.providers
|
||||
|
||||
|
||||
def satisfies(self, other):
|
||||
"""Check that providers of virtual specs are compatible."""
|
||||
common = set(self.providers) & set(other.providers)
|
||||
if not common:
|
||||
return True
|
||||
|
||||
result = {}
|
||||
for name in common:
|
||||
crossed = self._cross_provider_maps(self.providers[name],
|
||||
other.providers[name])
|
||||
if crossed:
|
||||
result[name] = crossed
|
||||
|
||||
return bool(result)
|
||||
|
||||
|
||||
|
||||
@_autospec
|
||||
def get(spec):
|
||||
if spec.virtual:
|
||||
raise UnknownPackageError(spec.name)
|
||||
|
||||
if not spec in instances:
|
||||
package_class = get_class_for_package_name(spec.name)
|
||||
instances[spec.name] = package_class(spec)
|
||||
|
||||
return instances[spec.name]
|
||||
|
||||
|
||||
@_autospec
|
||||
def get_installed(spec):
|
||||
return [s for s in installed_package_specs() if s.satisfies(spec)]
|
||||
|
||||
|
||||
@_autospec
|
||||
def providers_for(vpkg_spec):
|
||||
if not hasattr(providers_for, 'index'):
|
||||
providers_for.index = ProviderIndex(all_package_names())
|
||||
|
||||
providers = providers_for.index.providers_for(vpkg_spec)
|
||||
if not providers:
|
||||
raise UnknownPackageError("No such virtual package: %s" % vpkg_spec)
|
||||
return providers
|
||||
|
||||
|
||||
def valid_package_name(pkg_name):
|
||||
"""Return whether the pkg_name is valid for use in Spack."""
|
||||
return (re.match(valid_package_re, pkg_name) and
|
||||
not re.search(invalid_package_re, pkg_name))
|
||||
|
||||
|
||||
def validate_package_name(pkg_name):
|
||||
"""Raise an exception if pkg_name is not valid."""
|
||||
if not valid_package_name(pkg_name):
|
||||
raise InvalidPackageNameError(pkg_name)
|
||||
|
||||
|
||||
def dirname_for_package_name(pkg_name):
|
||||
"""Get the directory name for a particular package would use, even if it's a
|
||||
foo.py package and not a directory with a foo/__init__.py file."""
|
||||
return join_path(spack.packages_path, pkg_name)
|
||||
|
||||
|
||||
def filename_for_package_name(pkg_name):
|
||||
"""Get the filename for the module we should load for a particular package.
|
||||
The package can be either in a standalone .py file, or it can be in
|
||||
a directory with an __init__.py file.
|
||||
|
||||
Package "foo" in standalone .py file:
|
||||
packages/foo.py
|
||||
|
||||
Package "foo" in directory:
|
||||
packages/foo/__init__.py
|
||||
|
||||
The second form is used when there are files (like patches) that need
|
||||
to be stored along with the package.
|
||||
|
||||
If the package doesn't exist yet, this will just return the name
|
||||
of the standalone .py file.
|
||||
"""
|
||||
validate_package_name(pkg_name)
|
||||
pkg_dir = dirname_for_package_name(pkg_name)
|
||||
|
||||
if os.path.isdir(pkg_dir):
|
||||
init_file = join_path(pkg_dir, '__init__.py')
|
||||
return init_file
|
||||
else:
|
||||
pkg_file = "%s.py" % pkg_dir
|
||||
return pkg_file
|
||||
|
||||
|
||||
def installed_package_specs():
|
||||
return spack.install_layout.all_specs()
|
||||
|
||||
|
||||
def all_package_names():
|
||||
"""Generator function for all packages."""
|
||||
for module in list_modules(spack.packages_path):
|
||||
yield module
|
||||
|
||||
|
||||
def all_packages():
|
||||
for name in all_package_names():
|
||||
yield get(name)
|
||||
|
||||
|
||||
def class_name_for_package_name(pkg_name):
|
||||
"""Get a name for the class the package file should contain. Note that
|
||||
conflicts don't matter because the classes are in different modules.
|
||||
|
@ -277,89 +82,185 @@ def class_name_for_package_name(pkg_name):
|
|||
return class_name
|
||||
|
||||
|
||||
def exists(pkg_name):
|
||||
"""Whether a package with the supplied name exists ."""
|
||||
return os.path.exists(filename_for_package_name(pkg_name))
|
||||
def _autospec(function):
|
||||
"""Decorator that automatically converts the argument of a single-arg
|
||||
function to a Spec."""
|
||||
def converter(self, spec_like):
|
||||
if not isinstance(spec_like, spack.spec.Spec):
|
||||
spec_like = spack.spec.Spec(spec_like)
|
||||
return function(self, spec_like)
|
||||
return converter
|
||||
|
||||
|
||||
def packages_module():
|
||||
# TODO: replace this with a proper package DB class, instead of this hackiness.
|
||||
packages_path = re.sub(spack.module_path + '\/+', 'spack.', spack.packages_path)
|
||||
packages_module = re.sub(r'/', '.', packages_path)
|
||||
return packages_module
|
||||
class PackageDB(object):
|
||||
def __init__(self, root):
|
||||
"""Construct a new package database from a root directory."""
|
||||
self.root = root
|
||||
self.instances = {}
|
||||
self.provider_index = None
|
||||
|
||||
|
||||
def get_class_for_package_name(pkg_name):
|
||||
file_name = filename_for_package_name(pkg_name)
|
||||
@_autospec
|
||||
def get(self, spec):
|
||||
if spec.virtual:
|
||||
raise UnknownPackageError(spec.name)
|
||||
|
||||
if os.path.exists(file_name):
|
||||
if not os.path.isfile(file_name):
|
||||
tty.die("Something's wrong. '%s' is not a file!" % file_name)
|
||||
if not os.access(file_name, os.R_OK):
|
||||
tty.die("Cannot read '%s'!" % file_name)
|
||||
else:
|
||||
raise UnknownPackageError(pkg_name)
|
||||
if not spec in self.instances:
|
||||
package_class = self.get_class_for_package_name(spec.name)
|
||||
self.instances[spec.name] = package_class(spec)
|
||||
|
||||
# Figure out pacakges module from spack.packages_path
|
||||
# This allows us to change the module path.
|
||||
if not re.match(r'%s' % spack.module_path, spack.packages_path):
|
||||
raise RuntimeError("Packages path is not a submodule of spack.")
|
||||
|
||||
class_name = class_name_for_package_name(pkg_name)
|
||||
try:
|
||||
module_name = "%s.%s" % (packages_module(), pkg_name)
|
||||
module = __import__(module_name, fromlist=[class_name])
|
||||
except ImportError, e:
|
||||
tty.die("Error while importing %s.%s:\n%s" % (pkg_name, class_name, e.message))
|
||||
|
||||
cls = getattr(module, class_name)
|
||||
if not inspect.isclass(cls):
|
||||
tty.die("%s.%s is not a class" % (pkg_name, class_name))
|
||||
|
||||
return cls
|
||||
return self.instances[spec.name]
|
||||
|
||||
|
||||
def compute_dependents():
|
||||
"""Reads in all package files and sets dependence information on
|
||||
Package objects in memory.
|
||||
"""
|
||||
if not hasattr(compute_dependents, index):
|
||||
compute_dependents.index = {}
|
||||
|
||||
for pkg in all_packages():
|
||||
if pkg._dependents is None:
|
||||
pkg._dependents = []
|
||||
|
||||
for name, dep in pkg.dependencies.iteritems():
|
||||
dpkg = get(name)
|
||||
if dpkg._dependents is None:
|
||||
dpkg._dependents = []
|
||||
dpkg._dependents.append(pkg.name)
|
||||
@_autospec
|
||||
def get_installed(self, spec):
|
||||
return [s for s in self.installed_package_specs() if s.satisfies(spec)]
|
||||
|
||||
|
||||
def graph_dependencies(out=sys.stdout):
|
||||
"""Print out a graph of all the dependencies between package.
|
||||
Graph is in dot format."""
|
||||
out.write('digraph G {\n')
|
||||
out.write(' label = "Spack Dependencies"\n')
|
||||
out.write(' labelloc = "b"\n')
|
||||
out.write(' rankdir = "LR"\n')
|
||||
out.write(' ranksep = "5"\n')
|
||||
out.write('\n')
|
||||
@_autospec
|
||||
def providers_for(self, vpkg_spec):
|
||||
if self.provider_index is None:
|
||||
self.provider_index = ProviderIndex(self.all_package_names())
|
||||
|
||||
def quote(string):
|
||||
return '"%s"' % string
|
||||
providers = self.provider_index.providers_for(vpkg_spec)
|
||||
if not providers:
|
||||
raise UnknownPackageError("No such virtual package: %s" % vpkg_spec)
|
||||
return providers
|
||||
|
||||
deps = []
|
||||
for pkg in all_packages():
|
||||
out.write(' %-30s [label="%s"]\n' % (quote(pkg.name), pkg.name))
|
||||
for dep_name, dep in pkg.dependencies.iteritems():
|
||||
deps.append((pkg.name, dep_name))
|
||||
out.write('\n')
|
||||
|
||||
for pair in deps:
|
||||
out.write(' "%s" -> "%s"\n' % pair)
|
||||
out.write('}\n')
|
||||
def dirname_for_package_name(self, pkg_name):
|
||||
"""Get the directory name for a particular package. This is the
|
||||
directory that contains its package.py file."""
|
||||
return join_path(self.root, pkg_name)
|
||||
|
||||
|
||||
def filename_for_package_name(self, pkg_name):
|
||||
"""Get the filename for the module we should load for a particular
|
||||
package. Packages for a pacakge DB live in
|
||||
``$root/<package_name>/package.py``
|
||||
|
||||
This will return a proper package.py path even if the
|
||||
package doesn't exist yet, so callers will need to ensure
|
||||
the package exists before importing.
|
||||
"""
|
||||
validate_package_name(pkg_name)
|
||||
pkg_dir = self.dirname_for_package_name(pkg_name)
|
||||
return join_path(pkg_dir, _package_file_name)
|
||||
|
||||
|
||||
def installed_package_specs(self):
|
||||
"""Read installed package names straight from the install directory
|
||||
layout.
|
||||
"""
|
||||
return spack.install_layout.all_specs()
|
||||
|
||||
|
||||
@memoized
|
||||
def all_package_names(self):
|
||||
"""Generator function for all packages. This looks for
|
||||
``<pkg_name>/package.py`` files within the root direcotry"""
|
||||
all_package_names = []
|
||||
for pkg_name in os.listdir(self.root):
|
||||
pkg_dir = join_path(self.root, pkg_name)
|
||||
pkg_file = join_path(pkg_dir, _package_file_name)
|
||||
if os.path.isfile(pkg_file):
|
||||
all_package_names.append(pkg_name)
|
||||
all_package_names.sort()
|
||||
return all_package_names
|
||||
|
||||
|
||||
def all_packages(self):
|
||||
for name in self.all_package_names():
|
||||
yield get(name)
|
||||
|
||||
|
||||
def exists(self, pkg_name):
|
||||
"""Whether a package with the supplied name exists ."""
|
||||
return os.path.exists(self.filename_for_package_name(pkg_name))
|
||||
|
||||
|
||||
@memoized
|
||||
def get_class_for_package_name(self, pkg_name):
|
||||
"""Get an instance of the class for a particular package.
|
||||
|
||||
This method uses Python's ``imp`` package to load python
|
||||
source from a Spack package's ``package.py`` file. A
|
||||
normal python import would only load each package once, but
|
||||
because we do this dynamically, the method needs to be
|
||||
memoized to ensure there is only ONE package class
|
||||
instance, per package, per database.
|
||||
"""
|
||||
file_path = self.filename_for_package_name(pkg_name)
|
||||
|
||||
if os.path.exists(file_path):
|
||||
if not os.path.isfile(file_path):
|
||||
tty.die("Something's wrong. '%s' is not a file!" % file_path)
|
||||
if not os.access(file_path, os.R_OK):
|
||||
tty.die("Cannot read '%s'!" % file_path)
|
||||
else:
|
||||
raise UnknownPackageError(pkg_name)
|
||||
|
||||
# Figure out pacakges module based on self.root
|
||||
if not re.match(r'%s' % spack.module_path, self.root):
|
||||
raise RuntimeError("Packages path is not a submodule of spack.")
|
||||
|
||||
class_name = class_name_for_package_name(pkg_name)
|
||||
try:
|
||||
module_name = _imported_packages_module + '.' + pkg_name
|
||||
module = imp.load_source(module_name, file_path)
|
||||
|
||||
except ImportError, e:
|
||||
tty.die("Error while importing %s from %s:\n%s" % (
|
||||
pkg_name, file_path, e.message))
|
||||
|
||||
cls = getattr(module, class_name)
|
||||
if not inspect.isclass(cls):
|
||||
tty.die("%s.%s is not a class" % (pkg_name, class_name))
|
||||
|
||||
return cls
|
||||
|
||||
|
||||
def compute_dependents(self):
|
||||
"""Reads in all package files and sets dependence information on
|
||||
Package objects in memory.
|
||||
"""
|
||||
if not hasattr(compute_dependents, index):
|
||||
compute_dependents.index = {}
|
||||
|
||||
for pkg in all_packages():
|
||||
if pkg._dependents is None:
|
||||
pkg._dependents = []
|
||||
|
||||
for name, dep in pkg.dependencies.iteritems():
|
||||
dpkg = get(name)
|
||||
if dpkg._dependents is None:
|
||||
dpkg._dependents = []
|
||||
dpkg._dependents.append(pkg.name)
|
||||
|
||||
|
||||
def graph_dependencies(self, out=sys.stdout):
|
||||
"""Print out a graph of all the dependencies between package.
|
||||
Graph is in dot format."""
|
||||
out.write('digraph G {\n')
|
||||
out.write(' label = "Spack Dependencies"\n')
|
||||
out.write(' labelloc = "b"\n')
|
||||
out.write(' rankdir = "LR"\n')
|
||||
out.write(' ranksep = "5"\n')
|
||||
out.write('\n')
|
||||
|
||||
def quote(string):
|
||||
return '"%s"' % string
|
||||
|
||||
deps = []
|
||||
for pkg in all_packages():
|
||||
out.write(' %-30s [label="%s"]\n' % (quote(pkg.name), pkg.name))
|
||||
for dep_name, dep in pkg.dependencies.iteritems():
|
||||
deps.append((pkg.name, dep_name))
|
||||
out.write('\n')
|
||||
|
||||
for pair in deps:
|
||||
out.write(' "%s" -> "%s"\n' % pair)
|
||||
out.write('}\n')
|
||||
|
||||
|
||||
class InvalidPackageNameError(spack.error.SpackError):
|
||||
|
|
|
@ -30,7 +30,6 @@
|
|||
import spack
|
||||
import spack.stage
|
||||
import spack.error
|
||||
import spack.packages as packages
|
||||
|
||||
from spack.util.executable import which
|
||||
|
||||
|
@ -55,7 +54,7 @@ def __init__(self, pkg_name, path_or_url, level):
|
|||
if '://' in path_or_url:
|
||||
self.url = path_or_url
|
||||
else:
|
||||
pkg_dir = packages.dirname_for_package_name(pkg_name)
|
||||
pkg_dir = spack.db.dirname_for_package_name(pkg_name)
|
||||
self.path = join_path(pkg_dir, path_or_url)
|
||||
if not os.path.isfile(self.path):
|
||||
raise NoSuchPatchFileError(pkg_name, self.path)
|
||||
|
|
|
@ -80,7 +80,6 @@ class Mpileaks(Package):
|
|||
|
||||
from spack.patch import Patch
|
||||
from spack.spec import Spec, parse_anonymous_spec
|
||||
from spack.packages import packages_module
|
||||
|
||||
|
||||
"""Adds a dependencies local variable in the locals of
|
||||
|
|
|
@ -99,16 +99,16 @@
|
|||
from llnl.util.lang import *
|
||||
from llnl.util.tty.color import *
|
||||
|
||||
import spack
|
||||
import spack.parse
|
||||
import spack.error
|
||||
import spack.compilers
|
||||
import spack.compilers.gcc
|
||||
import spack.packages as packages
|
||||
|
||||
from spack.version import *
|
||||
from spack.util.string import *
|
||||
from spack.util.prefix import Prefix
|
||||
|
||||
from spack.virtual import ProviderIndex
|
||||
|
||||
# Convenient names for color formats so that other things can use them
|
||||
compiler_color = '@g'
|
||||
|
@ -379,7 +379,7 @@ def root(self):
|
|||
|
||||
@property
|
||||
def package(self):
|
||||
return packages.get(self)
|
||||
return spack.db.get(self)
|
||||
|
||||
|
||||
@property
|
||||
|
@ -391,7 +391,7 @@ def virtual(self):
|
|||
Possible idea: just use conventin and make virtual deps all
|
||||
caps, e.g., MPI vs mpi.
|
||||
"""
|
||||
return not packages.exists(self.name)
|
||||
return not spack.db.exists(self.name)
|
||||
|
||||
|
||||
@property
|
||||
|
@ -532,7 +532,7 @@ def _expand_virtual_packages(self):
|
|||
return
|
||||
|
||||
for spec in virtuals:
|
||||
providers = packages.providers_for(spec)
|
||||
providers = spack.db.providers_for(spec)
|
||||
concrete = spack.concretizer.choose_provider(spec, providers)
|
||||
concrete = concrete.copy()
|
||||
spec._replace_with(concrete)
|
||||
|
@ -624,7 +624,7 @@ def _normalize_helper(self, visited, spec_deps, provider_index):
|
|||
|
||||
# Combine constraints from package dependencies with
|
||||
# constraints on the spec's dependencies.
|
||||
pkg = packages.get(self.name)
|
||||
pkg = spack.db.get(self.name)
|
||||
for name, pkg_dep in self.package.dependencies.items():
|
||||
# If it's a virtual dependency, try to find a provider
|
||||
if pkg_dep.virtual:
|
||||
|
@ -653,7 +653,7 @@ def _normalize_helper(self, visited, spec_deps, provider_index):
|
|||
else:
|
||||
# if it's a real dependency, check whether it provides something
|
||||
# already required in the spec.
|
||||
index = packages.ProviderIndex([pkg_dep], restrict=True)
|
||||
index = ProviderIndex([pkg_dep], restrict=True)
|
||||
for vspec in (v for v in spec_deps.values() if v.virtual):
|
||||
if index.providers_for(vspec):
|
||||
vspec._replace_with(pkg_dep)
|
||||
|
@ -718,7 +718,7 @@ def normalize(self):
|
|||
# Remove virtual deps that are already provided by something in the spec
|
||||
spec_packages = [d.package for d in spec_deps.values() if not d.virtual]
|
||||
|
||||
index = packages.ProviderIndex(spec_deps.values(), restrict=True)
|
||||
index = ProviderIndex(spec_deps.values(), restrict=True)
|
||||
|
||||
visited = set()
|
||||
self._normalize_helper(visited, spec_deps, index)
|
||||
|
@ -754,7 +754,7 @@ def validate_names(self):
|
|||
for spec in self.preorder_traversal():
|
||||
# Don't get a package for a virtual name.
|
||||
if not spec.virtual:
|
||||
packages.get(spec.name)
|
||||
spack.db.get(spec.name)
|
||||
|
||||
# validate compiler name in addition to the package name.
|
||||
if spec.compiler:
|
||||
|
@ -888,10 +888,8 @@ def satisfies_dependencies(self, other):
|
|||
return False
|
||||
|
||||
# For virtual dependencies, we need to dig a little deeper.
|
||||
self_index = packages.ProviderIndex(
|
||||
self.preorder_traversal(), restrict=True)
|
||||
other_index = packages.ProviderIndex(
|
||||
other.preorder_traversal(), restrict=True)
|
||||
self_index = ProviderIndex(self.preorder_traversal(), restrict=True)
|
||||
other_index = ProviderIndex(other.preorder_traversal(), restrict=True)
|
||||
|
||||
# This handles cases where there are already providers for both vpkgs
|
||||
if not self_index.satisfies(other_index):
|
||||
|
|
|
@ -30,6 +30,8 @@
|
|||
|
||||
import spack
|
||||
|
||||
import spack.test.install
|
||||
|
||||
|
||||
"""Names of tests to be included in Spack's test suite"""
|
||||
test_names = ['versions',
|
||||
|
@ -40,7 +42,8 @@
|
|||
'spec_semantics',
|
||||
'spec_dag',
|
||||
'concretize',
|
||||
'multimethod']
|
||||
'multimethod',
|
||||
'install']
|
||||
|
||||
|
||||
def list_tests():
|
||||
|
|
|
@ -24,7 +24,7 @@
|
|||
##############################################################################
|
||||
import unittest
|
||||
|
||||
import spack.packages as packages
|
||||
import spack
|
||||
from spack.spec import Spec
|
||||
from spack.test.mock_packages_test import *
|
||||
|
||||
|
@ -113,22 +113,22 @@ def test_concretize_with_provides_when(self):
|
|||
we ask for some advanced version.
|
||||
"""
|
||||
self.assertTrue(not any(spec.satisfies('mpich2@:1.0')
|
||||
for spec in packages.providers_for('mpi@2.1')))
|
||||
for spec in spack.db.providers_for('mpi@2.1')))
|
||||
|
||||
self.assertTrue(not any(spec.satisfies('mpich2@:1.1')
|
||||
for spec in packages.providers_for('mpi@2.2')))
|
||||
for spec in spack.db.providers_for('mpi@2.2')))
|
||||
|
||||
self.assertTrue(not any(spec.satisfies('mpich2@:1.1')
|
||||
for spec in packages.providers_for('mpi@2.2')))
|
||||
for spec in spack.db.providers_for('mpi@2.2')))
|
||||
|
||||
self.assertTrue(not any(spec.satisfies('mpich@:1')
|
||||
for spec in packages.providers_for('mpi@2')))
|
||||
for spec in spack.db.providers_for('mpi@2')))
|
||||
|
||||
self.assertTrue(not any(spec.satisfies('mpich@:1')
|
||||
for spec in packages.providers_for('mpi@3')))
|
||||
for spec in spack.db.providers_for('mpi@3')))
|
||||
|
||||
self.assertTrue(not any(spec.satisfies('mpich2')
|
||||
for spec in packages.providers_for('mpi@3')))
|
||||
for spec in spack.db.providers_for('mpi@3')))
|
||||
|
||||
|
||||
def test_virtual_is_fully_expanded_for_callpath(self):
|
||||
|
@ -162,8 +162,4 @@ def test_virtual_is_fully_expanded_for_mpileaks(self):
|
|||
def test_my_dep_depends_on_provider_of_my_virtual_dep(self):
|
||||
spec = Spec('indirect_mpich')
|
||||
spec.normalize()
|
||||
|
||||
print
|
||||
print spec.tree(color=True)
|
||||
|
||||
spec.concretize()
|
||||
|
|
lib/spack/spack/test/install.py (new file, 98 lines)
@ -0,0 +1,98 @@
|
|||
##############################################################################
|
||||
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://scalability-llnl.github.io/spack
|
||||
# Please also see the LICENSE file for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License (as published by
|
||||
# the Free Software Foundation) version 2.1 dated February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
import unittest
|
||||
import shutil
|
||||
from contextlib import closing
|
||||
|
||||
from llnl.util.filesystem import *
|
||||
|
||||
import spack
|
||||
from spack.stage import Stage
|
||||
from spack.util.executable import which
|
||||
from spack.test.mock_packages_test import *
|
||||
|
||||
dir_name = 'trivial-1.0'
|
||||
archive_name = 'trivial-1.0.tar.gz'
|
||||
install_test_package = 'trivial_install_test_package'
|
||||
|
||||
class InstallTest(MockPackagesTest):
|
||||
"""Tests install and uninstall on a trivial package."""
|
||||
|
||||
def setUp(self):
|
||||
super(InstallTest, self).setUp()
|
||||
|
||||
self.stage = Stage('not_a_real_url')
|
||||
archive_dir = join_path(self.stage.path, dir_name)
|
||||
dummy_configure = join_path(archive_dir, 'configure')
|
||||
|
||||
mkdirp(archive_dir)
|
||||
with closing(open(dummy_configure, 'w')) as configure:
|
||||
configure.write(
|
||||
"#!/bin/sh\n"
|
||||
"prefix=$(echo $1 | sed 's/--prefix=//')\n"
|
||||
"cat > Makefile <<EOF\n"
|
||||
"all:\n"
|
||||
"\techo Building...\n\n"
|
||||
"install:\n"
|
||||
"\tmkdir -p $prefix\n"
|
||||
"\ttouch $prefix/dummy_file\n"
|
||||
"EOF\n")
|
||||
os.chmod(dummy_configure, 0755)
|
||||
|
||||
with working_dir(self.stage.path):
|
||||
tar = which('tar')
|
||||
tar('-czf', archive_name, dir_name)
|
||||
|
||||
# We use a fake pacakge, so skip the checksum.
|
||||
spack.do_checksum = False
|
||||
|
||||
def tearDown(self):
|
||||
super(InstallTest, self).tearDown()
|
||||
|
||||
if self.stage is not None:
|
||||
self.stage.destroy()
|
||||
|
||||
# Turn checksumming back on
|
||||
spack.do_checksum = True
|
||||
|
||||
|
||||
def test_install_and_uninstall(self):
|
||||
# Get a basic concrete spec for the trivial install package.
|
||||
spec = Spec(install_test_package)
|
||||
spec.concretize()
|
||||
|
||||
# Get the package
|
||||
pkg = spack.db.get(spec)
|
||||
|
||||
# Fake some values
|
||||
archive_path = join_path(self.stage.path, archive_name)
|
||||
pkg.url = 'file://' + archive_path
|
||||
|
||||
try:
|
||||
pkg.do_install()
|
||||
pkg.do_uninstall()
|
||||
except:
|
||||
if pkg: pkg.remove_prefix()
|
||||
raise
|
|
@ -1,24 +0,0 @@
|
|||
##############################################################################
|
||||
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://scalability-llnl.github.io/spack
|
||||
# Please also see the LICENSE file for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License (as published by
|
||||
# the Free Software Foundation) version 2.1 dated February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
|
@ -24,16 +24,15 @@
|
|||
##############################################################################
|
||||
from spack import *
|
||||
|
||||
class DirectoryPkg(Package):
|
||||
"""This is a fake package that tests spack's ability to load packages in
|
||||
directories with __init__.py files.
|
||||
"""
|
||||
homepage = "http://www.example.com"
|
||||
url = "http://www.example.com/directory-pkg-1.0.tar.gz"
|
||||
class TrivialInstallTestPackage(Package):
|
||||
"""This package is a stub with a trivial install method. It allows us
|
||||
to test the install and uninstall logic of spack."""
|
||||
homepage = "http://www.example.com/trivial_install"
|
||||
url = "http://www.unit-test-should-replace-this-url/trivial_install-1.0.tar.gz"
|
||||
|
||||
versions = { '1.0' : '0123456789abcdef0123456789abcdef' }
|
||||
versions = { '1.0' : 'foobarbaz' }
|
||||
|
||||
this_is_a_directory_pkg = True
|
||||
|
||||
def install(self):
|
||||
pass
|
||||
def install(self, spec, prefix):
|
||||
configure('--prefix=%s' % prefix)
|
||||
make()
|
||||
make('install')
|
|
@ -28,54 +28,31 @@
|
|||
from llnl.util.filesystem import join_path
|
||||
|
||||
import spack
|
||||
import spack.packages as packages
|
||||
from spack.packages import PackageDB
|
||||
from spack.spec import Spec
|
||||
|
||||
mock_packages_path = join_path(spack.module_path, 'test', 'mock_packages')
|
||||
original_deps = None
|
||||
|
||||
|
||||
def set_pkg_dep(pkg, spec):
|
||||
"""Alters dependence information for a pacakge.
|
||||
Use this to mock up constraints.
|
||||
"""
|
||||
spec = Spec(spec)
|
||||
packages.get(pkg).dependencies[spec.name] = spec
|
||||
|
||||
|
||||
def restore_dependencies():
|
||||
# each time through restore original dependencies & constraints
|
||||
global original_deps
|
||||
for pkg_name, deps in original_deps.iteritems():
|
||||
packages.get(pkg_name).dependencies.clear()
|
||||
for dep in deps:
|
||||
set_pkg_dep(pkg_name, dep)
|
||||
spack.db.get(pkg).dependencies[spec.name] = spec
|
||||
|
||||
|
||||
class MockPackagesTest(unittest.TestCase):
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
# Use a different packages directory for these tests. We want to use
|
||||
# mocked up packages that don't interfere with the real ones.
|
||||
cls.real_packages_path = spack.packages_path
|
||||
spack.packages_path = mock_packages_path
|
||||
|
||||
# First time through, record original relationships bt/w packages
|
||||
global original_deps
|
||||
original_deps = {}
|
||||
for name in list_modules(mock_packages_path):
|
||||
pkg = packages.get(name)
|
||||
original_deps[name] = [
|
||||
spec for spec in pkg.dependencies.values()]
|
||||
def setUp(self):
|
||||
# Use the mock packages database for these tests. This allows
|
||||
# us to set up contrived packages that don't interfere with
|
||||
# real ones.
|
||||
self.real_db = spack.db
|
||||
spack.db = PackageDB(mock_packages_path)
|
||||
|
||||
|
||||
@classmethod
|
||||
def tearDownClass(cls):
|
||||
def tearDown(self):
|
||||
"""Restore the real packages path after any test."""
|
||||
restore_dependencies()
|
||||
spack.packages_path = cls.real_packages_path
|
||||
|
||||
|
||||
def setUp(self):
|
||||
"""Before each test, restore deps between packages to original state."""
|
||||
restore_dependencies()
|
||||
#restore_dependencies()
|
||||
spack.db = self.real_db
|
||||
|
|
|
@ -27,7 +27,7 @@
|
|||
"""
|
||||
import unittest
|
||||
|
||||
import spack.packages as packages
|
||||
import spack
|
||||
from spack.multimethod import *
|
||||
from spack.version import *
|
||||
from spack.spec import Spec
|
||||
|
@ -38,37 +38,37 @@
|
|||
class MultiMethodTest(MockPackagesTest):
|
||||
|
||||
def test_no_version_match(self):
|
||||
pkg = packages.get('multimethod@2.0')
|
||||
pkg = spack.db.get('multimethod@2.0')
|
||||
self.assertRaises(NoSuchMethodError, pkg.no_version_2)
|
||||
|
||||
|
||||
def test_one_version_match(self):
|
||||
pkg = packages.get('multimethod@1.0')
|
||||
pkg = spack.db.get('multimethod@1.0')
|
||||
self.assertEqual(pkg.no_version_2(), 1)
|
||||
|
||||
pkg = packages.get('multimethod@3.0')
|
||||
pkg = spack.db.get('multimethod@3.0')
|
||||
self.assertEqual(pkg.no_version_2(), 3)
|
||||
|
||||
pkg = packages.get('multimethod@4.0')
|
||||
pkg = spack.db.get('multimethod@4.0')
|
||||
self.assertEqual(pkg.no_version_2(), 4)
|
||||
|
||||
|
||||
def test_version_overlap(self):
|
||||
pkg = packages.get('multimethod@2.0')
|
||||
pkg = spack.db.get('multimethod@2.0')
|
||||
self.assertEqual(pkg.version_overlap(), 1)
|
||||
|
||||
pkg = packages.get('multimethod@5.0')
|
||||
pkg = spack.db.get('multimethod@5.0')
|
||||
self.assertEqual(pkg.version_overlap(), 2)
|
||||
|
||||
|
||||
def test_mpi_version(self):
|
||||
pkg = packages.get('multimethod^mpich@3.0.4')
|
||||
pkg = spack.db.get('multimethod^mpich@3.0.4')
|
||||
self.assertEqual(pkg.mpi_version(), 3)
|
||||
|
||||
pkg = packages.get('multimethod^mpich2@1.2')
|
||||
pkg = spack.db.get('multimethod^mpich2@1.2')
|
||||
self.assertEqual(pkg.mpi_version(), 2)
|
||||
|
||||
pkg = packages.get('multimethod^mpich@1.0')
|
||||
pkg = spack.db.get('multimethod^mpich@1.0')
|
||||
self.assertEqual(pkg.mpi_version(), 1)
|
||||
|
||||
|
||||
|
@ -76,54 +76,54 @@ def test_undefined_mpi_version(self):
|
|||
# This currently fails because provides() doesn't do
|
||||
# the right thing undefined version ranges.
|
||||
# TODO: fix this.
|
||||
pkg = packages.get('multimethod^mpich@0.4')
|
||||
pkg = spack.db.get('multimethod^mpich@0.4')
|
||||
self.assertEqual(pkg.mpi_version(), 0)
|
||||
|
||||
|
||||
def test_default_works(self):
|
||||
pkg = packages.get('multimethod%gcc')
|
||||
pkg = spack.db.get('multimethod%gcc')
|
||||
self.assertEqual(pkg.has_a_default(), 'gcc')
|
||||
|
||||
pkg = packages.get('multimethod%intel')
|
||||
pkg = spack.db.get('multimethod%intel')
|
||||
self.assertEqual(pkg.has_a_default(), 'intel')
|
||||
|
||||
pkg = packages.get('multimethod%pgi')
|
||||
pkg = spack.db.get('multimethod%pgi')
|
||||
self.assertEqual(pkg.has_a_default(), 'default')
|
||||
|
||||
|
||||
def test_architecture_match(self):
|
||||
pkg = packages.get('multimethod=x86_64')
|
||||
pkg = spack.db.get('multimethod=x86_64')
|
||||
self.assertEqual(pkg.different_by_architecture(), 'x86_64')
|
||||
|
||||
pkg = packages.get('multimethod=ppc64')
|
||||
pkg = spack.db.get('multimethod=ppc64')
|
||||
self.assertEqual(pkg.different_by_architecture(), 'ppc64')
|
||||
|
||||
pkg = packages.get('multimethod=ppc32')
|
||||
pkg = spack.db.get('multimethod=ppc32')
|
||||
self.assertEqual(pkg.different_by_architecture(), 'ppc32')
|
||||
|
||||
pkg = packages.get('multimethod=arm64')
|
||||
pkg = spack.db.get('multimethod=arm64')
|
||||
self.assertEqual(pkg.different_by_architecture(), 'arm64')
|
||||
|
||||
pkg = packages.get('multimethod=macos')
|
||||
pkg = spack.db.get('multimethod=macos')
|
||||
self.assertRaises(NoSuchMethodError, pkg.different_by_architecture)
|
||||
|
||||
|
||||
def test_dependency_match(self):
|
||||
pkg = packages.get('multimethod^zmpi')
|
||||
pkg = spack.db.get('multimethod^zmpi')
|
||||
self.assertEqual(pkg.different_by_dep(), 'zmpi')
|
||||
|
||||
pkg = packages.get('multimethod^mpich')
|
||||
pkg = spack.db.get('multimethod^mpich')
|
||||
self.assertEqual(pkg.different_by_dep(), 'mpich')
|
||||
|
||||
# If we try to switch on some entirely different dep, it's ambiguous,
|
||||
# but should take the first option
|
||||
pkg = packages.get('multimethod^foobar')
|
||||
pkg = spack.db.get('multimethod^foobar')
|
||||
self.assertEqual(pkg.different_by_dep(), 'mpich')
|
||||
|
||||
|
||||
def test_virtual_dep_match(self):
|
||||
pkg = packages.get('multimethod^mpich2')
|
||||
pkg = spack.db.get('multimethod^mpich2')
|
||||
self.assertEqual(pkg.different_by_virtual_dep(), 2)
|
||||
|
||||
pkg = packages.get('multimethod^mpich@1.0')
|
||||
pkg = spack.db.get('multimethod^mpich@1.0')
|
||||
self.assertEqual(pkg.different_by_virtual_dep(), 1)
|
||||
|
|
|
@ -25,45 +25,29 @@
|
|||
import unittest
|
||||
|
||||
from spack.test.mock_packages_test import *
|
||||
import spack.packages as packages
|
||||
import spack
|
||||
|
||||
class PackagesTest(MockPackagesTest):
|
||||
|
||||
def test_load_regular_package(self):
|
||||
pkg = packages.get('mpich')
|
||||
def test_load_package(self):
|
||||
pkg = spack.db.get('mpich')
|
||||
|
||||
|
||||
def test_regular_package_name(self):
|
||||
pkg = packages.get('mpich')
|
||||
def test_package_name(self):
|
||||
pkg = spack.db.get('mpich')
|
||||
self.assertEqual(pkg.name, 'mpich')
|
||||
|
||||
|
||||
def test_regular_package_filename(self):
|
||||
filename = packages.filename_for_package_name('mpich')
|
||||
self.assertEqual(filename, join_path(mock_packages_path, 'mpich.py'))
|
||||
def test_package_filename(self):
|
||||
filename = spack.db.filename_for_package_name('mpich')
|
||||
self.assertEqual(filename, join_path(mock_packages_path, 'mpich', 'package.py'))
|
||||
|
||||
|
||||
def test_regular_package_name(self):
|
||||
pkg = packages.get('mpich')
|
||||
def test_package_name(self):
|
||||
pkg = spack.db.get('mpich')
|
||||
self.assertEqual(pkg.name, 'mpich')
|
||||
|
||||
|
||||
def test_load_directory_package(self):
|
||||
pkg = packages.get('directory-pkg')
|
||||
self.assertTrue(hasattr(pkg, 'this_is_a_directory_pkg'))
|
||||
self.assertTrue(pkg.this_is_a_directory_pkg)
|
||||
|
||||
|
||||
def test_directory_package_name(self):
|
||||
pkg = packages.get('directory-pkg')
|
||||
self.assertEqual(pkg.name, 'directory-pkg')
|
||||
|
||||
|
||||
def test_directory_package_filename(self):
|
||||
filename = packages.filename_for_package_name('directory-pkg')
|
||||
self.assertEqual(filename, join_path(mock_packages_path, 'directory-pkg/__init__.py'))
|
||||
|
||||
|
||||
def test_nonexisting_package_filename(self):
|
||||
filename = packages.filename_for_package_name('some-nonexisting-package')
|
||||
self.assertEqual(filename, join_path(mock_packages_path, 'some-nonexisting-package.py'))
|
||||
filename = spack.db.filename_for_package_name('some-nonexisting-package')
|
||||
self.assertEqual(filename, join_path(mock_packages_path, 'some-nonexisting-package', 'package.py'))
|
||||
|
|
|
@ -30,7 +30,6 @@
|
|||
"""
|
||||
import spack
|
||||
import spack.package
|
||||
import spack.packages as packages
|
||||
|
||||
from llnl.util.lang import list_modules
|
||||
|
||||
|
|
lib/spack/spack/virtual.py (new file, 142 lines)
@ -0,0 +1,142 @@
|
|||
##############################################################################
|
||||
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://scalability-llnl.github.io/spack
|
||||
# Please also see the LICENSE file for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License (as published by
|
||||
# the Free Software Foundation) version 2.1 dated February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
"""
|
||||
The ``virtual`` module contains utility classes for virtual dependencies.
|
||||
"""
|
||||
import spack.spec
|
||||
|
||||
class ProviderIndex(object):
|
||||
"""This is a dict of dicts used for finding providers of particular
|
||||
virtual dependencies. The dict of dicts looks like:
|
||||
|
||||
{ vpkg name :
|
||||
{ full vpkg spec : package providing spec } }
|
||||
|
||||
Callers can use this to first find which packages provide a vpkg,
|
||||
then find a matching full spec. e.g., in this scenario:
|
||||
|
||||
{ 'mpi' :
|
||||
{ mpi@:1.1 : mpich,
|
||||
mpi@:2.3 : mpich2@1.9: } }
|
||||
|
||||
Calling providers_for(spec) will find specs that provide a
|
||||
matching implementation of MPI.
|
||||
"""
|
||||
def __init__(self, specs, **kwargs):
|
||||
# TODO: come up with another name for this. This "restricts" values to
|
||||
# the verbatim impu specs (i.e., it doesn't pre-apply package's constraints, and
|
||||
# keeps things as broad as possible, so it's really the wrong name)
|
||||
self.restrict = kwargs.setdefault('restrict', False)
|
||||
|
||||
self.providers = {}
|
||||
|
||||
for spec in specs:
|
||||
if not isinstance(spec, spack.spec.Spec):
|
||||
spec = spack.spec.Spec(spec)
|
||||
|
||||
if spec.virtual:
|
||||
continue
|
||||
|
||||
self.update(spec)
|
||||
|
||||
|
||||
def update(self, spec):
|
||||
if type(spec) != spack.spec.Spec:
|
||||
spec = spack.spec.Spec(spec)
|
||||
|
||||
assert(not spec.virtual)
|
||||
|
||||
pkg = spec.package
|
||||
for provided_spec, provider_spec in pkg.provided.iteritems():
|
||||
if provider_spec.satisfies(spec, deps=False):
|
||||
provided_name = provided_spec.name
|
||||
if provided_name not in self.providers:
|
||||
self.providers[provided_name] = {}
|
||||
|
||||
if self.restrict:
|
||||
self.providers[provided_name][provided_spec] = spec
|
||||
|
||||
else:
|
||||
# Before putting the spec in the map, constrain it so that
|
||||
# it provides what was asked for.
|
||||
constrained = spec.copy()
|
||||
constrained.constrain(provider_spec)
|
||||
self.providers[provided_name][provided_spec] = constrained
|
||||
|
||||
|
||||
def providers_for(self, *vpkg_specs):
|
||||
"""Gives specs of all packages that provide virtual packages
|
||||
with the supplied specs."""
|
||||
providers = set()
|
||||
for vspec in vpkg_specs:
|
||||
# Allow string names to be passed as input, as well as specs
|
||||
if type(vspec) == str:
|
||||
vspec = spack.spec.Spec(vspec)
|
||||
|
||||
# Add all the providers that satisfy the vpkg spec.
|
||||
if vspec.name in self.providers:
|
||||
for provider_spec, spec in self.providers[vspec.name].items():
|
||||
if provider_spec.satisfies(vspec, deps=False):
|
||||
providers.add(spec)
|
||||
|
||||
# Return providers in order
|
||||
return sorted(providers)
|
||||
|
||||
|
||||
# TODO: this is pretty darned nasty, and inefficient.
|
||||
def _cross_provider_maps(self, lmap, rmap):
|
||||
result = {}
|
||||
for lspec in lmap:
|
||||
for rspec in rmap:
|
||||
try:
|
||||
constrained = lspec.copy().constrain(rspec)
|
||||
if lmap[lspec].name != rmap[rspec].name:
|
||||
continue
|
||||
result[constrained] = lmap[lspec].copy().constrain(
|
||||
rmap[rspec], deps=False)
|
||||
except spack.spec.UnsatisfiableSpecError:
|
||||
continue
|
||||
return result
|
||||
|
||||
|
||||
def __contains__(self, name):
|
||||
"""Whether a particular vpkg name is in the index."""
|
||||
return name in self.providers
|
||||
|
||||
|
||||
def satisfies(self, other):
|
||||
"""Check that providers of virtual specs are compatible."""
|
||||
common = set(self.providers) & set(other.providers)
|
||||
if not common:
|
||||
return True
|
||||
|
||||
result = {}
|
||||
for name in common:
|
||||
crossed = self._cross_provider_maps(self.providers[name],
|
||||
other.providers[name])
|
||||
if crossed:
|
||||
result[name] = crossed
|
||||
|
||||
return bool(result)
|