Added support for querying by tags (#4786)

* Added support to query packages by tags.
    - The querying commands `spack list`, `spack find` and `spack info` have
      been extended: `spack list` and `spack find` accept a `--tags` filter,
      and `spack info` now displays a package's tags (a usage sketch follows
      below). Tests have been added to check that the feature works correctly
      in the most common use cases.
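
      The sketch below shows one way to exercise the new flags from Python,
      reusing the SpackCommand helper that already appears in the info tests;
      whether the helper returns the captured command output (rather than just
      printing it) is an assumption here, and 'proxy-app' is simply the tag
      carried by the builtin `aspa` package touched at the bottom of this
      commit.

          from spack.main import SpackCommand

          spack_list = SpackCommand('list')
          spack_find = SpackCommand('find')

          # Only packages that declare the 'proxy-app' tag.
          print(spack_list('--tags', 'proxy-app'))

          # Only *installed* packages carrying the tag (short form -t also works).
          print(spack_find('-t', 'proxy-app'))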

* Refactored Repo class to make insertion of new file caches easier.
    - Added the class FastPackageChecker. This class is a Mapping from
      package names to the stat info of their `package.py` files, memoized
      for faster access (a usage sketch follows below).

    - Extracted the creation of a ProviderIndex to its own factory function.
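
      A minimal sketch of how the new class is used (the repository path is
      illustrative, and constructing the checker directly like this is an
      assumption; inside Spack it is created by Repo.__init__, as shown in the
      diff below):

          # Stat results are cached at class level, keyed by repository path.
          checker = FastPackageChecker('/path/to/repo/packages')

          'mpich' in checker           # Mapping interface: is there a package.py for 'mpich'?
          checker['mpich'].st_mtime    # cached stat info for that package file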

* Added a cache file for tags.

    - Following what was done for providers, a TagIndex class has been added.
      This class can serialize objects to and deserialize them from JSON.
      Repo and RepoPath have a new method 'packages_with_tags' that uses the
      TagIndex to compute the list of package names carrying all the tags
      passed as arguments (a sketch follows below).

      On Ubuntu 14.04, once the cache has been built, it reduces the time for
      `spack list` from ~3 sec. to ~0.3 sec.
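
      A sketch of the resulting query path and of the on-disk format; the JSON
      shape is inferred from TagIndex.to_json below, and the example results
      assume the mock 'mpich'/'mpich2' packages modified at the bottom of this
      commit:

          import spack

          # Intersection semantics: only packages that have *all* the given tags.
          spack.repo.packages_with_tags('tag1')           # -> ['mpich', 'mpich2']
          spack.repo.packages_with_tags('tag1', 'tag2')   # -> ['mpich']

          # The cached index serializes roughly as:
          # {"tags": {"tag1": ["mpich", "mpich2"], "tag2": ["mpich"], "tag3": ["mpich2"]}}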

* Fixed colorization of `spack info`
Authored by Massimiliano Culpo on 2017-09-06 03:44:42 +02:00, committed by Todd Gamblin
parent feefdedadf, commit d1a5857a03
12 changed files with 526 additions and 132 deletions


@@ -26,15 +26,23 @@
 import argparse
 import spack.cmd
-import spack.store
 import spack.modules
+import spack.spec
+import spack.store
 from spack.util.pattern import Args

 __all__ = ['add_common_arguments']

 _arguments = {}


 def add_common_arguments(parser, list_of_arguments):
+    """Extend a parser with extra arguments
+
+    Args:
+        parser: parser to be extended
+        list_of_arguments: arguments to be added to the parser
+    """
     for argument in list_of_arguments:
         if argument not in _arguments:
             message = 'Trying to add non existing argument "{0}" to a command'
@@ -133,3 +141,7 @@ def __call__(self, parser, namespace, values, option_string=None):
 _arguments['very_long'] = Args(
     '-L', '--very-long', action='store_true',
     help='show full dependency hashes as well as versions')
+
+_arguments['tags'] = Args(
+    '-t', '--tags', action='append',
+    help='filter a package query by tags')


@@ -25,8 +25,8 @@
 import sys

 import llnl.util.tty as tty

+import spack
 import spack.cmd.common.arguments as arguments
 from spack.cmd import display_specs

 description = "list and search installed packages"
@@ -54,7 +54,7 @@ def setup_parser(subparser):
         const='deps',
         help='show full dependency DAG of installed packages')

-    arguments.add_common_arguments(subparser, ['long', 'very_long'])
+    arguments.add_common_arguments(subparser, ['long', 'very_long', 'tags'])

     subparser.add_argument('-f', '--show-flags',
                            action='store_true',
@@ -123,11 +123,16 @@ def find(parser, args):
     # Exit early if no package matches the constraint
     if not query_specs and args.constraint:
-        msg = "No package matches the query: {0}".format(
-            ' '.join(args.constraint))
+        msg = "No package matches the query: {0}"
+        msg = msg.format(' '.join(args.constraint))
         tty.msg(msg)
         return

+    # If tags have been specified on the command line, filter by tags
+    if args.tags:
+        packages_with_tags = spack.repo.packages_with_tags(*args.tags)
+        query_specs = [x for x in query_specs if x.name in packages_with_tags]
+
     # Display the result
     if sys.stdout.isatty():
         tty.msg("%d installed packages." % len(query_specs))


@@ -26,15 +26,15 @@
 import textwrap

-from six.moves import zip_longest
-from llnl.util.tty.colify import *
 import llnl.util.tty.color as color
 import spack
 import spack.fetch_strategy as fs
 import spack.spec
+from llnl.util.tty.colify import *
+from six.moves import zip_longest

 description = 'get detailed information on a particular package'
 section = 'basic'
 level = 'short'
@@ -156,9 +156,9 @@ def print_text_info(pkg):
     color.cprint('')
     color.cprint(section_title('Description:'))
     if pkg.__doc__:
-        print(pkg.format_doc(indent=4))
+        color.cprint(pkg.format_doc(indent=4))
     else:
-        print("    None")
+        color.cprint("    None")

     color.cprint(section_title('Homepage: ') + pkg.homepage)
@@ -167,6 +167,14 @@ def print_text_info(pkg):
         color.cprint('')
         color.cprint(section_title('Maintainers: ') + mnt)

+    color.cprint('')
+    color.cprint(section_title("Tags: "))
+    if hasattr(pkg, 'tags'):
+        tags = sorted(pkg.tags)
+        colify(tags, indent=4)
+    else:
+        color.cprint("    None")
+
     color.cprint('')
     color.cprint(section_title('Preferred version: '))
@@ -220,7 +228,7 @@ def print_text_info(pkg):
         if deps:
             colify(deps, indent=4)
         else:
-            print('    None')
+            color.cprint('    None')

     color.cprint('')
     color.cprint(section_title('Virtual Packages: '))
@@ -238,7 +246,9 @@ def print_text_info(pkg):
             print(line)
     else:
-        print("    None")
+        color.cprint("    None")
+
+    color.cprint('')


 def info(parser, args):


@@ -29,12 +29,15 @@
 import fnmatch
 import re
 import sys

 from six import StringIO

 import llnl.util.tty as tty
-import spack
 from llnl.util.tty.colify import colify

+import spack
+import spack.cmd.common.arguments as arguments
+
 description = "list and search available packages"
 section = "basic"
 level = "short"
@@ -60,6 +63,8 @@ def setup_parser(subparser):
         '--format', default='name_only', choices=formatters,
         help='format to be used to print the output [default: name_only]')

+    arguments.add_common_arguments(subparser, ['tags'])
+

 def filter_by_name(pkgs, args):
     """
@@ -183,5 +188,12 @@ def list(parser, args):
         pkgs = set(spack.repo.all_package_names())

     # Filter the set appropriately
     sorted_packages = filter_by_name(pkgs, args)

+    # Filter by tags
+    if args.tags:
+        packages_with_tags = set(spack.repo.packages_with_tags(*args.tags))
+        sorted_packages = set(sorted_packages) & packages_with_tags
+        sorted_packages = sorted(sorted_packages)
+
     # Print to stdout
     formatters[args.format](sorted_packages)


@@ -35,7 +35,7 @@ class FileCache(object):
     """This class manages cached data in the filesystem.

     - Cache files are fetched and stored by unique keys.  Keys can be relative
-      paths, so that thre can be some hierarchy in the cache.
+      paths, so that there can be some hierarchy in the cache.

     - The FileCache handles locking cache files for reading and writing, so
       client code need not manage locks for cache entries.


@@ -22,6 +22,7 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
+import collections
 import os
 import stat
 import shutil
@@ -31,11 +32,18 @@
 import imp
 import re
 import traceback
-from bisect import bisect_left
+import json
+
+try:
+    from collections.abc import Mapping
+except ImportError:
+    from collections import Mapping

 from types import ModuleType

 import yaml

+import llnl.util.lang
 import llnl.util.tty as tty
 from llnl.util.filesystem import *
@@ -93,6 +101,242 @@ def __getattr__(self, name):
         return getattr(self, name)


+class FastPackageChecker(Mapping):
+    """Cache that maps package names to the stats obtained on the
+    'package.py' files associated with them.
+
+    For each repository a cache is maintained at class level, and shared among
+    all instances referring to it. Update of the global cache is done lazily
+    during instance initialization.
+    """
+    #: Global cache, reused by every instance
+    _paths_cache = {}
+
+    def __init__(self, packages_path):
+        #: The path of the repository managed by this instance
+        self.packages_path = packages_path
+
+        # If the cache we need is not there yet, then build it appropriately
+        if packages_path not in self._paths_cache:
+            self._paths_cache[packages_path] = self._create_new_cache()
+
+        #: Reference to the appropriate entry in the global cache
+        self._packages_to_stats = self._paths_cache[packages_path]
+
+    def _create_new_cache(self):
+        """Create a new cache for packages in a repo.
+
+        The implementation here should try to minimize filesystem
+        calls. At the moment, it is O(number of packages) and makes
+        about one stat call per package. This is reasonably fast, and
+        avoids actually importing packages in Spack, which is slow.
+        """
+        # Create a dictionary that will store the mapping between a
+        # package name and its stat info
+        cache = {}
+        for pkg_name in os.listdir(self.packages_path):
+            # Skip non-directories in the package root.
+            pkg_dir = join_path(self.packages_path, pkg_name)
+
+            # Warn about invalid names that look like packages.
+            if not valid_module_name(pkg_name):
+                msg = 'Skipping package at {0}. '
+                msg += '"{1}" is not a valid Spack module name.'
+                tty.warn(msg.format(pkg_dir, pkg_name))
+                continue
+
+            # Construct the file name from the directory
+            pkg_file = os.path.join(
+                self.packages_path, pkg_name, package_file_name
+            )
+
+            # Use stat here to avoid lots of calls to the filesystem.
+            try:
+                sinfo = os.stat(pkg_file)
+            except OSError as e:
+                if e.errno == errno.ENOENT:
+                    # No package.py file here.
+                    continue
+                elif e.errno == errno.EACCES:
+                    tty.warn("Can't read package file %s." % pkg_file)
+                    continue
+                raise e
+
+            # If it's not a file, skip it.
+            if stat.S_ISDIR(sinfo.st_mode):
+                continue
+
+            # If it is a file, then save the stats under the
+            # appropriate key
+            cache[pkg_name] = sinfo
+
+        return cache
+
+    def __getitem__(self, item):
+        return self._packages_to_stats[item]
+
+    def __iter__(self):
+        return iter(self._packages_to_stats)
+
+    def __len__(self):
+        return len(self._packages_to_stats)
+
+
+class TagIndex(Mapping):
+    """Maps tags to list of packages."""
+
+    def __init__(self):
+        self._tag_dict = collections.defaultdict(list)
+
+    def to_json(self, stream):
+        json.dump({'tags': self._tag_dict}, stream)
+
+    @staticmethod
+    def from_json(stream):
+        d = json.load(stream)
+
+        r = TagIndex()
+
+        for tag, list in d['tags'].items():
+            r[tag].extend(list)
+
+        return r
+
+    def __getitem__(self, item):
+        return self._tag_dict[item]
+
+    def __iter__(self):
+        return iter(self._tag_dict)
+
+    def __len__(self):
+        return len(self._tag_dict)
+
+    def update_package(self, pkg_name):
+        """Updates a package in the tag index.
+
+        Args:
+            pkg_name (str): name of the package to be removed from the index
+        """
+        package = spack.repo.get(pkg_name)
+
+        # Remove the package from the list of packages, if present
+        for pkg_list in self._tag_dict.values():
+            if pkg_name in pkg_list:
+                pkg_list.remove(pkg_name)
+
+        # Add it again under the appropriate tags
+        for tag in getattr(package, 'tags', []):
+            self._tag_dict[tag].append(package.name)
+
+
+@llnl.util.lang.memoized
+def make_provider_index_cache(packages_path, namespace):
+    """Lazily updates the provider index cache associated with a repository,
+    if need be, then returns it. Caches results for later look-ups.
+
+    Args:
+        packages_path: path of the repository
+        namespace: namespace of the repository
+
+    Returns:
+        instance of ProviderIndex
+    """
+    # Map that goes from package names to stat info
+    fast_package_checker = FastPackageChecker(packages_path)
+
+    # Filename of the provider index cache
+    cache_filename = 'providers/{0}-index.yaml'.format(namespace)
+
+    # Compute which packages needs to be updated in the cache
+    index_mtime = spack.misc_cache.mtime(cache_filename)
+
+    needs_update = [
+        x for x, sinfo in fast_package_checker.items()
+        if sinfo.st_mtime > index_mtime
+    ]
+
+    # Read the old ProviderIndex, or make a new one.
+    index_existed = spack.misc_cache.init_entry(cache_filename)
+
+    if index_existed and not needs_update:
+        # If the provider index exists and doesn't need an update
+        # just read from it
+        with spack.misc_cache.read_transaction(cache_filename) as f:
+            index = ProviderIndex.from_yaml(f)
+    else:
+        # Otherwise we need a write transaction to update it
+        with spack.misc_cache.write_transaction(cache_filename) as (old, new):
+            index = ProviderIndex.from_yaml(old) if old else ProviderIndex()
+
+            for pkg_name in needs_update:
+                namespaced_name = '{0}.{1}'.format(namespace, pkg_name)
+                index.remove_provider(namespaced_name)
+                index.update(namespaced_name)
+
+            index.to_yaml(new)
+
+    return index
+
+
+@llnl.util.lang.memoized
+def make_tag_index_cache(packages_path, namespace):
+    """Lazily updates the tag index cache associated with a repository,
+    if need be, then returns it. Caches results for later look-ups.
+
+    Args:
+        packages_path: path of the repository
+        namespace: namespace of the repository
+
+    Returns:
+        instance of TagIndex
+    """
+    # Map that goes from package names to stat info
+    fast_package_checker = FastPackageChecker(packages_path)
+
+    # Filename of the provider index cache
+    cache_filename = 'tags/{0}-index.json'.format(namespace)
+
+    # Compute which packages needs to be updated in the cache
+    index_mtime = spack.misc_cache.mtime(cache_filename)
+
+    needs_update = [
+        x for x, sinfo in fast_package_checker.items()
+        if sinfo.st_mtime > index_mtime
+    ]
+
+    # Read the old ProviderIndex, or make a new one.
+    index_existed = spack.misc_cache.init_entry(cache_filename)
+
+    if index_existed and not needs_update:
+        # If the provider index exists and doesn't need an update
+        # just read from it
+        with spack.misc_cache.read_transaction(cache_filename) as f:
+            index = TagIndex.from_json(f)
+    else:
+        # Otherwise we need a write transaction to update it
+        with spack.misc_cache.write_transaction(cache_filename) as (old, new):
+            index = TagIndex.from_json(old) if old else TagIndex()

+            for pkg_name in needs_update:
+                namespaced_name = '{0}.{1}'.format(namespace, pkg_name)
+                index.update_package(namespaced_name)
+
+            index.to_json(new)
+
+    return index
+
+
 class RepoPath(object):
     """A RepoPath is a list of repos that function as one.
@@ -220,6 +464,12 @@ def all_package_names(self):
             self._all_package_names = sorted(all_pkgs, key=lambda n: n.lower())
         return self._all_package_names

+    def packages_with_tags(self, *tags):
+        r = set()
+        for repo in self.repos:
+            r |= set(repo.packages_with_tags(*tags))
+        return sorted(r)
+
     def all_packages(self):
         for name in self.all_package_names():
             yield self.get(name)
@@ -422,21 +672,18 @@ def check(condition, msg):
         self._classes = {}
         self._instances = {}

-        # list of packages that are newer than the index.
-        self._needs_update = []
+        # Maps that goes from package name to corresponding file stat
+        self._fast_package_checker = FastPackageChecker(self.packages_path)

-        # Index of virtual dependencies
+        # Index of virtual dependencies, computed lazily
         self._provider_index = None

-        # Cached list of package names.
-        self._all_package_names = None
+        # Index of tags, computed lazily
+        self._tag_index = None

         # make sure the namespace for packages in this repo exists.
         self._create_namespace()

-        # Unique filename for cache of virtual dependency providers
-        self._cache_file = 'providers/%s-index.yaml' % self.namespace
-
     def _create_namespace(self):
         """Create this repo's namespace module and insert it into sys.modules.
@@ -617,41 +864,28 @@ def purge(self):
         """Clear entire package instance cache."""
         self._instances.clear()

-    def _update_provider_index(self):
-        # Check modification dates of all packages
-        self._fast_package_check()
-
-        def read():
-            with open(self.index_file) as f:
-                self._provider_index = ProviderIndex.from_yaml(f)
-
-        # Read the old ProviderIndex, or make a new one.
-        key = self._cache_file
-        index_existed = spack.misc_cache.init_entry(key)
-        if index_existed and not self._needs_update:
-            with spack.misc_cache.read_transaction(key) as f:
-                self._provider_index = ProviderIndex.from_yaml(f)
-        else:
-            with spack.misc_cache.write_transaction(key) as (old, new):
-                if old:
-                    self._provider_index = ProviderIndex.from_yaml(old)
-                else:
-                    self._provider_index = ProviderIndex()
-
-                for pkg_name in self._needs_update:
-                    namespaced_name = '%s.%s' % (self.namespace, pkg_name)
-                    self._provider_index.remove_provider(namespaced_name)
-                    self._provider_index.update(namespaced_name)
-
-                self._provider_index.to_yaml(new)
-
     @property
     def provider_index(self):
         """A provider index with names *specific* to this repo."""
         if self._provider_index is None:
-            self._update_provider_index()
+            self._provider_index = make_provider_index_cache(
+                self.packages_path, self.namespace
+            )
         return self._provider_index

+    @property
+    def tag_index(self):
+        """A provider index with names *specific* to this repo."""
+        if self._tag_index is None:
+            self._tag_index = make_tag_index_cache(
+                self.packages_path, self.namespace
+            )
+        return self._tag_index
+
     @_autospec
     def providers_for(self, vpkg_spec):
         providers = self.provider_index.providers_for(vpkg_spec)
@@ -689,73 +923,18 @@ def filename_for_package_name(self, spec):
         pkg_dir = self.dirname_for_package_name(spec.name)
         return join_path(pkg_dir, package_file_name)

-    def _fast_package_check(self):
-        """List packages in the repo and check whether index is up to date.
-
-        Both of these opreations require checking all `package.py`
-        files so we do them at the same time.  We list the repo
-        directory and look at package.py files, and we compare the
-        index modification date with the ost recently modified package
-        file, storing the result.
-
-        The implementation here should try to minimize filesystem
-        calls.  At the moment, it is O(number of packages) and makes
-        about one stat call per package.  This is resonably fast, and
-        avoids actually importing packages in Spack, which is slow.
-        """
-        if self._all_package_names is None:
-            self._all_package_names = []
-
-            # Get index modification time.
-            index_mtime = spack.misc_cache.mtime(self._cache_file)
-
-            for pkg_name in os.listdir(self.packages_path):
-                # Skip non-directories in the package root.
-                pkg_dir = join_path(self.packages_path, pkg_name)
-
-                # Warn about invalid names that look like packages.
-                if not valid_module_name(pkg_name):
-                    msg = ("Skipping package at %s. "
-                           "'%s' is not a valid Spack module name.")
-                    tty.warn(msg % (pkg_dir, pkg_name))
-                    continue
-
-                # construct the file name from the directory
-                pkg_file = join_path(
-                    self.packages_path, pkg_name, package_file_name)
-
-                # Use stat here to avoid lots of calls to the filesystem.
-                try:
-                    sinfo = os.stat(pkg_file)
-                except OSError as e:
-                    if e.errno == errno.ENOENT:
-                        # No package.py file here.
-                        continue
-                    elif e.errno == errno.EACCES:
-                        tty.warn("Can't read package file %s." % pkg_file)
-                        continue
-                    raise e
-
-                # if it's not a file, skip it.
-                if stat.S_ISDIR(sinfo.st_mode):
-                    continue
-
-                # All checks passed.  Add it to the list.
-                self._all_package_names.append(pkg_name)
-
-                # record the package if it is newer than the index.
-                if sinfo.st_mtime > index_mtime:
-                    self._needs_update.append(pkg_name)
-
-            self._all_package_names.sort()
-
-        return self._all_package_names
-
     def all_package_names(self):
         """Returns a sorted list of all package names in the Repo."""
-        self._fast_package_check()
-        return self._all_package_names
+        return sorted(self._fast_package_checker.keys())
+
+    def packages_with_tags(self, *tags):
+        v = set(self.all_package_names())
+        index = self.tag_index
+
+        for t in tags:
+            v &= set(index[t])
+
+        return sorted(v)

     def all_packages(self):
         """Iterator over all packages in the repository.
@@ -768,16 +947,7 @@ def all_packages(self):
     def exists(self, pkg_name):
         """Whether a package with the supplied name exists."""
-        if self._all_package_names:
-            # This does a binary search in the sorted list.
-            idx = bisect_left(self.all_package_names(), pkg_name)
-            return (idx < len(self._all_package_names) and
-                    self._all_package_names[idx] == pkg_name)
-
-        # If we haven't generated the full package list, don't.
-        # Just check whether the file exists.
-        filename = self.filename_for_package_name(pkg_name)
-        return os.path.exists(filename)
+        return pkg_name in self._fast_package_checker

     def is_virtual(self, pkg_name):
         """True if the package with this name is virtual, False otherwise."""


@@ -22,12 +22,40 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
+import argparse
+
+import pytest
+
 import spack.cmd.find
 from spack.util.pattern import Bunch


+@pytest.fixture(scope='module')
+def parser():
+    """Returns the parser for the module command"""
+    prs = argparse.ArgumentParser()
+    spack.cmd.find.setup_parser(prs)
+    return prs
+
+
+@pytest.fixture()
+def specs():
+    s = []
+    return s
+
+
+@pytest.fixture()
+def mock_display(monkeypatch, specs):
+    """Monkeypatches the display function to return its first argument"""
+    def display(x, *args, **kwargs):
+        specs.extend(x)
+
+    monkeypatch.setattr(spack.cmd.find, 'display_specs', display)
+
+
 def test_query_arguments():
     query_arguments = spack.cmd.find.query_arguments

     # Default arguments
     args = Bunch(
         only_missing=False,
@@ -36,6 +64,7 @@ def test_query_arguments():
         explicit=False,
         implicit=False
     )
+
     q_args = query_arguments(args)
     assert 'installed' in q_args
     assert 'known' in q_args
@@ -43,11 +72,39 @@ def test_query_arguments():
     assert q_args['installed'] is True
     assert q_args['known'] is any
     assert q_args['explicit'] is any

     # Check that explicit works correctly
     args.explicit = True
     q_args = query_arguments(args)
     assert q_args['explicit'] is True

     args.explicit = False
     args.implicit = True
     q_args = query_arguments(args)
     assert q_args['explicit'] is False
+
+
+@pytest.mark.usefixtures('database', 'mock_display')
+class TestFindWithTags(object):
+
+    def test_tag1(self, parser, specs):
+        args = parser.parse_args(['--tags', 'tag1'])
+        spack.cmd.find.find(parser, args)
+
+        assert len(specs) == 2
+        assert 'mpich' in [x.name for x in specs]
+        assert 'mpich2' in [x.name for x in specs]
+
+    def test_tag2(self, parser, specs):
+        args = parser.parse_args(['--tags', 'tag2'])
+        spack.cmd.find.find(parser, args)
+
+        assert len(specs) == 1
+        assert 'mpich' in [x.name for x in specs]
+
+    def test_tag2_tag3(self, parser, specs):
+        args = parser.parse_args(['--tags', 'tag2', '--tags', 'tag3'])
+        spack.cmd.find.find(parser, args)
+
+        assert len(specs) == 0


@@ -22,13 +22,39 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
+import argparse
+
 import pytest

+import spack.cmd.info
 from spack.main import SpackCommand

 info = SpackCommand('info')


+@pytest.fixture(scope='module')
+def parser():
+    """Returns the parser for the module command"""
+    prs = argparse.ArgumentParser()
+    spack.cmd.info.setup_parser(prs)
+    return prs
+
+
+@pytest.fixture()
+def info_lines():
+    lines = []
+    return lines
+
+
+@pytest.fixture()
+def mock_print(monkeypatch, info_lines):
+
+    def _print(*args):
+        info_lines.extend(args)
+
+    monkeypatch.setattr(spack.cmd.info.color, 'cprint', _print, raising=False)
+
+
 @pytest.mark.parametrize('pkg', [
     'openmpi',
     'trilinos',
@@ -38,3 +64,29 @@
 ])
 def test_it_just_runs(pkg):
     info(pkg)
+
+
+@pytest.mark.parametrize('pkg_query', [
+    'hdf5',
+    'cloverleaf3d',
+    'trilinos'
+])
+@pytest.mark.usefixtures('mock_print')
+def test_info_fields(pkg_query, parser, info_lines):
+
+    expected_fields = (
+        'Description:',
+        'Homepage:',
+        'Safe versions:',
+        'Variants:',
+        'Installation Phases:',
+        'Virtual Packages:',
+        'Tags:'
+    )
+
+    args = parser.parse_args([pkg_query])
+    spack.cmd.info.info(parser, args)
+
+    for text in expected_fields:
+        match = [x for x in info_lines if text in x]
+        assert match


@@ -0,0 +1,73 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import argparse
+
+import pytest
+
+import spack.cmd.list
+
+
+@pytest.fixture(scope='module')
+def parser():
+    """Returns the parser for the module command"""
+    prs = argparse.ArgumentParser()
+    spack.cmd.list.setup_parser(prs)
+    return prs
+
+
+@pytest.fixture()
+def pkg_names():
+    pkg_names = []
+    return pkg_names
+
+
+@pytest.fixture()
+def mock_name_only(monkeypatch, pkg_names):
+
+    def name_only(x):
+        pkg_names.extend(x)
+
+    monkeypatch.setattr(spack.cmd.list, 'name_only', name_only)
+    monkeypatch.setitem(spack.cmd.list.formatters, 'name_only', name_only)
+
+
+@pytest.mark.usefixtures('mock_name_only')
+class TestListCommand(object):
+
+    def test_list_without_filters(self, parser, pkg_names):
+        args = parser.parse_args([])
+        spack.cmd.list.list(parser, args)
+
+        assert pkg_names
+        assert 'cloverleaf3d' in pkg_names
+        assert 'hdf5' in pkg_names
+
+    def test_list_with_filters(self, parser, pkg_names):
+        args = parser.parse_args(['--tags', 'proxy-app'])
+        spack.cmd.list.list(parser, args)
+
+        assert pkg_names
+        assert 'cloverleaf3d' in pkg_names
+        assert 'hdf5' not in pkg_names


@@ -31,6 +31,8 @@ class Mpich(Package):
     list_url = "http://www.mpich.org/static/downloads/"
     list_depth = 2

+    tags = ['tag1', 'tag2']
+
     variant('debug', default=False,
             description="Compile MPICH with debug flags.")


@@ -31,6 +31,8 @@ class Mpich2(Package):
     list_url = "http://www.mpich.org/static/downloads/"
     list_depth = 2

+    tags = ['tag1', 'tag3']
+
     version('1.5', '9c5d5d4fe1e17dd12153f40bc5b6dbc0')
     version('1.4', 'foobarbaz')
     version('1.3', 'foobarbaz')


@@ -31,7 +31,6 @@ class Aspa(MakefilePackage):
     heterogeneous MPMD materials science simulations will place important
     demands upon the exascale ecosystem that need to be identified and
     quantified.
-    tags = proxy-app
     """

     tags = ['proxy-app']

     homepage = "http://www.exmatex.org/aspa.html"