Merge pull request #1135 from epfl-scitas/features/module_refresh_reworking

spack module : reworking of the command
Todd Gamblin committed 2016-07-19 09:27:58 -07:00 (via GitHub)
commit c4b4ce7d85
14 changed files with 624 additions and 276 deletions

View file

@@ -27,16 +27,18 @@
 import sys
 import llnl.util.tty as tty
-from llnl.util.lang import attr_setdefault
 import spack
-import spack.spec
 import spack.config
+import spack.spec
+from llnl.util.lang import *
+from llnl.util.tty.colify import *
+from llnl.util.tty.color import *
 
 #
 # Settings for commands that modify configuration
 #
-# Commands that modify confguration By default modify the *highest* priority scope.
+# Commands that modify confguration By default modify the *highest*
+# priority scope.
 default_modify_scope = spack.config.highest_precedence_scope().name
 # Commands that list confguration list *all* scopes by default.
 default_list_scope = None
@@ -107,11 +109,11 @@ def parse_specs(args, **kwargs):
         return specs
 
-    except spack.parse.ParseError, e:
+    except spack.parse.ParseError as e:
         tty.error(e.message, e.string, e.pos * " " + "^")
         sys.exit(1)
 
-    except spack.spec.SpecError, e:
+    except spack.spec.SpecError as e:
         tty.error(e.message)
         sys.exit(1)
@@ -127,7 +129,7 @@ def elide_list(line_list, max_num=10):
            [1, 2, 3, '...', 6]
     """
     if len(line_list) > max_num:
-        return line_list[:max_num-1] + ['...'] + line_list[-1:]
+        return line_list[:max_num - 1] + ['...'] + line_list[-1:]
     else:
         return line_list
@@ -145,3 +147,97 @@ def disambiguate_spec(spec):
         tty.die(*args)
 
     return matching_specs[0]
+
+
+def ask_for_confirmation(message):
+    while True:
+        tty.msg(message + '[y/n]')
+        choice = raw_input().lower()
+        if choice == 'y':
+            break
+        elif choice == 'n':
+            raise SystemExit('Operation aborted')
+        tty.warn('Please reply either "y" or "n"')
+
+
+def gray_hash(spec, length):
+    return colorize('@K{%s}' % spec.dag_hash(length))
+
+
+def display_specs(specs, **kwargs):
+    mode = kwargs.get('mode', 'short')
+    hashes = kwargs.get('long', False)
+    namespace = kwargs.get('namespace', False)
+    flags = kwargs.get('show_flags', False)
+    variants = kwargs.get('variants', False)
+
+    hlen = 7
+    if kwargs.get('very_long', False):
+        hashes = True
+        hlen = None
+
+    nfmt = '.' if namespace else '_'
+    ffmt = '$%+' if flags else ''
+    vfmt = '$+' if variants else ''
+    format_string = '$%s$@%s%s' % (nfmt, ffmt, vfmt)
+
+    # Make a dict with specs keyed by architecture and compiler.
+    index = index_by(specs, ('architecture', 'compiler'))
+
+    # Traverse the index and print out each package
+    for i, (architecture, compiler) in enumerate(sorted(index)):
+        if i > 0:
+            print
+
+        header = "%s{%s} / %s{%s}" % (spack.spec.architecture_color,
+                                      architecture, spack.spec.compiler_color,
+                                      compiler)
+        tty.hline(colorize(header), char='-')
+
+        specs = index[(architecture, compiler)]
+        specs.sort()
+
+        abbreviated = [s.format(format_string, color=True) for s in specs]
+        if mode == 'paths':
+            # Print one spec per line along with prefix path
+            width = max(len(s) for s in abbreviated)
+            width += 2
+            format = "    %%-%ds%%s" % width
+
+            for abbrv, spec in zip(abbreviated, specs):
+                if hashes:
+                    print(gray_hash(spec, hlen), )
+                print(format % (abbrv, spec.prefix))
+
+        elif mode == 'deps':
+            for spec in specs:
+                print(spec.tree(
+                    format=format_string,
+                    color=True,
+                    indent=4,
+                    prefix=(lambda s: gray_hash(s, hlen)) if hashes else None))
+
+        elif mode == 'short':
+            # Print columns of output if not printing flags
+            if not flags:
+
+                def fmt(s):
+                    string = ""
+                    if hashes:
+                        string += gray_hash(s, hlen) + ' '
+                    string += s.format('$-%s$@%s' % (nfmt, vfmt), color=True)
+                    return string
+
+                colify(fmt(s) for s in specs)
+            # Print one entry per line if including flags
+            else:
+                for spec in specs:
+                    # Print the hash if necessary
+                    hsh = gray_hash(spec, hlen) + ' ' if hashes else ''
+                    print(hsh + spec.format(format_string, color=True) + '\n')
+
+        else:
+            raise ValueError(
+                "Invalid mode for display_specs: %s. Must be one of (paths,"
+                "deps, short)." % mode)  # NOQA: ignore=E501

View file

@@ -0,0 +1,24 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################

View file

@@ -0,0 +1,96 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import argparse

import spack.modules
from spack.util.pattern import Bunch

__all__ = ['add_common_arguments']

_arguments = {}


def add_common_arguments(parser, list_of_arguments):
    for argument in list_of_arguments:
        if argument not in _arguments:
            message = 'Trying to add the non existing argument "{0}" to a command'  # NOQA: ignore=E501
            raise KeyError(message.format(argument))
        x = _arguments[argument]
        parser.add_argument(*x.flags, **x.kwargs)


class ConstraintAction(argparse.Action):
    """Constructs a list of specs based on a constraint given on the command line

    An instance of this class is supposed to be used as an argument action
    in a parser. It will read a constraint and will attach a list of matching
    specs to the namespace
    """
    qualifiers = {}

    def __call__(self, parser, namespace, values, option_string=None):
        # Query specs from command line
        d = self.qualifiers.get(namespace.subparser_name, {})
        specs = [s for s in spack.installed_db.query(**d)]
        values = ' '.join(values)
        if values:
            specs = [x for x in specs if x.satisfies(values, strict=True)]
        namespace.specs = specs

parms = Bunch(
    flags=('constraint',),
    kwargs={
        'nargs': '*',
        'help': 'Constraint to select a subset of installed packages',
        'action': ConstraintAction
    })
_arguments['constraint'] = parms

parms = Bunch(
    flags=('-m', '--module-type'),
    kwargs={
        'help': 'Type of module files',
        'default': 'tcl',
        'choices': spack.modules.module_types
    })
_arguments['module_type'] = parms

parms = Bunch(
    flags=('-y', '--yes-to-all'),
    kwargs={
        'action': 'store_true',
        'dest': 'yes_to_all',
        'help': 'Assume "yes" is the answer to every confirmation asked to the user.'  # NOQA: ignore=E501
    })
_arguments['yes_to_all'] = parms

parms = Bunch(
    flags=('-r', '--dependencies'),
    kwargs={
        'action': 'store_true',
        'dest': 'recurse_dependencies',
        'help': 'Recursively traverse spec dependencies'
    })
_arguments['recurse_dependencies'] = parms
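
Aside: a small illustration (not from this commit) of how a subcommand can pull in these shared arguments; the 'example' subcommand and the final parse_args call are hypothetical and assume a working installed-package database.

import argparse

import spack.cmd.common.arguments as arguments

parser = argparse.ArgumentParser()
sp = parser.add_subparsers(dest='subparser_name')
example_parser = sp.add_parser('example')  # hypothetical subcommand

# Reuse the shared options exactly as cmd/module.py does below.
arguments.add_common_arguments(
    example_parser, ['constraint', 'module_type', 'yes_to_all'])

# ConstraintAction attaches the matching installed specs to the namespace.
args = parser.parse_args(['example', 'mpileaks'])
print('%d matching specs' % len(args.specs))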

View file

@@ -31,7 +31,7 @@
 from llnl.util.lang import *
 from llnl.util.tty.colify import *
 from llnl.util.tty.color import *
-from llnl.util.lang import *
+from spack.cmd import display_specs
 
 description = "Find installed spack packages"
@@ -104,89 +104,6 @@ def setup_parser(subparser):
         help='optional specs to filter results')
 
 
-def gray_hash(spec, length):
-    return colorize('@K{%s}' % spec.dag_hash(length))
-
-
-def display_specs(specs, **kwargs):
-    mode = kwargs.get('mode', 'short')
-    hashes = kwargs.get('long', False)
-    namespace = kwargs.get('namespace', False)
-    flags = kwargs.get('show_flags', False)
-    variants = kwargs.get('variants', False)
-
-    hlen = 7
-    if kwargs.get('very_long', False):
-        hashes = True
-        hlen = None
-
-    nfmt = '.' if namespace else '_'
-    ffmt = '$%+' if flags else ''
-    vfmt = '$+' if variants else ''
-    format_string = '$%s$@%s%s' % (nfmt, ffmt, vfmt)
-
-    # Make a dict with specs keyed by architecture and compiler.
-    index = index_by(specs, ('architecture', 'compiler'))
-
-    # Traverse the index and print out each package
-    for i, (architecture, compiler) in enumerate(sorted(index)):
-        if i > 0:
-            print
-
-        header = "%s{%s} / %s{%s}" % (spack.spec.architecture_color,
-                                      architecture, spack.spec.compiler_color,
-                                      compiler)
-        tty.hline(colorize(header), char='-')
-
-        specs = index[(architecture, compiler)]
-        specs.sort()
-
-        abbreviated = [s.format(format_string, color=True) for s in specs]
-        if mode == 'paths':
-            # Print one spec per line along with prefix path
-            width = max(len(s) for s in abbreviated)
-            width += 2
-            format = "    %%-%ds%%s" % width
-
-            for abbrv, spec in zip(abbreviated, specs):
-                if hashes:
-                    print(gray_hash(spec, hlen), )
-                print(format % (abbrv, spec.prefix))
-
-        elif mode == 'deps':
-            for spec in specs:
-                print(spec.tree(
-                    format=format_string,
-                    color=True,
-                    indent=4,
-                    prefix=(lambda s: gray_hash(s, hlen)) if hashes else None))
-
-        elif mode == 'short':
-            # Print columns of output if not printing flags
-            if not flags:
-
-                def fmt(s):
-                    string = ""
-                    if hashes:
-                        string += gray_hash(s, hlen) + ' '
-                    string += s.format('$-%s$@%s' % (nfmt, vfmt), color=True)
-                    return string
-
-                colify(fmt(s) for s in specs)
-            # Print one entry per line if including flags
-            else:
-                for spec in specs:
-                    # Print the hash if necessary
-                    hsh = gray_hash(spec, hlen) + ' ' if hashes else ''
-                    print(hsh + spec.format(format_string, color=True) + '\n')
-
-        else:
-            raise ValueError(
-                "Invalid mode for display_specs: %s. Must be one of (paths,"
-                "deps, short)." % mode)  # NOQA: ignore=E501
 
 
 def query_arguments(args):
     # Check arguments
     if args.explicit and args.implicit:

View file

@@ -23,135 +23,233 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 from __future__ import print_function
 
+import collections
 import os
 import shutil
 import sys
 
 import llnl.util.tty as tty
 import spack.cmd
-from llnl.util.filesystem import mkdirp
+import spack.cmd.common.arguments as arguments
+import llnl.util.filesystem as filesystem
 from spack.modules import module_types
-from spack.util.string import *
 
-description = "Manipulate modules and dotkits."
+description = "Manipulate module files"
+
+# Dictionary that will be populated with the list of sub-commands
+# Each sub-command must be callable and accept 3 arguments :
+# - mtype : the type of the module file
+# - specs : the list of specs to be processed
+# - args : namespace containing the parsed command line arguments
+callbacks = {}
+
+
+def subcommand(subparser_name):
+    """Registers a function in the callbacks dictionary"""
+    def decorator(callback):
+        callbacks[subparser_name] = callback
+        return callback
+    return decorator
 
 
 def setup_parser(subparser):
-    sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='module_command')
+    sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='subparser_name')
 
-    sp.add_parser('refresh', help='Regenerate all module files.')
-
-    find_parser = sp.add_parser('find', help='Find module files for packages.')
-    find_parser.add_argument(
-        'module_type',
-        help="Type of module to find file for. [" +
-        '|'.join(module_types) + "]")
-    find_parser.add_argument(
-        '-r', '--dependencies', action='store_true',
-        dest='recurse_dependencies',
-        help='Recursively traverse dependencies for modules to load.')
-    find_parser.add_argument(
-        '-s', '--shell', action='store_true', dest='shell',
-        help='Generate shell script (instead of input for module command)')
-    find_parser.add_argument(
-        '-p', '--prefix', dest='prefix',
-        help='Prepend to module names when issuing module load commands')
-    find_parser.add_argument(
-        'spec', nargs='+',
-        help='spec to find a module file for.')
+    # spack module refresh
+    refresh_parser = sp.add_parser('refresh', help='Regenerate module files')
+    refresh_parser.add_argument(
+        '--delete-tree',
+        help='Delete the module file tree before refresh',
+        action='store_true'
+    )
+    arguments.add_common_arguments(
+        refresh_parser, ['constraint', 'module_type', 'yes_to_all']
+    )
+
+    # spack module find
+    find_parser = sp.add_parser('find', help='Find module files for packages')
+    arguments.add_common_arguments(find_parser, ['constraint', 'module_type'])
+
+    # spack module rm
+    rm_parser = sp.add_parser('rm', help='Remove module files')
+    arguments.add_common_arguments(
+        rm_parser, ['constraint', 'module_type', 'yes_to_all']
+    )
+
+    # spack module loads
+    loads_parser = sp.add_parser(
+        'loads',
+        help='Prompt the list of modules associated with a constraint'
+    )
+    loads_parser.add_argument(
+        '--input-only', action='store_false', dest='shell',
+        help='Generate input for module command (instead of a shell script)'
+    )
+    loads_parser.add_argument(
+        '-p', '--prefix', dest='prefix', default='',
+        help='Prepend to module names when issuing module load commands'
+    )
+    arguments.add_common_arguments(
+        loads_parser, ['constraint', 'module_type', 'recurse_dependencies']
+    )
 
 
-def module_find(mtype, flags, spec_array):
-    """Look at all installed packages and see if the spec provided
-    matches any. If it does, check whether there is a module file
-    of type <mtype> there, and print out the name that the user
-    should type to use that package's module.
-
-    prefix:
-        Prepend this to module names when issuing "module load" commands.
-        Some systems seem to need it.
-    """
-    if mtype not in module_types:
-        tty.die("Invalid module type: '%s'. Options are %s" %
-                (mtype, comma_or(module_types)))
-
-    # --------------------------------------
-    def _find_modules(spec, modules_list):
-        """Finds all modules and sub-modules for a spec"""
-        if str(spec.version) == 'system':
-            # No Spack module for system-installed packages
-            return
-
-        if flags.recurse_dependencies:
-            for dep in spec.dependencies():
-                _find_modules(dep, modules_list)
-
-        mod = module_types[mtype](spec)
-        if not os.path.isfile(mod.file_name):
-            tty.die("No %s module is installed for %s" % (mtype, spec))
-        modules_list.append((spec, mod))
-
-    # --------------------------------------
-    raw_specs = spack.cmd.parse_specs(spec_array)
-    modules = set()  # Modules we will load
-    seen = set()
-    for raw_spec in raw_specs:
-
-        # ----------- Make sure the spec only resolves to ONE thing
-        specs = spack.installed_db.query(raw_spec)
-        if len(specs) == 0:
-            tty.die("No installed packages match spec %s" % raw_spec)
-
-        if len(specs) > 1:
-            tty.error("Multiple matches for spec %s. Choose one:" % raw_spec)
-            for s in specs:
-                sys.stderr.write(s.tree(color=True))
-            sys.exit(1)
-
-        spec = specs[0]
-
-        # ----------- Chase down modules for it and all its dependencies
-        modules_dups = list()
-        _find_modules(spec, modules_dups)
-
-        # Remove duplicates while keeping order
-        modules_unique = list()
-        for spec,mod in modules_dups:
-            if mod.use_name not in seen:
-                modules_unique.append((spec,mod))
-                seen.add(mod.use_name)
-
-        # Output...
-        if flags.shell:
-            module_cmd = {'tcl': 'module load', 'dotkit': 'dotkit use'}[mtype]
-        for spec,mod in modules_unique:
-            if flags.shell:
-                print('# %s' % spec.format())
-                print('%s %s%s' % (module_cmd, flags.prefix, mod.use_name))
-            else:
-                print(mod.use_name)
+class MultipleMatches(Exception):
+    pass
 
 
-def module_refresh():
-    """Regenerate all module files for installed packages known to
-    spack (some packages may no longer exist)."""
-    specs = [s for s in spack.installed_db.query(installed=True, known=True)]
-
-    for name, cls in module_types.items():
-        tty.msg("Regenerating %s module files." % name)
-        if os.path.isdir(cls.path):
-            shutil.rmtree(cls.path, ignore_errors=False)
-        mkdirp(cls.path)
-        for spec in specs:
-            cls(spec).write()
+class NoMatch(Exception):
+    pass
+
+
+@subcommand('loads')
+def loads(mtype, specs, args):
+    """Prompt the list of modules associated with a list of specs"""
+    # Get a comprehensive list of specs
+    if args.recurse_dependencies:
+        specs_from_user_constraint = specs[:]
+        specs = []
+        # FIXME : during module file creation nodes seem to be visited
+        # FIXME : multiple times even if cover='nodes' is given. This
+        # FIXME : work around permits to get a unique list of spec anyhow.
+        # FIXME : (same problem as in spack/modules.py)
+        seen = set()
+        seen_add = seen.add
+        for spec in specs_from_user_constraint:
+            specs.extend(
+                [item for item in spec.traverse(order='post', cover='nodes') if not (item in seen or seen_add(item))]  # NOQA: ignore=E501
+            )
+
+    module_cls = module_types[mtype]
+    modules = [(spec, module_cls(spec).use_name)
+               for spec in specs if os.path.exists(module_cls(spec).file_name)]
+
+    module_commands = {
+        'tcl': 'module load ',
+        'dotkit': 'dotkit use '
+    }
+
+    d = {
+        'command': '' if not args.shell else module_commands[mtype],
+        'prefix': args.prefix
+    }
+
+    prompt_template = '{comment}{command}{prefix}{name}'
+    for spec, mod in modules:
+        d['comment'] = '' if not args.shell else '# {0}\n'.format(
+            spec.format())
+        d['name'] = mod
+        print(prompt_template.format(**d))
+
+
+@subcommand('find')
+def find(mtype, specs, args):
+    """
+    Look at all installed packages and see if the spec provided
+    matches any. If it does, check whether there is a module file
+    of type <mtype> there, and print out the name that the user
+    should type to use that package's module.
+    """
+    if len(specs) == 0:
+        raise NoMatch()
+
+    if len(specs) > 1:
+        raise MultipleMatches()
+
+    spec = specs.pop()
+    mod = module_types[mtype](spec)
+    if not os.path.isfile(mod.file_name):
+        tty.die("No %s module is installed for %s" % (mtype, spec))
+    print(mod.use_name)
+
+
+@subcommand('rm')
+def rm(mtype, specs, args):
+    """Deletes module files associated with items in specs"""
+    module_cls = module_types[mtype]
+    specs_with_modules = [
+        spec for spec in specs if os.path.exists(module_cls(spec).file_name)]
+    modules = [module_cls(spec) for spec in specs_with_modules]
+
+    if not modules:
+        tty.msg('No module file matches your query')
+        raise SystemExit(1)
+
+    # Ask for confirmation
+    if not args.yes_to_all:
+        tty.msg('You are about to remove {0} module files the following specs:\n'.format(mtype))  # NOQA: ignore=E501
+        spack.cmd.display_specs(specs_with_modules, long=True)
+        print('')
+        spack.cmd.ask_for_confirmation('Do you want to proceed ? ')
+
+    # Remove the module files
+    for s in modules:
+        s.remove()
+
+
+@subcommand('refresh')
+def refresh(mtype, specs, args):
+    """Regenerate module files for item in specs"""
+    # Prompt a message to the user about what is going to change
+    if not specs:
+        tty.msg('No package matches your query')
+        return
+
+    if not args.yes_to_all:
+        tty.msg('You are about to regenerate {name} module files for the following specs:\n'.format(name=mtype))  # NOQA: ignore=E501
+        spack.cmd.display_specs(specs, long=True)
+        print('')
+        spack.cmd.ask_for_confirmation('Do you want to proceed ? ')
+
+    cls = module_types[mtype]
+
+    # Detect name clashes
+    writers = [cls(spec) for spec in specs]
+    file2writer = collections.defaultdict(list)
+    for item in writers:
+        file2writer[item.file_name].append(item)
+
+    if len(file2writer) != len(writers):
+        message = 'Name clashes detected in module files:\n'
+        for filename, writer_list in file2writer.items():
+            if len(writer_list) > 1:
+                message += '\nfile : {0}\n'.format(filename)
+                for x in writer_list:
+                    message += 'spec : {0}\n'.format(x.spec.format(color=True))
+        tty.error(message)
+        tty.error('Operation aborted')
+        raise SystemExit(1)
+
+    # Proceed regenerating module files
+    tty.msg('Regenerating {name} module files'.format(name=mtype))
+    if os.path.isdir(cls.path) and args.delete_tree:
+        shutil.rmtree(cls.path, ignore_errors=False)
+    filesystem.mkdirp(cls.path)
+    for x in writers:
+        x.write(overwrite=True)
 
 
 def module(parser, args):
-    if args.module_command == 'refresh':
-        module_refresh()
-
-    elif args.module_command == 'find':
-        module_find(args.module_type, args, args.spec)
+    # Qualifiers to be used when querying the db for specs
+    constraint_qualifiers = {
+        'refresh': {
+            'installed': True,
+            'known': True
+        },
+    }
+    arguments.ConstraintAction.qualifiers.update(constraint_qualifiers)
+
+    module_type = args.module_type
+    constraint = args.constraint
+    try:
+        callbacks[args.subparser_name](module_type, args.specs, args)
+    except MultipleMatches:
+        message = 'the constraint \'{query}\' matches multiple packages, and this is not allowed in this context'  # NOQA: ignore=E501
+        tty.error(message.format(query=constraint))
+        for s in args.specs:
+            sys.stderr.write(s.format(color=True) + '\n')
+        raise SystemExit(1)
+    except NoMatch:
+        message = 'the constraint \'{query}\' match no package, and this is not allowed in this context'  # NOQA: ignore=E501
+        tty.die(message.format(query=constraint))
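
Aside: an illustration (not from this commit) of the line format that `spack module loads` emits through prompt_template above; the spec and module names are made up.

# '{comment}{command}{prefix}{name}' is rendered once per matching module file.
prompt_template = '{comment}{command}{prefix}{name}'
d = {
    'comment': '# mpileaks@1.0%gcc@4.8.5\n',   # only emitted in shell mode
    'command': 'module load ',                 # '' when --input-only is given
    'prefix': '',                              # value of -p/--prefix
    'name': 'mpileaks-1.0-gcc-4.8.5-abcdefg',  # hypothetical use_name
}
print(prompt_template.format(**d))
# Prints:
# # mpileaks@1.0%gcc@4.8.5
# module load mpileaks-1.0-gcc-4.8.5-abcdefg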

View file

@ -30,7 +30,6 @@
import spack import spack
import spack.cmd import spack.cmd
import spack.repository import spack.repository
from spack.cmd.find import display_specs
description = "Remove an installed package" description = "Remove an installed package"
@@ -43,21 +42,10 @@
 display_args = {
     'long': True,
     'show_flags': True,
-    'variants':True
+    'variants': True
 }
 
 
-def ask_for_confirmation(message):
-    while True:
-        tty.msg(message + '[y/n]')
-        choice = raw_input().lower()
-        if choice == 'y':
-            break
-        elif choice == 'n':
-            raise SystemExit('Operation aborted')
-        tty.warn('Please reply either "y" or "n"')
-
-
 def setup_parser(subparser):
     subparser.add_argument(
         '-f', '--force', action='store_true', dest='force',
@@ -65,32 +53,37 @@ def setup_parser(subparser):
     subparser.add_argument(
         '-a', '--all', action='store_true', dest='all',
         help="USE CAREFULLY. Remove ALL installed packages that match each " +
-        "supplied spec. i.e., if you say uninstall libelf, ALL versions of " +
-        "libelf are uninstalled. This is both useful and dangerous, like rm -r.")
+        "supplied spec. i.e., if you say uninstall libelf, ALL versions of " +  # NOQA: ignore=E501
+        "libelf are uninstalled. This is both useful and dangerous, like rm -r.")  # NOQA: ignore=E501
     subparser.add_argument(
         '-d', '--dependents', action='store_true', dest='dependents',
-        help='Also uninstall any packages that depend on the ones given via command line.'
+        help='Also uninstall any packages that depend on the ones given via command line.'  # NOQA: ignore=E501
     )
     subparser.add_argument(
         '-y', '--yes-to-all', action='store_true', dest='yes_to_all',
-        help='Assume "yes" is the answer to every confirmation asked to the user.'
+        help='Assume "yes" is the answer to every confirmation asked to the user.'  # NOQA: ignore=E501
     )
-    subparser.add_argument('packages', nargs=argparse.REMAINDER, help="specs of packages to uninstall")
+    subparser.add_argument(
+        'packages',
+        nargs=argparse.REMAINDER,
+        help="specs of packages to uninstall"
+    )
 
 
 def concretize_specs(specs, allow_multiple_matches=False, force=False):
-    """
-    Returns a list of specs matching the non necessarily concretized specs given from cli
+    """Returns a list of specs matching the non necessarily
+    concretized specs given from cli
 
     Args:
         specs: list of specs to be matched against installed packages
-        allow_multiple_matches : boolean (if True multiple matches for each item in specs are admitted)
+        allow_multiple_matches : if True multiple matches are admitted
 
     Return:
         list of specs
     """
-    specs_from_cli = []  # List of specs that match expressions given via command line
+    # List of specs that match expressions given via command line
+    specs_from_cli = []
     has_errors = False
     for spec in specs:
         matching = spack.installed_db.query(spec)
@@ -99,7 +92,7 @@ def concretize_specs(specs, allow_multiple_matches=False, force=False):
         if not allow_multiple_matches and len(matching) > 1:
             tty.error("%s matches multiple packages:" % spec)
             print()
-            display_specs(matching, **display_args)
+            spack.cmd.display_specs(matching, **display_args)
             print()
             has_errors = True
@@ -116,8 +109,8 @@ def concretize_specs(specs, allow_multiple_matches=False, force=False):
 
 def installed_dependents(specs):
-    """
-    Returns a dictionary that maps a spec with a list of its installed dependents
+    """Returns a dictionary that maps a spec with a list of its
+    installed dependents
 
     Args:
         specs: list of specs to be checked for dependents
@@ -147,7 +140,7 @@ def do_uninstall(specs, force):
         try:
             # should work if package is known to spack
             packages.append(item.package)
-        except spack.repository.UnknownPackageError as e:
+        except spack.repository.UnknownPackageError:
             # The package.py file has gone away -- but still
             # want to uninstall.
             spack.Package(item).do_uninstall(force=True)
@@ -169,17 +162,20 @@ def uninstall(parser, args):
     with spack.installed_db.write_transaction():
         specs = spack.cmd.parse_specs(args.packages)
         # Gets the list of installed specs that match the ones give via cli
-        uninstall_list = concretize_specs(specs, args.all, args.force)  # takes care of '-a' is given in the cli
-        dependent_list = installed_dependents(uninstall_list)  # takes care of '-d'
+        # takes care of '-a' is given in the cli
+        uninstall_list = concretize_specs(specs, args.all, args.force)
+        dependent_list = installed_dependents(
+            uninstall_list)  # takes care of '-d'
 
         # Process dependent_list and update uninstall_list
         has_error = False
         if dependent_list and not args.dependents and not args.force:
             for spec, lst in dependent_list.items():
-                tty.error("Will not uninstall %s" % spec.format("$_$@$%@$#", color=True))
+                tty.error("Will not uninstall %s" %
+                          spec.format("$_$@$%@$#", color=True))
                 print('')
                 print("The following packages depend on it:")
-                display_specs(lst, **display_args)
+                spack.cmd.display_specs(lst, **display_args)
                 print('')
                 has_error = True
         elif args.dependents:
@@ -188,14 +184,14 @@ def uninstall(parser, args):
             uninstall_list = list(set(uninstall_list))
 
         if has_error:
-            tty.die('You can use spack uninstall --dependents to uninstall these dependencies as well')
+            tty.die('You can use spack uninstall --dependents to uninstall these dependencies as well')  # NOQA: ignore=E501
 
         if not args.yes_to_all:
             tty.msg("The following packages will be uninstalled : ")
             print('')
-            display_specs(uninstall_list, **display_args)
+            spack.cmd.display_specs(uninstall_list, **display_args)
             print('')
-            ask_for_confirmation('Do you want to proceed ? ')
+            spack.cmd.ask_for_confirmation('Do you want to proceed ? ')
 
         # Uninstall everything on the list
         do_uninstall(uninstall_list, args.force)

View file

@@ -328,6 +328,11 @@
                 'anyOf': [
                     {
                         'properties': {
+                            'hash_length': {
+                                'type': 'integer',
+                                'minimum': 0,
+                                'default': 7
+                            },
                             'whitelist': {'$ref': '#/definitions/array_of_strings'},
                             'blacklist': {'$ref': '#/definitions/array_of_strings'},
                             'naming_scheme': {

View file

@@ -188,6 +188,8 @@ def parse_config_options(module_generator):
     #####
 
     # Automatic loading loads
+    module_file_actions['hash_length'] = module_configuration.get(
+        'hash_length', 7)
     module_file_actions['autoload'] = dependencies(
         module_generator.spec, module_file_actions.get('autoload', 'none'))
     # Prerequisites
@@ -237,6 +239,7 @@ class EnvModule(object):
     formats = {}
 
     class __metaclass__(type):
+
         def __init__(cls, name, bases, dict):
             type.__init__(cls, name, bases, dict)
             if cls.name != 'env_module' and cls.name in CONFIGURATION[
@@ -295,7 +298,9 @@ def use_name(self):
             if constraint in self.spec:
                 suffixes.append(suffix)
         # Always append the hash to make the module file unique
-        suffixes.append(self.spec.dag_hash())
+        hash_length = configuration.pop('hash_length', 7)
+        if hash_length != 0:
+            suffixes.append(self.spec.dag_hash(length=hash_length))
         name = '-'.join(suffixes)
         return name
@@ -338,7 +343,7 @@ def blacklisted(self):
 
         return False
 
-    def write(self):
+    def write(self, overwrite=False):
        """
         Writes out a module file for this object.
@@ -399,6 +404,15 @@ def write(self):
         for line in self.module_specific_content(module_configuration):
             module_file_content += line
 
+        # Print a warning in case I am accidentally overwriting
+        # a module file that is already there (name clash)
+        if not overwrite and os.path.exists(self.file_name):
+            message = 'Module file already exists : skipping creation\n'
+            message += 'file : {0.file_name}\n'
+            message += 'spec : {0.spec}'
+            tty.warn(message.format(self))
+            return
+
         # Dump to file
         with open(self.file_name, 'w') as f:
             f.write(module_file_content)
@@ -454,7 +468,7 @@ def remove(self):
 
 class Dotkit(EnvModule):
     name = 'dotkit'
+    path = join_path(spack.share_path, 'dotkit')
     environment_modifications_formats = {
         PrependPath: 'dk_alter {name} {value}\n',
         SetEnv: 'dk_setenv {name} {value}\n'
@@ -466,7 +480,7 @@ class Dotkit(EnvModule):
 
     @property
     def file_name(self):
-        return join_path(spack.share_path, "dotkit", self.spec.architecture,
+        return join_path(self.path, self.spec.architecture,
                          '%s.dk' % self.use_name)
 
     @property
@@ -494,7 +508,7 @@ def prerequisite(self, spec):
 
 class TclModule(EnvModule):
     name = 'tcl'
+    path = join_path(spack.share_path, "modules")
     environment_modifications_formats = {
         PrependPath: 'prepend-path --delim "{delim}" {name} \"{value}\"\n',
         AppendPath: 'append-path --delim "{delim}" {name} \"{value}\"\n',
@@ -514,7 +528,7 @@ class TclModule(EnvModule):
 
     @property
     def file_name(self):
-        return join_path(spack.share_path, "modules", self.spec.architecture, self.use_name)
+        return join_path(self.path, self.spec.architecture, self.use_name)
 
     @property
     def header(self):
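
Aside: a toy sketch (not from this commit) of the hash_length behavior configured through the new `hash_length` key in the modules schema above; a non-zero value truncates the DAG-hash suffix of use_name, while 0 drops it entirely. The helper function and values below are made up.

def make_use_name(base, dag_hash, hash_length=7):
    # Mirrors the suffix logic in EnvModule.use_name.
    suffixes = [base]
    if hash_length != 0:
        suffixes.append(dag_hash[:hash_length])
    return '-'.join(suffixes)

print(make_use_name('mpileaks-1.0-gcc-4.8.5', 'abcdefghijklmnop'))     # 7-char hash suffix
print(make_use_name('mpileaks-1.0-gcc-4.8.5', 'abcdefghijklmnop', 0))  # no hash suffix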

View file

@@ -40,7 +40,8 @@
     'mirror', 'modules', 'url_extrapolate', 'cc', 'link_tree', 'spec_yaml',
     'optional_deps', 'make_executable', 'build_system_guess', 'lock',
     'database', 'namespace_trie', 'yaml', 'sbang', 'environment', 'cmd.find',
-    'cmd.uninstall', 'cmd.test_install', 'cmd.test_compiler_cmd'
+    'cmd.uninstall', 'cmd.test_install', 'cmd.test_compiler_cmd',
+    'cmd.module'
 ]

View file

@@ -27,11 +27,7 @@
 import spack.cmd.find
 import unittest
 
-
-class Bunch(object):
-
-    def __init__(self, **kwargs):
-        self.__dict__.update(kwargs)
+from spack.util.pattern import Bunch
 
 
 class FindTest(unittest.TestCase):

View file

@@ -0,0 +1,83 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import argparse
import os.path

import spack.cmd.module as module
import spack.modules as modules
import spack.test.mock_database


class TestModule(spack.test.mock_database.MockDatabase):

    def _get_module_files(self, args):
        return [
            modules.module_types[args.module_type](spec).file_name for spec in args.specs  # NOQA: ignore=E501
        ]

    def test_module_common_operations(self):
        parser = argparse.ArgumentParser()
        module.setup_parser(parser)
        # Try to remove a non existing module [tcl]
        args = parser.parse_args(['rm', 'doesnotexist'])
        self.assertRaises(SystemExit, module.module, parser, args)
        # Remove existing modules [tcl]
        args = parser.parse_args(['rm', '-y', 'mpileaks'])
        module_files = self._get_module_files(args)
        for item in module_files:
            self.assertTrue(os.path.exists(item))
        module.module(parser, args)
        for item in module_files:
            self.assertFalse(os.path.exists(item))
        # Add them back [tcl]
        args = parser.parse_args(['refresh', '-y', 'mpileaks'])
        module.module(parser, args)
        for item in module_files:
            self.assertTrue(os.path.exists(item))
        # TODO : test the --delete-tree option
        # TODO : this requires having a separate directory for test modules
        # Try to find a module with multiple matches
        args = parser.parse_args(['find', 'mpileaks'])
        self.assertRaises(SystemExit, module.module, parser, args)
        # Try to find a module with no matches
        args = parser.parse_args(['find', 'doesnotexist'])
        self.assertRaises(SystemExit, module.module, parser, args)
        # Try to find a module
        args = parser.parse_args(['find', 'libelf'])
        module.module(parser, args)
        # Remove existing modules [dotkit]
        args = parser.parse_args(['rm', '-y', '-m', 'dotkit', 'mpileaks'])
        module_files = self._get_module_files(args)
        for item in module_files:
            self.assertTrue(os.path.exists(item))
        module.module(parser, args)
        for item in module_files:
            self.assertFalse(os.path.exists(item))
        # Add them back [dotkit]
        args = parser.parse_args(['refresh', '-y', '-m', 'dotkit', 'mpileaks'])
        module.module(parser, args)
        for item in module_files:
            self.assertTrue(os.path.exists(item))
        # TODO : add tests for loads and find to check the prompt format

View file

@@ -28,42 +28,50 @@
 
 def composite(interface=None, method_list=None, container=list):
-    """
-    Returns a class decorator that patches a class adding all the methods it needs to be a composite for a given
-    interface.
+    """Returns a class decorator that patches a class adding all the methods
+    it needs to be a composite for a given interface.
 
-    :param interface: class exposing the interface to which the composite object must conform. Only non-private and
-    non-special methods will be taken into account
+    :param interface: class exposing the interface to which the composite
+    object must conform. Only non-private and non-special methods will be
+    taken into account
 
     :param method_list: names of methods that should be part of the composite
 
-    :param container: container for the composite object (default = list). Must fulfill the MutableSequence contract.
-    The composite class will expose the container API to manage object composition
+    :param container: container for the composite object (default = list).
+    Must fulfill the MutableSequence contract. The composite class will expose
+    the container API to manage object composition
 
     :return: class decorator
     """
-    # Check if container fulfills the MutableSequence contract and raise an exception if it doesn't
-    # The patched class returned by the decorator will inherit from the container class to expose the
-    # interface needed to manage objects composition
+    # Check if container fulfills the MutableSequence contract and raise an
+    # exception if it doesn't. The patched class returned by the decorator will
+    # inherit from the container class to expose the interface needed to manage
+    # objects composition
    if not issubclass(container, collections.MutableSequence):
         raise TypeError("Container must fulfill the MutableSequence contract")
 
-    # Check if at least one of the 'interface' or the 'method_list' arguments are defined
+    # Check if at least one of the 'interface' or the 'method_list' arguments
+    # are defined
     if interface is None and method_list is None:
-        raise TypeError("Either 'interface' or 'method_list' must be defined on a call to composite")
+        raise TypeError("Either 'interface' or 'method_list' must be defined on a call to composite")  # NOQA : ignore=E501
 
     def cls_decorator(cls):
-        # Retrieve the base class of the composite. Inspect its methods and decide which ones will be overridden
+        # Retrieve the base class of the composite. Inspect its methods and
+        # decide which ones will be overridden
         def no_special_no_private(x):
             return inspect.ismethod(x) and not x.__name__.startswith('_')
 
-        # Patch the behavior of each of the methods in the previous list. This is done associating an instance of the
-        # descriptor below to any method that needs to be patched.
+        # Patch the behavior of each of the methods in the previous list.
+        # This is done associating an instance of the descriptor below to
+        # any method that needs to be patched.
         class IterateOver(object):
-            """
-            Decorator used to patch methods in a composite. It iterates over all the items in the instance containing the
-            associated attribute and calls for each of them an attribute with the same name
+            """Decorator used to patch methods in a composite.
+            It iterates over all the items in the instance containing the
+            associated attribute and calls for each of them an attribute
+            with the same name
             """
 
             def __init__(self, name, func=None):
                 self.name = name
                 self.func = func
@@ -72,8 +80,9 @@ def __get__(self, instance, owner):
                 def getter(*args, **kwargs):
                     for item in instance:
                         getattr(item, self.name)(*args, **kwargs)
-                # If we are using this descriptor to wrap a method from an interface, then we must conditionally
-                # use the `functools.wraps` decorator to set the appropriate fields.
+                # If we are using this descriptor to wrap a method from an
+                # interface, then we must conditionally use the
+                # `functools.wraps` decorator to set the appropriate fields
                 if self.func is not None:
                     getter = functools.wraps(self.func)(getter)
                 return getter
@@ -81,7 +90,8 @@ def getter(*args, **kwargs):
         dictionary_for_type_call = {}
         # Construct a dictionary with the methods explicitly passed as name
         if method_list is not None:
-            # python@2.7: method_list_dict = {name: IterateOver(name) for name in method_list}
+            # python@2.7: method_list_dict = {name: IterateOver(name) for name
+            # in method_list}
             method_list_dict = {}
             for name in method_list:
                 method_list_dict[name] = IterateOver(name)
@@ -89,28 +99,40 @@ def getter(*args, **kwargs):
         # Construct a dictionary with the methods inspected from the interface
         if interface is not None:
             ##########
-            # python@2.7: interface_methods = {name: method for name, method in inspect.getmembers(interface, predicate=no_special_no_private)}
+            # python@2.7: interface_methods = {name: method for name, method in
+            # inspect.getmembers(interface, predicate=no_special_no_private)}
             interface_methods = {}
-            for name, method in inspect.getmembers(interface, predicate=no_special_no_private):
+            for name, method in inspect.getmembers(interface, predicate=no_special_no_private):  # NOQA: ignore=E501
                 interface_methods[name] = method
             ##########
-            # python@2.7: interface_methods_dict = {name: IterateOver(name, method) for name, method in interface_methods.iteritems()}
+            # python@2.7: interface_methods_dict = {name: IterateOver(name,
+            # method) for name, method in interface_methods.iteritems()}
             interface_methods_dict = {}
             for name, method in interface_methods.iteritems():
                 interface_methods_dict[name] = IterateOver(name, method)
             ##########
             dictionary_for_type_call.update(interface_methods_dict)
 
-        # Get the methods that are defined in the scope of the composite class and override any previous definition
+        # Get the methods that are defined in the scope of the composite
+        # class and override any previous definition
         ##########
-        # python@2.7: cls_method = {name: method for name, method in inspect.getmembers(cls, predicate=inspect.ismethod)}
+        # python@2.7: cls_method = {name: method for name, method in
+        # inspect.getmembers(cls, predicate=inspect.ismethod)}
         cls_method = {}
-        for name, method in inspect.getmembers(cls, predicate=inspect.ismethod):
+        for name, method in inspect.getmembers(cls, predicate=inspect.ismethod):  # NOQA: ignore=E501
             cls_method[name] = method
         ##########
         dictionary_for_type_call.update(cls_method)
 
         # Generate the new class on the fly and return it
         # FIXME : inherit from interface if we start to use ABC classes?
-        wrapper_class = type(cls.__name__, (cls, container), dictionary_for_type_call)
+        wrapper_class = type(cls.__name__, (cls, container),
+                             dictionary_for_type_call)
         return wrapper_class
 
     return cls_decorator
+
+
+class Bunch(object):
+    """Carries a bunch of named attributes (from Alex Martelli bunch)"""
+
+    def __init__(self, **kwargs):
+        self.__dict__.update(kwargs)
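
Aside: a self-contained Python 2 sketch (not from this commit) of the composite decorator in action; the Writer classes below are invented for illustration, while Bunch is used the same way in the new spack/cmd/common/arguments.py shown earlier.

from spack.util.pattern import composite


class Writer(object):
    def write(self):
        pass


@composite(interface=Writer)
class WriterGroup(object):
    pass


class Hello(Writer):
    def write(self):
        print('hello')


group = WriterGroup()   # the composite exposes the list API
group.append(Hello())
group.append(Hello())
group.write()           # fans the call out to every contained item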

View file

@@ -74,25 +74,25 @@ case unload:
         # tool's commands to add/remove the result from the environment.
         switch ($_sp_subcommand)
             case "use":
-                set _sp_full_spec = ( "`\spack $_sp_flags module find dotkit $_sp_spec`" )
+                set _sp_full_spec = ( "`\spack $_sp_flags module find --module-type dotkit $_sp_spec`" )
                 if ( $? == 0 ) then
                     use $_sp_module_args $_sp_full_spec
                 endif
                 breaksw
             case "unuse":
-                set _sp_full_spec = ( "`\spack $_sp_flags module find dotkit $_sp_spec`" )
+                set _sp_full_spec = ( "`\spack $_sp_flags module find --module-type dotkit $_sp_spec`" )
                 if ( $? == 0 ) then
                     unuse $_sp_module_args $_sp_full_spec
                 endif
                 breaksw
             case "load":
-                set _sp_full_spec = ( "`\spack $_sp_flags module find tcl $_sp_spec`" )
+                set _sp_full_spec = ( "`\spack $_sp_flags module find --module-type tcl $_sp_spec`" )
                 if ( $? == 0 ) then
                     module load $_sp_module_args $_sp_full_spec
                 endif
                 breaksw
             case "unload":
-                set _sp_full_spec = ( "`\spack $_sp_flags module find tcl $_sp_spec`" )
+                set _sp_full_spec = ( "`\spack $_sp_flags module find --module-type tcl $_sp_spec`" )
                 if ( $? == 0 ) then
                     module unload $_sp_module_args $_sp_full_spec
                 endif

View file

@@ -117,19 +117,19 @@ function spack {
             # If spack module command comes back with an error, do nothing.
             case $_sp_subcommand in
                 "use")
-                    if _sp_full_spec=$(command spack $_sp_flags module find $_sp_subcommand_args dotkit $_sp_spec); then
+                    if _sp_full_spec=$(command spack $_sp_flags module find $_sp_subcommand_args --module-type dotkit $_sp_spec); then
                         use $_sp_module_args $_sp_full_spec
                     fi ;;
                 "unuse")
-                    if _sp_full_spec=$(command spack $_sp_flags module find $_sp_subcommand_args dotkit $_sp_spec); then
+                    if _sp_full_spec=$(command spack $_sp_flags module find $_sp_subcommand_args --module-type dotkit $_sp_spec); then
                         unuse $_sp_module_args $_sp_full_spec
                     fi ;;
                 "load")
-                    if _sp_full_spec=$(command spack $_sp_flags module find $_sp_subcommand_args tcl $_sp_spec); then
+                    if _sp_full_spec=$(command spack $_sp_flags module find $_sp_subcommand_args --module-type tcl $_sp_spec); then
                         module load $_sp_module_args $_sp_full_spec
                     fi ;;
                 "unload")
-                    if _sp_full_spec=$(command spack $_sp_flags module find $_sp_subcommand_args tcl $_sp_spec); then
+                    if _sp_full_spec=$(command spack $_sp_flags module find $_sp_subcommand_args --module-type tcl $_sp_spec); then
                         module unload $_sp_module_args $_sp_full_spec
                     fi ;;
             esac