Convert Python 2 idioms to Python 2/3-compatible ones.

- Convert print, StringIO, except-as, octal literals, and izip.
- Convert print statements to the print() function (see the first sketch below).
- Convert StringIO to six.StringIO.
  - Remove usage of the csv reader in Spec, in favor of a simple regex.
  - The csv reader only handles byte strings.
- Convert 0755-style octal literals to 0o755.
- Convert `except Foo, e` to `except Foo as e`.
- Fix a few places where `str` is used.
  - May need to switch everything to str later.
- Convert iteritems usages to six.iteritems.
- Fix urllib and HTMLParser imports.
- Port metaclasses to use six.with_metaclass (see the second sketch below).
- More octal literal conversions for Python 2/3.
- Fix a new octal literal.
- Convert `basestring` to `six.string_types`.
- Convert xrange -> range.
- Fix various issues with encoding, iteritems, and Python 3 semantics.
- Convert contextlib.nested to explicitly nested context managers (see the third sketch below).
- Convert uses of filter() to list comprehensions.
- Replace reduce() with list comprehensions.
- Clean up Composite: replace inspect.ismethod() with callable().
- Python 3 has no unbound method objects, so inspect.ismethod() returns False for plain functions looked up on a class.
- Need to use callable() in Composite to make it work.
- Update colify to use future division.
- Fix zip() usages that need to be lists.
- Python 3: use line-buffered logging instead of unbuffered.
- Python 3 raises an error with unbuffered text I/O.
  - See https://bugs.python.org/issue17404
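
The following is a minimal, illustrative sketch (not Spack code) of the compatibility idioms this commit standardizes on: the print() function, six.StringIO, six.string_types, six.iteritems, and // for integer division under future division. It assumes the six package is installed; the function and variable names are invented for the example.

    # Hypothetical example of the 2/3-compatible idioms; not taken from Spack itself.
    from __future__ import print_function, division

    from six import StringIO, string_types, iteritems


    def describe(settings, width):
        """Render a settings dict and pick a column count that works on 2 and 3."""
        out = StringIO()                        # replaces StringIO.StringIO
        for key, value in iteritems(settings):  # replaces settings.iteritems()
            out.write("%s = %s\n" % (key, value))
        print(out.getvalue(), end='')           # print is a function on both versions
        return max(1, width // 10)              # // stays integer under future division


    def is_name(obj):
        return isinstance(obj, string_types)    # replaces isinstance(obj, basestring)


    if __name__ == '__main__':
        describe({'mode': '0o755'}, 80)
        print(is_name('gcc'))                   # True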
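
The metaclass ports in this commit (FSMeta in fetch_strategy.py, ModuleMeta in modules.py, PackageMeta in package.py) follow the same pattern, sketched below with invented names. six.with_metaclass replaces the Python-2-only `__metaclass__` attribute so the same class body works under both interpreters.

    # Hypothetical registration metaclass; names are illustrative, not Spack's.
    from six import with_metaclass

    all_strategies = []


    class RegisterMeta(type):
        """Record every subclass that sets enabled = True."""

        def __init__(cls, name, bases, attrs):
            type.__init__(cls, name, bases, attrs)
            if cls.enabled:
                all_strategies.append(cls)


    # Python 2 only:  class Strategy(object): __metaclass__ = RegisterMeta
    # Python 3 only:  class Strategy(metaclass=RegisterMeta): ...
    # Both:
    class Strategy(with_metaclass(RegisterMeta, object)):
        enabled = False


    class URLStrategy(Strategy):
        enabled = True


    print(all_strategies)  # [<class '__main__.URLStrategy'>]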
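
contextlib.nested() no longer exists in Python 3, so the commit replaces it with explicit nesting, wrapped in a small context manager where the pair of with statements is reused (the new _stage_and_write_lock() in package.py is the real case). A self-contained sketch of the pattern, with made-up resource names:

    # Hypothetical stand-ins for the stage and the prefix lock.
    import contextlib


    class Resource(object):
        def __init__(self, name):
            self.name = name

        def __enter__(self):
            print("enter " + self.name)
            return self

        def __exit__(self, exc_type, exc_val, exc_tb):
            print("exit " + self.name)


    @contextlib.contextmanager
    def stage_and_lock(stage, lock):
        # Explicit nesting replaces contextlib.nested(stage, lock).
        with stage:
            with lock:
                yield


    with stage_and_lock(Resource("stage"), Resource("lock")):
        print("working")
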
Todd Gamblin 2017-03-07 14:25:48 -08:00
parent 0331b08c64
commit 1d1a14dbe9
74 changed files with 396 additions and 323 deletions

View file

@@ -24,6 +24,8 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
+from __future__ import print_function
 import sys
 if (sys.version_info[0] > 2) or (sys.version_info[:2] < (2, 6)):
 v_info = sys.version_info[:3]
@@ -74,7 +76,7 @@ for pyc_file in orphaned_pyc_files:
 try:
 os.remove(pyc_file)
 except OSError as e:
-print ("WARNING: Spack may fail mysteriously. "
+print("WARNING: Spack may fail mysteriously. "
 "Couldn't remove orphaned .pyc file: %s" % pyc_file)
 # If there is no working directory, use the spack prefix.

View file

@@ -175,9 +175,9 @@ def change_sed_delimiter(old_delim, new_delim, *filenames):
 def set_install_permissions(path):
 """Set appropriate permissions on the installed file."""
 if os.path.isdir(path):
-os.chmod(path, 0755)
+os.chmod(path, 0o755)
 else:
-os.chmod(path, 0644)
+os.chmod(path, 0o644)
 def copy_mode(src, dest):

View file

@@ -27,6 +27,7 @@
 import functools
 import collections
 import inspect
+from six import string_types
 # Ignore emacs backups when listing modules
 ignore_modules = [r'^\.#', '~$']
@@ -80,7 +81,7 @@ def index_by(objects, *funcs):
 return objects
 f = funcs[0]
-if isinstance(f, basestring):
+if isinstance(f, str):
 f = lambda x: getattr(x, funcs[0])
 elif isinstance(f, tuple):
 f = lambda x: tuple(getattr(x, p) for p in funcs[0])
@@ -326,7 +327,7 @@ def match_predicate(*args):
 """
 def match(string):
 for arg in args:
-if isinstance(arg, basestring):
+if isinstance(arg, string_types):
 if re.search(arg, string):
 return True
 elif isinstance(arg, list) or isinstance(arg, tuple):

View file

@@ -29,7 +29,7 @@
 import termios
 import struct
 import traceback
-from StringIO import StringIO
+from six import StringIO
 from llnl.util.tty.color import *
@@ -93,7 +93,7 @@ def msg(message, *args, **kwargs):
 else:
 cwrite("@*b{%s==>} %s" % (st_text, cescape(message)))
 for arg in args:
-print indent + str(arg)
+print(indent + str(arg))
 def info(message, *args, **kwargs):
@@ -201,7 +201,7 @@ def get_yes_or_no(prompt, **kwargs):
 if not ans:
 result = default_value
 if result is None:
-print "Please enter yes or no."
+print("Please enter yes or no.")
 else:
 if ans == 'y' or ans == 'yes':
 result = True
@@ -239,7 +239,7 @@ def hline(label=None, **kwargs):
 out.write(label)
 out.write(suffix)
-print out.getvalue()
+print(out.getvalue())
 def terminal_size():

View file

@@ -25,9 +25,11 @@
 """
 Routines for printing columnar output. See colify() for more information.
 """
+from __future__ import division
 import os
 import sys
-from StringIO import StringIO
+from six import StringIO
 from llnl.util.tty import terminal_size
 from llnl.util.tty.color import clen, cextra
@@ -64,18 +66,18 @@ def config_variable_cols(elts, console_width, padding, cols=0):
 # Get a bound on the most columns we could possibly have.
 # 'clen' ignores length of ansi color sequences.
 lengths = [clen(e) for e in elts]
-max_cols = max(1, console_width / (min(lengths) + padding))
+max_cols = max(1, console_width // (min(lengths) + padding))
 max_cols = min(len(elts), max_cols)
 # Range of column counts to try. If forced, use the supplied value.
-col_range = [cols] if cols else xrange(1, max_cols + 1)
+col_range = [cols] if cols else range(1, max_cols + 1)
 # Determine the most columns possible for the console width.
 configs = [ColumnConfig(c) for c in col_range]
 for i, length in enumerate(lengths):
 for conf in configs:
 if conf.valid:
-col = i / ((len(elts) + conf.cols - 1) / conf.cols)
+col = i // ((len(elts) + conf.cols - 1) // conf.cols)
 p = padding if col < (conf.cols - 1) else 0
 if conf.widths[col] < (length + p):
@@ -107,7 +109,7 @@ def config_uniform_cols(elts, console_width, padding, cols=0):
 # 'clen' ignores length of ansi color sequences.
 max_len = max(clen(e) for e in elts) + padding
 if cols == 0:
-cols = max(1, console_width / max_len)
+cols = max(1, console_width // max_len)
 cols = min(len(elts), cols)
 config = ColumnConfig(cols)
@@ -193,12 +195,12 @@ def colify(elts, **options):
 raise ValueError("method must be one of: " + allowed_methods)
 cols = config.cols
-rows = (len(elts) + cols - 1) / cols
+rows = (len(elts) + cols - 1) // cols
 rows_last_col = len(elts) % rows
-for row in xrange(rows):
+for row in range(rows):
 output.write(" " * indent)
-for col in xrange(cols):
+for col in range(cols):
 elt = col * rows + row
 width = config.widths[col] + cextra(elts[elt])
 if col < cols - 1:
@@ -233,7 +235,7 @@ def colify_table(table, **options):
 columns = len(table[0])
 def transpose():
-for i in xrange(columns):
+for i in range(columns):
 for row in table:
 yield row[i]

View file

@@ -165,8 +165,12 @@ def __exit__(self, exc_type, exc_val, exc_tb):
 self.p.join(60.0) # 1 minute to join the child
 def _spawn_writing_daemon(self, read, input_stream):
-# Parent: read from child, skip the with block.
-read_file = os.fdopen(read, 'r', 0)
+# This is the Parent: read from child, skip the with block.
+# Use line buffering (3rd param = 1) since Python 3 has a bug
+# that prevents unbuffered text I/O.
+read_file = os.fdopen(read, 'r', 1)
 with open(self.filename, 'w') as log_file:
 with keyboard_input(input_stream):
 while True:

View file

@@ -96,7 +96,7 @@
 try:
 repo = spack.repository.RepoPath()
 sys.meta_path.append(repo)
-except spack.error.SpackError, e:
+except spack.error.SpackError as e:
 tty.die('while initializing Spack RepoPath:', e.message)

View file

@@ -287,7 +287,7 @@ def find_compilers(self, *paths):
 # ensure all the version calls we made are cached in the parent
 # process, as well. This speeds up Spack a lot.
-clist = reduce(lambda x, y: x + y, compiler_lists)
+clist = [comp for cl in compiler_lists for comp in cl]
 return clist
 def find_compiler(self, cmp_cls, *path):

View file

@@ -57,6 +57,7 @@
 import shutil
 import sys
 import traceback
+from six import iteritems
 import llnl.util.lang as lang
 import llnl.util.tty as tty
@@ -310,7 +311,7 @@ def set_build_environment_variables(pkg, env, dirty=False):
 environment = compiler.environment
 if 'set' in environment:
 env_to_set = environment['set']
-for key, value in env_to_set.iteritems():
+for key, value in iteritems(env_to_set):
 env.set('SPACK_ENV_SET_%s' % key, value)
 env.set('%s' % key, value)
 # Let shell know which variables to set
@@ -322,8 +323,9 @@ def set_build_environment_variables(pkg, env, dirty=False):
 env.set('SPACK_COMPILER_EXTRA_RPATHS', extra_rpaths)
 # Add bin directories from dependencies to the PATH for the build.
-bin_dirs = reversed(filter(os.path.isdir, [
-'%s/bin' % d.prefix for d in pkg.spec.dependencies(deptype='build')]))
+bin_dirs = reversed(
+[d.prefix.bin for d in pkg.spec.dependencies(deptype='build')
+if os.path.isdir(d.prefix.bin)])
 bin_dirs = filter_system_bin_paths(bin_dirs)
 for item in bin_dirs:
 env.prepend_path('PATH', item)

View file

@@ -146,7 +146,7 @@ def _do_patch_config_guess(self):
 if config_guess is not None:
 try:
 check_call([config_guess], stdout=PIPE, stderr=PIPE)
-mod = stat(my_config_guess).st_mode & 0777 | S_IWUSR
+mod = stat(my_config_guess).st_mode & 0o777 | S_IWUSR
 os.chmod(my_config_guess, mod)
 shutil.copyfile(config_guess, my_config_guess)
 return True

View file

@@ -22,6 +22,8 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
+from __future__ import print_function
 import os
 import re
 import sys
@@ -186,7 +188,7 @@ def display_specs(specs, **kwargs):
 # Traverse the index and print out each package
 for i, (architecture, compiler) in enumerate(sorted(index)):
 if i > 0:
-print
+print()
 header = "%s{%s} / %s{%s}" % (spack.spec.architecture_color,
 architecture, spack.spec.compiler_color,
@@ -205,7 +207,7 @@ def display_specs(specs, **kwargs):
 for abbrv, spec in zip(abbreviated, specs):
 prefix = gray_hash(spec, hlen) if hashes else ''
-print prefix + (format % (abbrv, spec.prefix))
+print(prefix + (format % (abbrv, spec.prefix)))
 elif mode == 'deps':
 for spec in specs:

View file

@@ -22,6 +22,8 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
+from __future__ import print_function
 import spack.architecture as architecture
 description = "print architecture information about this machine"
@@ -36,6 +38,6 @@ def setup_parser(subparser):
 def arch(parser, args):
 if args.platform:
-print architecture.platform()
+print(architecture.platform())
 else:
-print architecture.sys_type()
+print(architecture.sys_type())

View file

@@ -81,7 +81,7 @@ def _specs(self, **kwargs):
 _arguments['module_type'] = Args(
 '-m', '--module-type',
 choices=spack.modules.module_types.keys(),
-default=spack.modules.module_types.keys()[0],
+default=list(spack.modules.module_types.keys())[0],
 help='type of module files [default: %(default)s]')
 _arguments['yes_to_all'] = Args(

View file

@@ -22,8 +22,11 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
+from __future__ import print_function
 import argparse
 import sys
+from six import iteritems
 import llnl.util.tty as tty
 import spack.compilers
@@ -142,36 +145,36 @@ def compiler_info(args):
 tty.error("No compilers match spec %s" % cspec)
 else:
 for c in compilers:
-print str(c.spec) + ":"
-print "\tpaths:"
+print(str(c.spec) + ":")
+print("\tpaths:")
 for cpath in ['cc', 'cxx', 'f77', 'fc']:
-print "\t\t%s = %s" % (cpath, getattr(c, cpath, None))
+print("\t\t%s = %s" % (cpath, getattr(c, cpath, None)))
 if c.flags:
-print "\tflags:"
-for flag, flag_value in c.flags.iteritems():
-print "\t\t%s = %s" % (flag, flag_value)
+print("\tflags:")
+for flag, flag_value in iteritems(c.flags):
+print("\t\t%s = %s" % (flag, flag_value))
 if len(c.environment) != 0:
 if len(c.environment['set']) != 0:
-print "\tenvironment:"
-print "\t set:"
-for key, value in c.environment['set'].iteritems():
-print "\t %s = %s" % (key, value)
+print("\tenvironment:")
+print("\t set:")
+for key, value in iteritems(c.environment['set']):
+print("\t %s = %s" % (key, value))
 if c.extra_rpaths:
-print "\tExtra rpaths:"
+print("\tExtra rpaths:")
 for extra_rpath in c.extra_rpaths:
-print "\t\t%s" % extra_rpath
-print "\tmodules = %s" % c.modules
-print "\toperating system = %s" % c.operating_system
+print("\t\t%s" % extra_rpath)
+print("\tmodules = %s" % c.modules)
+print("\toperating system = %s" % c.operating_system)
 def compiler_list(args):
 tty.msg("Available compilers")
 index = index_by(spack.compilers.all_compilers(scope=args.scope),
 lambda c: (c.spec.name, c.operating_system, c.target))
-ordered_sections = sorted(index.items(), key=lambda (k, v): k)
+ordered_sections = sorted(index.items(), key=lambda item: item[0])
 for i, (key, compilers) in enumerate(ordered_sections):
 if i >= 1:
-print
+print()
 name, os, target = key
 os_str = os
 if target:

View file

@@ -22,7 +22,6 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 import argparse
 import llnl.util.tty as tty

View file

@@ -50,4 +50,4 @@ def dependents(parser, args):
 if deps:
 spack.cmd.display_specs(deps)
 else:
-print "No dependents"
+print("No dependents")

View file

@@ -22,8 +22,11 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
+from __future__ import print_function
 import os
 import argparse
 import llnl.util.tty as tty
 import spack.cmd
 import spack.build_environment as build_env
@@ -64,7 +67,7 @@ def env(parser, args):
 if not cmd:
 # If no command act like the "env" command and print out env vars.
 for key, val in os.environ.items():
-print "%s=%s" % (key, val)
+print("%s=%s" % (key, val))
 else:
 # Otherwise execute the command with the new environment

View file

@@ -22,6 +22,8 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
+from __future__ import print_function
 import re
 import os
 import sys
@@ -175,12 +177,12 @@ def prefix_relative(path):
 file_list = changed_files()
 shutil.copy('.flake8', os.path.join(temp, '.flake8'))
-print '======================================================='
-print 'flake8: running flake8 code checks on spack.'
-print
-print 'Modified files:'
+print('=======================================================')
+print('flake8: running flake8 code checks on spack.')
+print()
+print('Modified files:')
 for filename in file_list:
-print " %s" % filename.strip()
+print(" %s" % filename.strip())
 print('=======================================================')
 # filter files into a temporary directory with exemptions added.
@@ -196,7 +198,7 @@ def prefix_relative(path):
 if args.root_relative:
 # print results relative to repo root.
-print output
+print(output)
 else:
 # print results relative to current working directory
 def cwd_relative(path):
@@ -204,16 +206,16 @@ def cwd_relative(path):
 os.path.join(spack.prefix, path.group(1)), os.getcwd())
 for line in output.split('\n'):
-print re.sub(r'^(.*): \[', cwd_relative, line)
+print(re.sub(r'^(.*): \[', cwd_relative, line))
 if flake8.returncode != 0:
-print "Flake8 found errors."
+print("Flake8 found errors.")
 sys.exit(1)
 else:
-print "Flake8 checks were clean."
+print("Flake8 checks were clean.")
 finally:
 if args.keep_temp:
-print "temporary files are in ", temp
+print("temporary files are in ", temp)
 else:
 shutil.rmtree(temp, ignore_errors=True)

View file

@@ -22,8 +22,9 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-import argparse
+from __future__ import print_function
+import argparse
 import llnl.util.tty as tty
 import spack
@@ -96,5 +97,5 @@ def graph(parser, args):
 elif specs: # ascii is default: user doesn't need to provide it explicitly
 graph_ascii(specs[0], debug=spack.debug, deptype=deptype)
 for spec in specs[1:]:
-print # extra line bt/w independent graphs
+print() # extra line bt/w independent graphs
 graph_ascii(spec, debug=spack.debug)

View file

@@ -22,7 +22,10 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
+from __future__ import print_function
 import textwrap
 from llnl.util.tty.colify import *
 import spack
 import spack.fetch_strategy as fs
@@ -50,12 +53,12 @@ def print_text_info(pkg):
 """Print out a plain text description of a package."""
 header = "{0}: ".format(pkg.build_system_class)
-print header, pkg.name
+print(header, pkg.name)
 whitespaces = ''.join([' '] * (len(header) - len("Homepage: ")))
-print "Homepage:", whitespaces, pkg.homepage
-print
-print "Safe versions: "
+print("Homepage:", whitespaces, pkg.homepage)
+print()
+print("Safe versions: ")
 if not pkg.versions:
 print(" None")
@@ -63,20 +66,20 @@ def print_text_info(pkg):
 pad = padder(pkg.versions, 4)
 for v in reversed(sorted(pkg.versions)):
 f = fs.for_package_version(pkg, v)
-print " %s%s" % (pad(v), str(f))
-print
-print "Variants:"
+print(" %s%s" % (pad(v), str(f)))
+print()
+print("Variants:")
 if not pkg.variants:
-print " None"
+print(" None")
 else:
 pad = padder(pkg.variants, 4)
 maxv = max(len(v) for v in sorted(pkg.variants))
 fmt = "%%-%ss%%-10s%%s" % (maxv + 4)
-print " " + fmt % ('Name', 'Default', 'Description')
-print
+print(" " + fmt % ('Name', 'Default', 'Description'))
+print()
 for name in sorted(pkg.variants):
 v = pkg.variants[name]
 default = 'on' if v.default else 'off'
@@ -85,26 +88,26 @@ def print_text_info(pkg):
 lines[1:] = [" " + (" " * maxv) + l for l in lines[1:]]
 desc = "\n".join(lines)
-print " " + fmt % (name, default, desc)
-print
-print "Installation Phases:"
+print(" " + fmt % (name, default, desc))
+print()
+print("Installation Phases:")
 phase_str = ''
 for phase in pkg.phases:
 phase_str += " {0}".format(phase)
-print phase_str
+print(phase_str)
 for deptype in ('build', 'link', 'run'):
-print
-print "%s Dependencies:" % deptype.capitalize()
+print()
+print("%s Dependencies:" % deptype.capitalize())
 deps = sorted(pkg.dependencies_of_type(deptype))
 if deps:
 colify(deps, indent=4)
 else:
-print " None"
-print
-print "Virtual Packages: "
+print(" None")
+print()
+print("Virtual Packages: ")
 if pkg.provided:
 inverse_map = {}
 for spec, whens in pkg.provided.items():
@@ -113,17 +116,17 @@ def print_text_info(pkg):
 inverse_map[when] = set()
 inverse_map[when].add(spec)
 for when, specs in reversed(sorted(inverse_map.items())):
-print " %s provides %s" % (
-when, ', '.join(str(s) for s in specs))
+print(" %s provides %s" % (
+when, ', '.join(str(s) for s in specs)))
 else:
-print " None"
-print
-print "Description:"
+print(" None")
+print()
+print("Description:")
 if pkg.__doc__:
-print pkg.format_doc(indent=4)
+print(pkg.format_doc(indent=4))
 else:
-print " None"
+print(" None")
 def info(parser, args):

View file

@@ -22,12 +22,14 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
+from __future__ import print_function
 import argparse
 import cgi
 import fnmatch
 import re
 import sys
-from StringIO import StringIO
+from six import StringIO
 import llnl.util.tty as tty
 import spack
@@ -123,42 +125,42 @@ def rst_table(elts):
 pkgs = [spack.repo.get(name) for name in pkg_names]
 print('.. _package-list:')
-print('')
+print()
 print('============')
 print('Package List')
 print('============')
-print('')
+print()
 print('This is a list of things you can install using Spack. It is')
 print('automatically generated based on the packages in the latest Spack')
 print('release.')
-print('')
+print()
 print('Spack currently has %d mainline packages:' % len(pkgs))
-print('')
+print()
 print(rst_table('`%s`_' % p for p in pkg_names))
-print('')
+print()
 # Output some text for each package.
 for pkg in pkgs:
 print('-----')
-print('')
+print()
 print('.. _%s:' % pkg.name)
-print('')
+print()
 # Must be at least 2 long, breaks for single letter packages like R.
 print('-' * max(len(pkg.name), 2))
 print(pkg.name)
 print('-' * max(len(pkg.name), 2))
-print('')
+print()
 print('Homepage:')
 print(' * `%s <%s>`__' % (cgi.escape(pkg.homepage), pkg.homepage))
-print('')
+print()
 print('Spack package:')
 print(' * `%s/package.py <%s>`__' % (pkg.name, github_url(pkg)))
-print('')
+print()
 if pkg.versions:
 print('Versions:')
 print(' ' + ', '.join(str(v) for v in
 reversed(sorted(pkg.versions))))
-print('')
+print()
 for deptype in spack.alldeps:
 deps = pkg.dependencies_of_type(deptype)
@@ -166,11 +168,11 @@ def rst_table(elts):
 print('%s Dependencies' % deptype.capitalize())
 print(' ' + ', '.join('%s_' % d if d in pkg_names
 else d for d in deps))
-print('')
+print()
 print('Description:')
 print(pkg.format_doc(indent=2))
-print('')
+print()
 def list(parser, args):

View file

@@ -22,8 +22,9 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-import argparse
+from __future__ import print_function
+import argparse
 import llnl.util.tty as tty
 import spack
@@ -70,16 +71,16 @@ def setup_parser(subparser):
 def location(parser, args):
 if args.module_dir:
-print spack.module_path
+print(spack.module_path)
 elif args.spack_root:
-print spack.prefix
+print(spack.prefix)
 elif args.packages:
-print spack.repo.first_repo().root
+print(spack.repo.first_repo().root)
 elif args.stages:
-print spack.stage_path
+print(spack.stage_path)
 else:
 specs = spack.cmd.parse_specs(args.spec)
@@ -91,14 +92,14 @@ def location(parser, args):
 if args.install_dir:
 # install_dir command matches against installed specs.
 spec = spack.cmd.disambiguate_spec(specs[0])
-print spec.prefix
+print(spec.prefix)
 else:
 spec = specs[0]
 if args.package_dir:
 # This one just needs the spec name.
-print spack.repo.dirname_for_package_name(spec.name)
+print(spack.repo.dirname_for_package_name(spec.name))
 else:
 # These versions need concretized specs.
@@ -106,11 +107,11 @@ def location(parser, args):
 pkg = spack.repo.get(spec)
 if args.stage_dir:
-print pkg.stage.path
+print(pkg.stage.path)
 else: # args.build_dir is the default.
 if not pkg.stage.source_path:
 tty.die("Build directory does not exist yet. "
 "Run this to create it:",
 "spack stage " + " ".join(args.spec))
-print pkg.stage.source_path
+print(pkg.stage.source_path)

View file

@@ -25,7 +25,7 @@
 import argparse
 import hashlib
 import os
-from urlparse import urlparse
+from six.moves.urllib.parse import urlparse
 import llnl.util.tty as tty
 import spack.util.crypto

View file

@@ -141,7 +141,7 @@ def mirror_list(args):
 fmt = "%%-%ds%%s" % (max_len + 4)
 for name in mirrors:
-print fmt % (name, mirrors[name])
+print(fmt % (name, mirrors[name]))
 def _read_specs_from_file(filename):
@@ -152,7 +152,7 @@ def _read_specs_from_file(filename):
 s = Spec(string)
 s.package
 specs.append(s)
-except SpackError, e:
+except SpackError as e:
 tty.die("Parse error in %s, line %d:" % (args.file, i + 1),
 ">>> " + string, str(e))
 return specs

View file

@@ -22,6 +22,8 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
+from __future__ import print_function
 import os
 import argparse
@@ -118,13 +120,13 @@ def pkg_diff(args):
 u1, u2 = diff_packages(args.rev1, args.rev2)
 if u1:
-print "%s:" % args.rev1
+print("%s:" % args.rev1)
 colify(sorted(u1), indent=4)
 if u1:
-print
+print()
 if u2:
-print "%s:" % args.rev2
+print("%s:" % args.rev2)
 colify(sorted(u2), indent=4)

View file

@@ -22,6 +22,8 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
+from __future__ import print_function
 import os
 import llnl.util.tty as tty
@@ -161,7 +163,7 @@ def repo_list(args):
 max_ns_len = max(len(r.namespace) for r in repos)
 for repo in repos:
 fmt = "%%-%ds%%s" % (max_ns_len + 4)
-print fmt % (repo.namespace, repo.root)
+print(fmt % (repo.namespace, repo.root))
 def repo(parser, args):

View file

@@ -22,8 +22,9 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-import argparse
+from __future__ import print_function
+import argparse
 import spack
 import spack.cmd
 import spack.cmd.common.arguments as arguments
@@ -69,20 +70,20 @@ def spec(parser, args):
 # With -y, just print YAML to output.
 if args.yaml:
 spec.concretize()
-print spec.to_yaml()
+print(spec.to_yaml())
 continue
 # Print some diagnostic info by default.
-print "Input spec"
-print "--------------------------------"
-print spec.tree(**kwargs)
-print "Normalized"
-print "--------------------------------"
+print("Input spec")
+print("--------------------------------")
+print(spec.tree(**kwargs))
+print("Normalized")
+print("--------------------------------")
 spec.normalize()
-print spec.tree(**kwargs)
-print "Concretized"
-print "--------------------------------"
+print(spec.tree(**kwargs))
+print("Concretized")
+print("--------------------------------")
 spec.concretize()
-print spec.tree(**kwargs)
+print(spec.tree(**kwargs))

View file

@@ -22,12 +22,14 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
+from __future__ import print_function
 import sys
 import os
 import re
 import argparse
 import pytest
-from StringIO import StringIO
+from six import StringIO
 from llnl.util.filesystem import *
 from llnl.util.tty.colify import colify
@@ -79,7 +81,7 @@ def do_list(args, unknown_args):
 output_lines.append(
 os.path.basename(name).replace('.py', ''))
 else:
-print indent + name
+print(indent + name)
 if args.list:
 colify(output_lines)

View file

@@ -22,6 +22,8 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
+from __future__ import print_function
 from llnl.util.tty.colify import colify
 import llnl.util.tty as tty
 import spack
@@ -47,10 +49,10 @@ def versions(parser, args):
 tty.msg("Remote versions (not yet checksummed):")
 if not remote_versions:
 if not fetched_versions:
-print " Found no versions for %s" % pkg.name
+print(" Found no versions for %s" % pkg.name)
 tty.debug("Check the list_url and list_depth attribute on the "
 "package to help Spack find versions.")
 else:
-print " Found no unckecksummed versions for %s" % pkg.name
+print(" Found no unckecksummed versions for %s" % pkg.name)
 else:
 colify(sorted(remote_versions, reverse=True), indent=2)

View file

@@ -265,11 +265,11 @@ def check(key):
 full_path, prefix, suffix = key
 version = detect_version(full_path)
 return (version, prefix, suffix, full_path)
-except ProcessError, e:
+except ProcessError as e:
 tty.debug(
 "Couldn't get version for compiler %s" % full_path, e)
 return None
-except Exception, e:
+except Exception as e:
 # Catching "Exception" here is fine because it just
 # means something went wrong running a candidate executable.
 tty.debug("Error while executing candidate compiler %s"

View file

@@ -335,7 +335,7 @@ def get_compiler_duplicates(compiler_spec, arch_spec):
 scope_to_compilers[scope] = compilers
 cfg_file_to_duplicates = dict()
-for scope, compilers in scope_to_compilers.iteritems():
+for scope, compilers in scope_to_compilers.items():
 config_file = config_scopes[scope].get_section_filename('compilers')
 cfg_file_to_duplicates[config_file] = compilers
@@ -401,7 +401,7 @@ def __init__(self, compiler_spec, arch_spec):
 config_file_to_duplicates = get_compiler_duplicates(
 compiler_spec, arch_spec)
 duplicate_table = list(
-(x, len(y)) for x, y in config_file_to_duplicates.iteritems())
+(x, len(y)) for x, y in config_file_to_duplicates.items())
 descriptor = lambda num: 'time' if num == 1 else 'times'
 duplicate_msg = (
 lambda cfgfile, count: "{0}: {1} {2}".format(

View file

@@ -34,6 +34,8 @@
 concretization policies.
 """
 from __future__ import print_function
+from six import iteritems
 import spack
 import spack.spec
 import spack.compilers
@@ -241,7 +243,7 @@ def concretize_version(self, spec):
 def concretize_architecture(self, spec):
 """If the spec is empty provide the defaults of the platform. If the
-architecture is not a basestring, then check if either the platform,
+architecture is not a string type, then check if either the platform,
 target or operating system are concretized. If any of the fields are
 changed then return True. If everything is concretized (i.e the
 architecture attribute is a namedtuple of classes) then return False.
@@ -262,7 +264,7 @@ def concretize_architecture(self, spec):
 while not spec.architecture.concrete and default_archs:
 arch = default_archs.pop(0)
-replacement_fields = [k for k, v in arch.to_cmp_dict().iteritems()
+replacement_fields = [k for k, v in iteritems(arch.to_cmp_dict())
 if v and not getattr(spec.architecture, k)]
 for field in replacement_fields:
 setattr(spec.architecture, field, getattr(arch, field))

View file

@@ -52,6 +52,8 @@
 import os
 import re
 import sys
+from six import string_types
+from six import iteritems
 import yaml
 import jsonschema
@@ -108,7 +110,7 @@ def extend_with_default(validator_class):
 "patternProperties"]
 def set_defaults(validator, properties, instance, schema):
-for property, subschema in properties.iteritems():
+for property, subschema in iteritems(properties):
 if "default" in subschema:
 instance.setdefault(property, subschema["default"])
 for err in validate_properties(
@@ -116,10 +118,10 @@ def set_defaults(validator, properties, instance, schema):
 yield err
 def set_pp_defaults(validator, properties, instance, schema):
-for property, subschema in properties.iteritems():
+for property, subschema in iteritems(properties):
 if "default" in subschema:
 if isinstance(instance, dict):
-for key, val in instance.iteritems():
+for key, val in iteritems(instance):
 if re.match(property, key) and val is None:
 instance[key] = subschema["default"]
@@ -306,8 +308,8 @@ def _mark_overrides(data):
 elif isinstance(data, dict):
 marked = {}
-for key, val in data.iteritems():
-if isinstance(key, basestring) and key.endswith(':'):
+for key, val in iteritems(data):
+if isinstance(key, string_types) and key.endswith(':'):
 key = syaml.syaml_str(key[:-1])
 key.override = True
 marked[key] = _mark_overrides(val)
@@ -348,7 +350,7 @@ def they_are(t):
 # Source dict is merged into dest.
 elif they_are(dict):
-for sk, sv in source.iteritems():
+for sk, sv in iteritems(source):
 if override(sk) or sk not in dest:
 # if sk ended with ::, or if it's new, completely override
 dest[sk] = copy.copy(sv)

View file

@@ -41,6 +41,8 @@
 """
 import os
 import socket
+from six import string_types
+from six import iteritems
 from yaml.error import MarkedYAMLError, YAMLError
@@ -260,7 +262,7 @@ def _read_from_file(self, stream, format='json'):
 raise ValueError("Invalid database format: %s" % format)
 try:
-if isinstance(stream, basestring):
+if isinstance(stream, string_types):
 with open(stream, 'r') as f:
 fdata = load(f)
 else:
@@ -511,7 +513,7 @@ def _add(self, spec, directory_layout=None, explicit=False):
 new_spec, path, installed, ref_count=0, explicit=explicit)
 # Connect dependencies from the DB to the new copy.
-for name, dep in spec.dependencies_dict(_tracked_deps).iteritems():
+for name, dep in iteritems(spec.dependencies_dict(_tracked_deps)):
 dkey = dep.spec.dag_hash()
 new_spec._add_dependency(self._data[dkey].spec, dep.deptypes)
 self._data[dkey].ref_count += 1

View file

@@ -51,6 +51,7 @@ class OpenMpi(Package):
 import inspect
 import os.path
 import re
+from six import string_types
 import llnl.util.lang
 import spack
@@ -174,7 +175,7 @@ class Foo(Package):
 """
 global __all__
-if isinstance(dicts, basestring):
+if isinstance(dicts, string_types):
 dicts = (dicts, )
 if not isinstance(dicts, collections.Sequence):
 message = "dicts arg must be list, tuple, or string. Found {0}"

View file

@@ -23,7 +23,6 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 import os
-import exceptions
 import shutil
 import glob
 import tempfile
@@ -137,7 +136,7 @@ def remove_install_directory(self, spec):
 if os.path.exists(path):
 try:
 shutil.rmtree(path)
-except exceptions.OSError as e:
+except OSError as e:
 raise RemoveFailedError(spec, path, e)
 path = os.path.dirname(path)

View file

@@ -291,7 +291,7 @@ def from_sourcing_files(*args, **kwargs):
 shell_options = '{shell_options}'.format(**info)
 source_file = '{source_command} {file} {concatenate_on_success}'
-dump_cmd = "import os, json; print json.dumps(dict(os.environ))"
+dump_cmd = "import os, json; print(json.dumps(dict(os.environ)))"
 dump_environment = 'python -c "%s"' % dump_cmd
 # Construct the command that will be executed

View file

@@ -22,8 +22,11 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
+from __future__ import print_function
 import os
 import sys
 import llnl.util.tty as tty
 import spack
 import inspect

View file

@@ -46,6 +46,9 @@
 import shutil
 import copy
 from functools import wraps
+from six import string_types
+from six import with_metaclass
 import llnl.util.tty as tty
 from llnl.util.filesystem import *
 import spack
@@ -74,21 +77,20 @@ def wrapper(self, *args, **kwargs):
 return wrapper
-class FetchStrategy(object):
-"""Superclass of all fetch strategies."""
-enabled = False # Non-abstract subclasses should be enabled.
-required_attributes = None # Attributes required in version() args.
-class __metaclass__(type):
+class FSMeta(type):
 """This metaclass registers all fetch strategies in a list."""
 def __init__(cls, name, bases, dict):
 type.__init__(cls, name, bases, dict)
 if cls.enabled:
 all_strategies.append(cls)
+class FetchStrategy(with_metaclass(FSMeta, object)):
+"""Superclass of all fetch strategies."""
+enabled = False # Non-abstract subclasses should be enabled.
+required_attributes = None # Attributes required in version() args.
 def __init__(self):
 # The stage is initialized late, so that fetch strategies can be
 # constructed at package construction time. This is where things
@@ -319,7 +321,7 @@ def expand(self):
 # top-level directory. We ignore hidden files to accomodate
 # these "semi-exploding" tarballs.
 files = os.listdir(tarball_container)
-non_hidden = filter(lambda f: not f.startswith('.'), files)
+non_hidden = [f for f in files if not f.startswith('.')]
 if len(non_hidden) == 1:
 expanded_dir = os.path.join(tarball_container, non_hidden[0])
 if os.path.isdir(expanded_dir):
@@ -461,7 +463,7 @@ def archive(self, destination, **kwargs):
 patterns = kwargs.get('exclude', None)
 if patterns is not None:
-if isinstance(patterns, basestring):
+if isinstance(patterns, string_types):
 patterns = [patterns]
 for p in patterns:
 tar.add_default_arg('--exclude=%s' % p)

View file

@@ -63,6 +63,7 @@
 """
 from heapq import *
+from six import iteritems
 from llnl.util.lang import *
 from llnl.util.tty.color import *
@@ -562,7 +563,7 @@ def label(key, label):
 continue
 # Add edges for each depends_on in the package.
-for dep_name, dep in spec.package.dependencies.iteritems():
+for dep_name, dep in iteritems(spec.package.dependencies):
 deps.add((spec.name, dep_name))
 # If the package provides something, add an edge for that.

View file

@@ -23,6 +23,7 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
+from __future__ import absolute_import
 import os
 import re
 import platform

View file

@@ -23,15 +23,16 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 import spack.modules
+from six import iteritems
 def post_install(pkg):
-for item, cls in spack.modules.module_types.iteritems():
+for item, cls in iteritems(spack.modules.module_types):
 generator = cls(pkg.spec)
 generator.write()
 def post_uninstall(pkg):
-for item, cls in spack.modules.module_types.iteritems():
+for item, cls in iteritems(spack.modules.module_types):
 generator = cls(pkg.spec)
 generator.remove()

View file

@ -46,6 +46,8 @@
import re import re
import string import string
import textwrap import textwrap
from six import iteritems
from six import with_metaclass
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.filesystem import join_path, mkdirp from llnl.util.filesystem import join_path, mkdirp
@ -213,7 +215,7 @@ def process_arglist(arglist):
for x in arglist: for x in arglist:
yield (x, ) yield (x, )
else: else:
for x in arglist.iteritems(): for x in iteritems(arglist):
yield x yield x
for method, arglist in environment_actions.items(): for method, arglist in environment_actions.items():
@ -246,18 +248,18 @@ def format_env_var_name(name):
return name.replace('-', '_').upper() return name.replace('-', '_').upper()
class EnvModule(object): class ModuleMeta(type):
name = 'env_module' """Metaclass registers modules in themodule_types dict."""
formats = {}
class __metaclass__(type):
def __init__(cls, name, bases, dict): def __init__(cls, name, bases, dict):
type.__init__(cls, name, bases, dict) type.__init__(cls, name, bases, dict)
if cls.name != 'env_module' and cls.name in _module_config[ if cls.name != 'env_module' and cls.name in _module_config['enable']:
'enable']:
module_types[cls.name] = cls module_types[cls.name] = cls
class EnvModule(with_metaclass(ModuleMeta, object)):
name = 'env_module'
formats = {}
def __init__(self, spec=None): def __init__(self, spec=None):
self.spec = spec self.spec = spec
self.pkg = spec.package # Just stored for convenience self.pkg = spec.package # Just stored for convenience
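
The module code above ports the nested __metaclass__ class (a Python-2-only spelling) to an explicit ModuleMeta used through six.with_metaclass. Below is a self-contained sketch of the same registration pattern, assuming six is installed; the registry and class names are stand-ins, not Spack's:

    from six import with_metaclass

    registry = {}                          # stands in for module_types

    class RegisterMeta(type):
        """Record every concrete subclass in `registry` by its name."""
        def __init__(cls, name, bases, attrs):
            type.__init__(cls, name, bases, attrs)
            if cls.name != 'base':
                registry[cls.name] = cls

    class Base(with_metaclass(RegisterMeta, object)):
        name = 'base'

    class TclModule(Base):
        name = 'tcl'

    assert registry == {'tcl': TclModule}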


@ -54,7 +54,7 @@ def find_compilers(self, *paths):
# ensure all the version calls we made are cached in the parent # ensure all the version calls we made are cached in the parent
# process, as well. This speeds up Spack a lot. # process, as well. This speeds up Spack a lot.
clist = reduce(lambda x, y: x + y, compiler_lists) clist = [comp for cl in compiler_lists for comp in cl]
return clist return clist
def find_compiler(self, cmp_cls, *paths): def find_compiler(self, cmp_cls, *paths):
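
reduce() is no longer a builtin on Python 3 (it lives in functools), and the change above also avoids repeated list concatenation. A sketch of the flattening comprehension with hypothetical compiler lists:

    compiler_lists = [['gcc@4.8', 'gcc@6.1'], [], ['clang@3.9']]   # hypothetical

    clist = [comp for cl in compiler_lists for comp in cl]
    assert clist == ['gcc@4.8', 'gcc@6.1', 'clang@3.9']

    # The old spelling still works, but needs an import on Python 3:
    from functools import reduce
    assert reduce(lambda x, y: x + y, compiler_lists) == clist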


@ -42,6 +42,9 @@
import sys import sys
import textwrap import textwrap
import time import time
from six import StringIO
from six import string_types
from six import with_metaclass
import llnl.util.lock import llnl.util.lock
import llnl.util.tty as tty import llnl.util.tty as tty
@ -56,7 +59,7 @@
import spack.repository import spack.repository
import spack.url import spack.url
import spack.util.web import spack.util.web
from StringIO import StringIO
from llnl.util.filesystem import * from llnl.util.filesystem import *
from llnl.util.lang import * from llnl.util.lang import *
from llnl.util.link_tree import LinkTree from llnl.util.link_tree import LinkTree
@ -238,7 +241,7 @@ def _wrapper(instance, *args, **kwargs):
return _execute_under_condition return _execute_under_condition
class PackageBase(object): class PackageBase(with_metaclass(PackageMeta, object)):
"""This is the superclass for all spack packages. """This is the superclass for all spack packages.
***The Package class*** ***The Package class***
@ -475,7 +478,6 @@ class SomePackage(Package):
Package creators override functions like install() (all of them do this), Package creators override functions like install() (all of them do this),
clean() (some of them do this), and others to provide custom behavior. clean() (some of them do this), and others to provide custom behavior.
""" """
__metaclass__ = PackageMeta
# #
# These are default values for instance variables. # These are default values for instance variables.
# #
@ -1115,6 +1117,13 @@ def _prefix_write_lock(self):
finally: finally:
self.prefix_lock.release_write() self.prefix_lock.release_write()
@contextlib.contextmanager
def _stage_and_write_lock(self):
"""Prefix lock nested in a stage."""
with self.stage:
with self._prefix_write_lock():
yield
def do_install(self, def do_install(self,
keep_prefix=False, keep_prefix=False,
keep_stage=False, keep_stage=False,
@ -1233,7 +1242,7 @@ def build_process(input_stream):
self.stage.keep = keep_stage self.stage.keep = keep_stage
with contextlib.nested(self.stage, self._prefix_write_lock()): with self._stage_and_write_lock():
# Run the pre-install hook in the child process after # Run the pre-install hook in the child process after
# the directory is created. # the directory is created.
spack.hooks.pre_install(self) spack.hooks.pre_install(self)
@ -1265,9 +1274,10 @@ def build_process(input_stream):
input_stream=input_stream input_stream=input_stream
) )
with redirection_context as log_redirection: with redirection_context as log_redirection:
for phase_name, phase in zip(self.phases, self._InstallPhase_phases): # NOQA: ignore=E501 for phase_name, phase in zip(
self.phases, self._InstallPhase_phases):
tty.msg( tty.msg(
'Executing phase : \'{0}\''.format(phase_name) # NOQA: ignore=E501 'Executing phase : \'{0}\''.format(phase_name)
) )
# Redirect stdout and stderr to daemon pipe # Redirect stdout and stderr to daemon pipe
with log_redirection: with log_redirection:
@ -1355,7 +1365,7 @@ def sanity_check_prefix(self):
"""This function checks whether install succeeded.""" """This function checks whether install succeeded."""
def check_paths(path_list, filetype, predicate): def check_paths(path_list, filetype, predicate):
if isinstance(path_list, basestring): if isinstance(path_list, string_types):
path_list = [path_list] path_list = [path_list]
for path in path_list: for path in path_list:
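
contextlib.nested() was removed in Python 3, so the install path above composes the stage and the prefix write lock in a small @contextmanager helper instead. A standalone sketch of the same composition; the two inner managers here are stand-ins for self.stage and self._prefix_write_lock():

    from __future__ import print_function
    import contextlib

    @contextlib.contextmanager
    def fake_stage():                      # stand-in for self.stage
        print('stage: created')
        yield
        print('stage: destroyed')

    @contextlib.contextmanager
    def fake_prefix_write_lock():          # stand-in for self._prefix_write_lock()
        print('lock: acquired')
        yield
        print('lock: released')

    @contextlib.contextmanager
    def stage_and_write_lock():
        """Prefix write lock nested inside the stage."""
        with fake_stage():
            with fake_prefix_write_lock():
                yield

    with stage_and_write_lock():
        print('build and install')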


@ -22,6 +22,8 @@
# License along with this program; if not, write to the Free Software # License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################## ##############################################################################
from six import string_types
from six import iteritems
import spack import spack
import spack.error import spack.error
@ -180,7 +182,7 @@ def spec_preferred_variants(self, pkgname):
variants = self.preferred.get(pkg, {}).get('variants', '') variants = self.preferred.get(pkg, {}).get('variants', '')
if variants: if variants:
break break
if not isinstance(variants, basestring): if not isinstance(variants, string_types):
variants = " ".join(variants) variants = " ".join(variants)
pkg = spack.repo.get(pkgname) pkg = spack.repo.get(pkgname)
spec = spack.spec.Spec("%s %s" % (pkgname, variants)) spec = spack.spec.Spec("%s %s" % (pkgname, variants))
@ -233,7 +235,7 @@ def spec_externals(spec):
if (not pkg_paths) and (not pkg_modules): if (not pkg_paths) and (not pkg_modules):
return [] return []
for external_spec, path in pkg_paths.iteritems(): for external_spec, path in iteritems(pkg_paths):
if not path: if not path:
# skip entries without paths (avoid creating extra Specs) # skip entries without paths (avoid creating extra Specs)
continue continue
@ -242,7 +244,7 @@ def spec_externals(spec):
if external_spec.satisfies(spec): if external_spec.satisfies(spec):
external_specs.append(external_spec) external_specs.append(external_spec)
for external_spec, module in pkg_modules.iteritems(): for external_spec, module in iteritems(pkg_modules):
if not module: if not module:
continue continue


@ -45,15 +45,15 @@ def compile_c_and_execute(source_file, include_flags, link_flags):
def compare_output(current_output, blessed_output): def compare_output(current_output, blessed_output):
"""Compare blessed and current output of executables.""" """Compare blessed and current output of executables."""
if not (current_output == blessed_output): if not (current_output == blessed_output):
print "Produced output does not match expected output." print("Produced output does not match expected output.")
print "Expected output:" print("Expected output:")
print '-' * 80 print('-' * 80)
print blessed_output print(blessed_output)
print '-' * 80 print('-' * 80)
print "Produced output:" print("Produced output:")
print '-' * 80 print('-' * 80)
print current_output print(current_output)
print '-' * 80 print('-' * 80)
raise RuntimeError("Ouput check failed.", raise RuntimeError("Ouput check failed.",
"See spack_output.log for details") "See spack_output.log for details")


@ -25,6 +25,8 @@
import re import re
import shlex import shlex
import itertools import itertools
from six import string_types
import spack.error import spack.error
@ -118,7 +120,7 @@ def __init__(self, lexer):
def gettok(self): def gettok(self):
"""Puts the next token in the input stream into self.next.""" """Puts the next token in the input stream into self.next."""
try: try:
self.next = self.tokens.next() self.next = next(self.tokens)
except StopIteration: except StopIteration:
self.next = None self.next = None
@ -159,7 +161,7 @@ def expect(self, id):
sys.exit(1) sys.exit(1)
def setup(self, text): def setup(self, text):
if isinstance(text, basestring): if isinstance(text, string_types):
text = shlex.split(text) text = shlex.split(text)
self.text = text self.text = text
self.push_tokens(self.lexer.lex(text)) self.push_tokens(self.lexer.lex(text))


@ -26,6 +26,7 @@
The ``virtual`` module contains utility classes for virtual dependencies. The ``virtual`` module contains utility classes for virtual dependencies.
""" """
from itertools import product as iproduct from itertools import product as iproduct
from six import iteritems
from pprint import pformat from pprint import pformat
import spack.util.spack_yaml as syaml import spack.util.spack_yaml as syaml
@ -97,7 +98,7 @@ def update(self, spec):
assert(not spec.virtual) assert(not spec.virtual)
pkg = spec.package pkg = spec.package
for provided_spec, provider_specs in pkg.provided.iteritems(): for provided_spec, provider_specs in iteritems(pkg.provided):
for provider_spec in provider_specs: for provider_spec in provider_specs:
# TODO: fix this comment. # TODO: fix this comment.
# We want satisfaction other than flags # We want satisfaction other than flags
@ -201,7 +202,7 @@ def to_yaml(self, stream=None):
def from_yaml(stream): def from_yaml(stream):
try: try:
yfile = syaml.load(stream) yfile = syaml.load(stream)
except MarkedYAMLError, e: except MarkedYAMLError as e:
raise spack.spec.SpackYAMLError( raise spack.spec.SpackYAMLError(
"error parsing YAML ProviderIndex cache:", str(e)) "error parsing YAML ProviderIndex cache:", str(e))
@ -288,7 +289,7 @@ def _transform(providers, transform_fun, out_mapping_type=dict):
""" """
def mapiter(mappings): def mapiter(mappings):
if isinstance(mappings, dict): if isinstance(mappings, dict):
return mappings.iteritems() return iteritems(mappings)
else: else:
return iter(mappings) return iter(mappings)


@ -26,7 +26,6 @@
import stat import stat
import shutil import shutil
import errno import errno
import exceptions
import sys import sys
import inspect import inspect
import imp import imp
@ -558,7 +557,7 @@ def _read_config(self):
return yaml_data['repo'] return yaml_data['repo']
except exceptions.IOError: except IOError:
tty.die("Error reading %s when opening %s" tty.die("Error reading %s when opening %s"
% (self.config_file, self.root)) % (self.config_file, self.root))


@ -97,13 +97,14 @@
""" """
import base64 import base64
import collections import collections
import csv
import ctypes import ctypes
import hashlib import hashlib
import itertools import itertools
from operator import attrgetter from operator import attrgetter
from six import StringIO
from six import string_types
from six import iteritems
import cStringIO
import llnl.util.tty as tty import llnl.util.tty as tty
import spack import spack
import spack.architecture import spack.architecture
@ -113,7 +114,7 @@
import spack.store import spack.store
import spack.util.spack_json as sjson import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml import spack.util.spack_yaml as syaml
from cStringIO import StringIO
from llnl.util.filesystem import find_libraries from llnl.util.filesystem import find_libraries
from llnl.util.lang import * from llnl.util.lang import *
from llnl.util.tty.color import * from llnl.util.tty.color import *
@ -222,7 +223,7 @@ def canonical_deptype(deptype):
if deptype is None: if deptype is None:
return alldeps return alldeps
elif isinstance(deptype, str): elif isinstance(deptype, string_types):
return special_types.get(deptype, (deptype,)) return special_types.get(deptype, (deptype,))
elif isinstance(deptype, (tuple, list)): elif isinstance(deptype, (tuple, list)):
@ -270,7 +271,7 @@ def __init__(self, *args):
spec_like = args[0] spec_like = args[0]
if isinstance(spec_like, ArchSpec): if isinstance(spec_like, ArchSpec):
self._dup(spec_like) self._dup(spec_like)
elif isinstance(spec_like, basestring): elif isinstance(spec_like, string_types):
spec_fields = spec_like.split("-") spec_fields = spec_like.split("-")
if len(spec_fields) == 3: if len(spec_fields) == 3:
@ -391,7 +392,7 @@ def constrain(self, other):
raise UnsatisfiableArchitectureSpecError(self, other) raise UnsatisfiableArchitectureSpecError(self, other)
constrained = False constrained = False
for attr, svalue in self.to_cmp_dict().iteritems(): for attr, svalue in iteritems(self.to_cmp_dict()):
ovalue = getattr(other, attr) ovalue = getattr(other, attr)
if svalue is None and ovalue is not None: if svalue is None and ovalue is not None:
setattr(self, attr, ovalue) setattr(self, attr, ovalue)
@ -406,7 +407,7 @@ def copy(self):
@property @property
def concrete(self): def concrete(self):
return all(v for k, v in self.to_cmp_dict().iteritems()) return all(v for k, v in iteritems(self.to_cmp_dict()))
def to_cmp_dict(self): def to_cmp_dict(self):
"""Returns a dictionary that can be used for field comparison.""" """Returns a dictionary that can be used for field comparison."""
@ -464,7 +465,7 @@ def __init__(self, *args):
arg = args[0] arg = args[0]
# If there is one argument, it's either another CompilerSpec # If there is one argument, it's either another CompilerSpec
# to copy or a string to parse # to copy or a string to parse
if isinstance(arg, basestring): if isinstance(arg, string_types):
c = SpecParser().parse_compiler(arg) c = SpecParser().parse_compiler(arg)
self.name = c.name self.name = c.name
self.versions = c.versions self.versions = c.versions
@ -728,7 +729,7 @@ def copy(self):
return clone return clone
def _cmp_key(self): def _cmp_key(self):
return tuple((k, tuple(v)) for k, v in sorted(self.iteritems())) return tuple((k, tuple(v)) for k, v in sorted(iteritems(self)))
def __str__(self): def __str__(self):
sorted_keys = filter( sorted_keys = filter(
@ -918,7 +919,7 @@ def __init__(self, spec_like, *dep_like, **kwargs):
return return
# Parse if the spec_like is a string. # Parse if the spec_like is a string.
if not isinstance(spec_like, basestring): if not isinstance(spec_like, string_types):
raise TypeError("Can't make spec out of %s" % type(spec_like)) raise TypeError("Can't make spec out of %s" % type(spec_like))
spec_list = SpecParser().parse(spec_like) spec_list = SpecParser().parse(spec_like)
@ -1018,9 +1019,9 @@ def _add_variant(self, name, value):
if name in self.variants: if name in self.variants:
raise DuplicateVariantError( raise DuplicateVariantError(
"Cannot specify variant '%s' twice" % name) "Cannot specify variant '%s' twice" % name)
if isinstance(value, basestring) and value.upper() == 'TRUE': if isinstance(value, string_types) and value.upper() == 'TRUE':
value = True value = True
elif isinstance(value, basestring) and value.upper() == 'FALSE': elif isinstance(value, string_types) and value.upper() == 'FALSE':
value = False value = False
self.variants[name] = VariantSpec(name, value) self.variants[name] = VariantSpec(name, value)
@ -1056,7 +1057,7 @@ def _set_architecture(self, **kwargs):
new_vals = tuple(kwargs.get(arg, None) for arg in arch_attrs) new_vals = tuple(kwargs.get(arg, None) for arg in arch_attrs)
self.architecture = ArchSpec(*new_vals) self.architecture = ArchSpec(*new_vals)
else: else:
new_attrvals = [(a, v) for a, v in kwargs.iteritems() new_attrvals = [(a, v) for a, v in iteritems(kwargs)
if a in arch_attrs] if a in arch_attrs]
for new_attr, new_value in new_attrvals: for new_attr, new_value in new_attrvals:
if getattr(self.architecture, new_attr): if getattr(self.architecture, new_attr):
@ -1219,7 +1220,7 @@ def traverse_edges(self, visited=None, d=0, deptype=None,
# get initial values for kwargs # get initial values for kwargs
depth = kwargs.get('depth', False) depth = kwargs.get('depth', False)
key_fun = kwargs.get('key', id) key_fun = kwargs.get('key', id)
if isinstance(key_fun, basestring): if isinstance(key_fun, string_types):
key_fun = attrgetter(key_fun) key_fun = attrgetter(key_fun)
yield_root = kwargs.get('root', True) yield_root = kwargs.get('root', True)
cover = kwargs.get('cover', 'nodes') cover = kwargs.get('cover', 'nodes')
@ -1314,7 +1315,7 @@ def dag_hash(self, length=None):
else: else:
yaml_text = syaml.dump( yaml_text = syaml.dump(
self.to_node_dict(), default_flow_style=True, width=maxint) self.to_node_dict(), default_flow_style=True, width=maxint)
sha = hashlib.sha1(yaml_text) sha = hashlib.sha1(yaml_text.encode('utf-8'))
b32_hash = base64.b32encode(sha.digest()).lower() b32_hash = base64.b32encode(sha.digest()).lower()
if self.concrete: if self.concrete:
self._hash = b32_hash self._hash = b32_hash
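
The .encode('utf-8') added above is needed because hashlib on Python 3 only accepts bytes, while Python 2 happily hashed the (byte) str it was given. A short sketch with a made-up YAML string:

    import base64
    import hashlib

    yaml_text = 'name: zlib\nversion: 1.2.11\n'       # hypothetical node dict dump
    sha = hashlib.sha1(yaml_text.encode('utf-8'))     # bytes in, on both 2 and 3
    b32_hash = base64.b32encode(sha.digest()).lower()
    assert len(b32_hash) == 32                        # 160 bits -> 32 base32 chars
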
@ -1421,7 +1422,7 @@ def read_yaml_dep_specs(dependency_dict):
formats so that reindex will work on old specs/databases. formats so that reindex will work on old specs/databases.
""" """
for dep_name, elt in dependency_dict.items(): for dep_name, elt in dependency_dict.items():
if isinstance(elt, basestring): if isinstance(elt, string_types):
# original format, elt is just the dependency hash. # original format, elt is just the dependency hash.
dag_hash, deptypes = elt, ['build', 'link'] dag_hash, deptypes = elt, ['build', 'link']
elif isinstance(elt, tuple): elif isinstance(elt, tuple):
@ -2413,11 +2414,8 @@ def __getitem__(self, name):
if query_parameters: if query_parameters:
# We have extra query parameters, which are comma separated # We have extra query parameters, which are comma separated
# values # values
f = cStringIO.StringIO(query_parameters.pop()) csv = query_parameters.pop().strip()
try: query_parameters = re.split(r'\s*,\s*', csv)
query_parameters = next(csv.reader(f, skipinitialspace=True))
except StopIteration:
query_parameters = ['']
try: try:
value = next( value = next(
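
The hunk above drops csv.reader, which only handles byte strings on Python 2, in favor of a plain regex split of the comma-separated query parameters, matching the commit message. A sketch of the replacement behavior on made-up input:

    import re

    raw = ' cflags , ldflags ,libs '                  # hypothetical 'pkg:a,b,c' tail
    query_parameters = re.split(r'\s*,\s*', raw.strip())
    assert query_parameters == ['cflags', 'ldflags', 'libs']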


@ -29,18 +29,19 @@
import shutil import shutil
import tempfile import tempfile
import getpass import getpass
from urlparse import urljoin from six import string_types
from six import iteritems
from six.moves.urllib.parse import urljoin
import llnl.util.tty as tty import llnl.util.tty as tty
import llnl.util.lock import llnl.util.lock
from llnl.util.filesystem import * from llnl.util.filesystem import *
import spack.util.pattern as pattern
import spack import spack
import spack.config import spack.config
import spack.fetch_strategy as fs
import spack.error import spack.error
import spack.fetch_strategy as fs
import spack.util.pattern as pattern
from spack.version import * from spack.version import *
from spack.util.path import canonicalize_path from spack.util.path import canonicalize_path
from spack.util.crypto import prefix_bits, bit_length from spack.util.crypto import prefix_bits, bit_length
@ -84,7 +85,7 @@ def get_tmp_root():
if _tmp_root is None: if _tmp_root is None:
config = spack.config.get_config('config') config = spack.config.get_config('config')
candidates = config['build_stage'] candidates = config['build_stage']
if isinstance(candidates, basestring): if isinstance(candidates, string_types):
candidates = [candidates] candidates = [candidates]
path = _first_accessible_path(candidates) path = _first_accessible_path(candidates)
@ -188,7 +189,7 @@ def __init__(
""" """
# TODO: fetch/stage coupling needs to be reworked -- the logic # TODO: fetch/stage coupling needs to be reworked -- the logic
# TODO: here is convoluted and not modular enough. # TODO: here is convoluted and not modular enough.
if isinstance(url_or_fetch_strategy, basestring): if isinstance(url_or_fetch_strategy, string_types):
self.fetcher = fs.from_url(url_or_fetch_strategy) self.fetcher = fs.from_url(url_or_fetch_strategy)
elif isinstance(url_or_fetch_strategy, fs.FetchStrategy): elif isinstance(url_or_fetch_strategy, fs.FetchStrategy):
self.fetcher = url_or_fetch_strategy self.fetcher = url_or_fetch_strategy
@ -548,7 +549,7 @@ def expand_archive(self):
if not isinstance(placement, dict): if not isinstance(placement, dict):
placement = {'': placement} placement = {'': placement}
# Make the paths in the dictionary absolute and link # Make the paths in the dictionary absolute and link
for key, value in placement.iteritems(): for key, value in iteritems(placement):
target_path = join_path( target_path = join_path(
root_stage.source_path, resource.destination) root_stage.source_path, resource.destination)
destination_path = join_path(target_path, value) destination_path = join_path(target_path, value)
@ -661,7 +662,7 @@ def cache_local(self):
def _get_mirrors(): def _get_mirrors():
"""Get mirrors from spack configuration.""" """Get mirrors from spack configuration."""
config = spack.config.get_config('mirrors') config = spack.config.get_config('mirrors')
return [val for name, val in config.iteritems()] return [val for name, val in iteritems(config)]
def ensure_access(file=spack.stage_path): def ensure_access(file=spack.stage_path):


@ -22,19 +22,19 @@
# License along with this program; if not, write to the Free Software # License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################## ##############################################################################
import StringIO
import argparse import argparse
import codecs import codecs
import collections import collections
import contextlib import contextlib
import unittest import unittest
from six import StringIO
import llnl.util.filesystem import llnl.util.filesystem
import spack import spack
import spack.cmd import spack.cmd
import spack.cmd.install as install import spack.cmd.install as install
FILE_REGISTRY = collections.defaultdict(StringIO.StringIO) FILE_REGISTRY = collections.defaultdict(StringIO)
# Monkey-patch open to write module files to a StringIO instance # Monkey-patch open to write module files to a StringIO instance
@ -44,7 +44,7 @@ def mock_open(filename, mode, *args):
message = 'test.test_install : unexpected opening mode for mock_open' message = 'test.test_install : unexpected opening mode for mock_open'
raise RuntimeError(message) raise RuntimeError(message)
FILE_REGISTRY[filename] = StringIO.StringIO() FILE_REGISTRY[filename] = StringIO()
try: try:
yield FILE_REGISTRY[filename] yield FILE_REGISTRY[filename]


@ -23,6 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################## ##############################################################################
import pytest import pytest
from six import iteritems
import spack.spec import spack.spec
import spack.compilers as compilers import spack.compilers as compilers
@ -38,11 +39,11 @@ def test_get_compiler_duplicates(self):
cfg_file_to_duplicates = compilers.get_compiler_duplicates( cfg_file_to_duplicates = compilers.get_compiler_duplicates(
'gcc@4.5.0', spack.spec.ArchSpec('cray-CNL-xeon')) 'gcc@4.5.0', spack.spec.ArchSpec('cray-CNL-xeon'))
assert len(cfg_file_to_duplicates) == 1 assert len(cfg_file_to_duplicates) == 1
cfg_file, duplicates = cfg_file_to_duplicates.iteritems().next() cfg_file, duplicates = next(iteritems(cfg_file_to_duplicates))
assert len(duplicates) == 1 assert len(duplicates) == 1
def test_all_compilers(self): def test_all_compilers(self):
all_compilers = compilers.all_compilers() all_compilers = compilers.all_compilers()
filtered = list(x for x in all_compilers if str(x.spec) == 'clang@3.3') filtered = [x for x in all_compilers if str(x.spec) == 'clang@3.3']
filtered = list(x for x in filtered if x.operating_system == 'SuSE11') filtered = [x for x in filtered if x.operating_system == 'SuSE11']
assert len(filtered) == 1 assert len(filtered) == 1


@ -27,11 +27,12 @@
import os import os
import re import re
import shutil import shutil
from six import StringIO
import cStringIO
import llnl.util.filesystem import llnl.util.filesystem
import llnl.util.lang import llnl.util.lang
import ordereddict_backport import ordereddict_backport
import py import py
import pytest import pytest
import spack import spack
@ -56,11 +57,8 @@ def no_stdin_duplication(monkeypatch):
"""Duplicating stdin (or any other stream) returns an empty """Duplicating stdin (or any other stream) returns an empty
cStringIO object. cStringIO object.
""" """
monkeypatch.setattr( monkeypatch.setattr(llnl.util.lang, 'duplicate_stream',
llnl.util.lang, lambda x: StringIO())
'duplicate_stream',
lambda x: cStringIO.StringIO()
)
@pytest.fixture(autouse=True) @pytest.fixture(autouse=True)
@ -181,6 +179,7 @@ def config(configuration_dir):
spack.config.clear_config_caches() spack.config.clear_config_caches()
@pytest.fixture(scope='module') @pytest.fixture(scope='module')
def database(tmpdir_factory, builtin_mock, config): def database(tmpdir_factory, builtin_mock, config):
"""Creates a mock database with some packages installed note that """Creates a mock database with some packages installed note that
@ -312,7 +311,7 @@ def mock_archive():
"\ttouch $prefix/dummy_file\n" "\ttouch $prefix/dummy_file\n"
"EOF\n" "EOF\n"
) )
os.chmod(configure_path, 0755) os.chmod(configure_path, 0o755)
# Archive it # Archive it
current = tmpdir.chdir() current = tmpdir.chdir()
archive_name = '{0}.tar.gz'.format(repo_name) archive_name = '{0}.tar.gz'.format(repo_name)
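
The chmod call above is one of the octal-literal conversions: 0o755 is accepted by Python 2.6+ and Python 3, while the bare 0755 form is a SyntaxError on Python 3. Both spell the same integer:

    assert 0o755 == 493          # the value the old 0755 literal had on Python 2
    # os.chmod(configure_path, 0o755) therefore behaves exactly as before.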


@ -22,7 +22,7 @@
# License along with this program; if not, write to the Free Software # License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################## ##############################################################################
from StringIO import StringIO from six import StringIO
from spack.spec import Spec from spack.spec import Spec
from spack.graph import AsciiGraph, topological_sort, graph_dot from spack.graph import AsciiGraph, topological_sort, graph_dot


@ -283,7 +283,7 @@ def test_upgrade_read_to_write_fails_with_readonly_file(self):
# ensure lock file exists the first time, so we open it read-only # ensure lock file exists the first time, so we open it read-only
# to begin with. # to begin with.
touch(self.lock_path) touch(self.lock_path)
os.chmod(self.lock_path, 0444) os.chmod(self.lock_path, 0o444)
lock = Lock(self.lock_path) lock = Lock(self.lock_path)
self.assertTrue(lock._reads == 0) self.assertTrue(lock._reads == 0)


@ -46,7 +46,7 @@ def setUp(self):
with open(make_exe, 'w') as f: with open(make_exe, 'w') as f:
f.write('#!/bin/sh\n') f.write('#!/bin/sh\n')
f.write('echo "$@"') f.write('echo "$@"')
os.chmod(make_exe, 0700) os.chmod(make_exe, 0o700)
path_put_first('PATH', [self.tmpdir]) path_put_first('PATH', [self.tmpdir])


@ -24,14 +24,14 @@
############################################################################## ##############################################################################
import collections import collections
import contextlib import contextlib
from six import StringIO
import cStringIO
import pytest import pytest
import spack.modules import spack.modules
import spack.spec import spack.spec
# Our "filesystem" for the tests below # Our "filesystem" for the tests below
FILE_REGISTRY = collections.defaultdict(cStringIO.StringIO) FILE_REGISTRY = collections.defaultdict(StringIO)
# Spec strings that will be used throughout the tests # Spec strings that will be used throughout the tests
mpich_spec_string = 'mpich@3.0.4' mpich_spec_string = 'mpich@3.0.4'
mpileaks_spec_string = 'mpileaks' mpileaks_spec_string = 'mpileaks'
@ -48,7 +48,7 @@ def _mock(filename, mode):
if not mode == 'w': if not mode == 'w':
raise RuntimeError('unexpected opening mode for stringio_open') raise RuntimeError('unexpected opening mode for stringio_open')
FILE_REGISTRY[filename] = cStringIO.StringIO() FILE_REGISTRY[filename] = StringIO()
try: try:
yield FILE_REGISTRY[filename] yield FILE_REGISTRY[filename]


@ -86,7 +86,7 @@ def test_default_works(builtin_mock):
def test_target_match(builtin_mock): def test_target_match(builtin_mock):
platform = spack.architecture.platform() platform = spack.architecture.platform()
targets = platform.targets.values() targets = list(platform.targets.values())
for target in targets[:-1]: for target in targets[:-1]:
pkg = spack.repo.get('multimethod target=' + target.name) pkg = spack.repo.get('multimethod target=' + target.name)
assert pkg.different_by_target() == target.name assert pkg.different_by_target() == target.name
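
The list() wrapper above is needed because dict.values() returns a view on Python 3, and the test slices targets[:-1] right afterward. A sketch with hypothetical target data:

    targets_by_name = {'fe': 'frontend', 'be': 'backend', 'x86_64': 'default'}

    targets = list(targets_by_name.values())    # works on Python 2 and 3
    assert len(targets[:-1]) == 2
    # targets_by_name.values()[:-1] raises TypeError on Python 3.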


@ -86,6 +86,7 @@ class CompositeFromInterface:
composite.append(self.Two()) composite.append(self.Two())
composite.add() composite.add()
self.assertEqual(self.Base.counter, 3) self.assertEqual(self.Base.counter, 3)
composite.pop() composite.pop()
composite.subtract() composite.subtract()
self.assertEqual(self.Base.counter, 2) self.assertEqual(self.Base.counter, 2)


@ -37,7 +37,8 @@
mpi@:10.0: set([zmpi])}, mpi@:10.0: set([zmpi])},
'stuff': {stuff: set([externalvirtual])}} 'stuff': {stuff: set([externalvirtual])}}
""" """
import StringIO from six import StringIO
import spack import spack
from spack.provider_index import ProviderIndex from spack.provider_index import ProviderIndex
from spack.spec import Spec from spack.spec import Spec
@ -46,10 +47,10 @@
def test_yaml_round_trip(builtin_mock): def test_yaml_round_trip(builtin_mock):
p = ProviderIndex(spack.repo.all_package_names()) p = ProviderIndex(spack.repo.all_package_names())
ostream = StringIO.StringIO() ostream = StringIO()
p.to_yaml(ostream) p.to_yaml(ostream)
istream = StringIO.StringIO(ostream.getvalue()) istream = StringIO(ostream.getvalue())
q = ProviderIndex.from_yaml(istream) q = ProviderIndex.from_yaml(istream)
assert p == q assert p == q


@ -31,6 +31,8 @@
default version. Once those go away, we can likely drop 2.6 and increase default version. Once those go away, we can likely drop 2.6 and increase
the minimum supported Python 3 version, as well. the minimum supported Python 3 version, as well.
""" """
from __future__ import print_function
import os import os
import sys import sys
import re import re


@ -90,7 +90,7 @@ def test_preorder_node_traversal(self):
names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf', names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf',
'zmpi', 'fake'] 'zmpi', 'fake']
pairs = zip([0, 1, 2, 3, 4, 2, 3], names) pairs = list(zip([0, 1, 2, 3, 4, 2, 3], names))
traversal = dag.traverse() traversal = dag.traverse()
assert [x.name for x in traversal] == names assert [x.name for x in traversal] == names
@ -104,7 +104,7 @@ def test_preorder_edge_traversal(self):
names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf', names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf',
'libelf', 'zmpi', 'fake', 'zmpi'] 'libelf', 'zmpi', 'fake', 'zmpi']
pairs = zip([0, 1, 2, 3, 4, 3, 2, 3, 1], names) pairs = list(zip([0, 1, 2, 3, 4, 3, 2, 3, 1], names))
traversal = dag.traverse(cover='edges') traversal = dag.traverse(cover='edges')
assert [x.name for x in traversal] == names assert [x.name for x in traversal] == names
@ -118,7 +118,7 @@ def test_preorder_path_traversal(self):
names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf', names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf',
'libelf', 'zmpi', 'fake', 'zmpi', 'fake'] 'libelf', 'zmpi', 'fake', 'zmpi', 'fake']
pairs = zip([0, 1, 2, 3, 4, 3, 2, 3, 1, 2], names) pairs = list(zip([0, 1, 2, 3, 4, 3, 2, 3, 1, 2], names))
traversal = dag.traverse(cover='paths') traversal = dag.traverse(cover='paths')
assert [x.name for x in traversal] == names assert [x.name for x in traversal] == names
@ -132,7 +132,7 @@ def test_postorder_node_traversal(self):
names = ['libelf', 'libdwarf', 'dyninst', 'fake', 'zmpi', names = ['libelf', 'libdwarf', 'dyninst', 'fake', 'zmpi',
'callpath', 'mpileaks'] 'callpath', 'mpileaks']
pairs = zip([4, 3, 2, 3, 2, 1, 0], names) pairs = list(zip([4, 3, 2, 3, 2, 1, 0], names))
traversal = dag.traverse(order='post') traversal = dag.traverse(order='post')
assert [x.name for x in traversal] == names assert [x.name for x in traversal] == names
@ -146,7 +146,7 @@ def test_postorder_edge_traversal(self):
names = ['libelf', 'libdwarf', 'libelf', 'dyninst', 'fake', 'zmpi', names = ['libelf', 'libdwarf', 'libelf', 'dyninst', 'fake', 'zmpi',
'callpath', 'zmpi', 'mpileaks'] 'callpath', 'zmpi', 'mpileaks']
pairs = zip([4, 3, 3, 2, 3, 2, 1, 1, 0], names) pairs = list(zip([4, 3, 3, 2, 3, 2, 1, 1, 0], names))
traversal = dag.traverse(cover='edges', order='post') traversal = dag.traverse(cover='edges', order='post')
assert [x.name for x in traversal] == names assert [x.name for x in traversal] == names
@ -160,7 +160,7 @@ def test_postorder_path_traversal(self):
names = ['libelf', 'libdwarf', 'libelf', 'dyninst', 'fake', 'zmpi', names = ['libelf', 'libdwarf', 'libelf', 'dyninst', 'fake', 'zmpi',
'callpath', 'fake', 'zmpi', 'mpileaks'] 'callpath', 'fake', 'zmpi', 'mpileaks']
pairs = zip([4, 3, 3, 2, 3, 2, 1, 2, 1, 0], names) pairs = list(zip([4, 3, 3, 2, 3, 2, 1, 2, 1, 0], names))
traversal = dag.traverse(cover='paths', order='post') traversal = dag.traverse(cover='paths', order='post')
assert [x.name for x in traversal] == names assert [x.name for x in traversal] == names
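
Each of the fixes above wraps zip() in list() because Python 3's zip() is a one-shot iterator, while the tests index and reuse the pairs later. A sketch:

    names = ['mpileaks', 'callpath', 'dyninst']
    depths = [0, 1, 2]

    pairs = list(zip(depths, names))
    assert pairs[0] == (0, 'mpileaks')   # indexing a bare zip() fails on Python 3
    assert len(pairs) == 3               # so does len()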


@ -46,8 +46,8 @@
""" """
import os import os
import re import re
from StringIO import StringIO from six import StringIO
from urlparse import urlsplit, urlunsplit from six.moves.urllib.parse import urlsplit, urlunsplit
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.tty.color import * from llnl.util.tty.color import *
@ -486,7 +486,7 @@ def substitution_offsets(path):
name_offsets = offsets[1::2] name_offsets = offsets[1::2]
ver_offsets = [] ver_offsets = []
for i in xrange(0, len(name_parts), 2): for i in range(0, len(name_parts), 2):
vparts = re.split(ver, name_parts[i]) vparts = re.split(ver, name_parts[i])
voffsets = cumsum(vparts, offsets[i], len) voffsets = cumsum(vparts, offsets[i], len)
ver_offsets.extend(voffsets[1::2]) ver_offsets.extend(voffsets[1::2])


@ -22,10 +22,10 @@
# License along with this program; if not, write to the Free Software # License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################## ##############################################################################
import os import os
import re import re
import subprocess import subprocess
from six import string_types
import llnl.util.tty as tty import llnl.util.tty as tty
import spack import spack
@ -129,7 +129,7 @@ def __call__(self, *args, **kwargs):
raise ValueError("Cannot use `str` as input stream.") raise ValueError("Cannot use `str` as input stream.")
def streamify(arg, mode): def streamify(arg, mode):
if isinstance(arg, basestring): if isinstance(arg, string_types):
return open(arg, mode), True return open(arg, mode), True
elif arg is str: elif arg is str:
return subprocess.PIPE, False return subprocess.PIPE, False


@ -28,7 +28,6 @@
to pickle functions if they're passed indirectly as parameters. to pickle functions if they're passed indirectly as parameters.
""" """
from multiprocessing import Process, Pipe, Semaphore, Value from multiprocessing import Process, Pipe, Semaphore, Value
from itertools import izip
__all__ = ['spawn', 'parmap', 'Barrier'] __all__ = ['spawn', 'parmap', 'Barrier']
@ -43,7 +42,7 @@ def fun(pipe, x):
def parmap(f, X): def parmap(f, X):
pipe = [Pipe() for x in X] pipe = [Pipe() for x in X]
proc = [Process(target=spawn(f), args=(c, x)) proc = [Process(target=spawn(f), args=(c, x))
for x, (p, c) in izip(X, pipe)] for x, (p, c) in zip(X, pipe)]
[p.start() for p in proc] [p.start() for p in proc]
[p.join() for p in proc] [p.join() for p in proc]
return [p.recv() for (p, c) in pipe] return [p.recv() for (p, c) in pipe]

View file

@ -27,7 +27,7 @@
import string import string
import itertools import itertools
import re import re
from StringIO import StringIO from six import StringIO
import spack import spack


@ -61,7 +61,7 @@ def cls_decorator(cls):
# Retrieve the base class of the composite. Inspect its methods and # Retrieve the base class of the composite. Inspect its methods and
# decide which ones will be overridden # decide which ones will be overridden
def no_special_no_private(x): def no_special_no_private(x):
return inspect.ismethod(x) and not x.__name__.startswith('_') return callable(x) and not x.__name__.startswith('_')
# Patch the behavior of each of the methods in the previous list. # Patch the behavior of each of the methods in the previous list.
# This is done associating an instance of the descriptor below to # This is done associating an instance of the descriptor below to
@ -90,42 +90,25 @@ def getter(*args, **kwargs):
return getter return getter
dictionary_for_type_call = {} dictionary_for_type_call = {}
# Construct a dictionary with the methods explicitly passed as name # Construct a dictionary with the methods explicitly passed as name
if method_list is not None: if method_list is not None:
# python@2.7: method_list_dict = {name: IterateOver(name) for name dictionary_for_type_call.update(
# in method_list} (name, IterateOver(name)) for name in method_list)
method_list_dict = {}
for name in method_list:
method_list_dict[name] = IterateOver(name)
dictionary_for_type_call.update(method_list_dict)
# Construct a dictionary with the methods inspected from the interface # Construct a dictionary with the methods inspected from the interface
if interface is not None: if interface is not None:
########## dictionary_for_type_call.update(
# python@2.7: interface_methods = {name: method for name, method in (name, IterateOver(name, method))
# inspect.getmembers(interface, predicate=no_special_no_private)}
interface_methods = {}
for name, method in inspect.getmembers( for name, method in inspect.getmembers(
interface, predicate=no_special_no_private): interface, predicate=no_special_no_private))
interface_methods[name] = method
##########
# python@2.7: interface_methods_dict = {name: IterateOver(name,
# method) for name, method in interface_methods.iteritems()}
interface_methods_dict = {}
for name, method in interface_methods.iteritems():
interface_methods_dict[name] = IterateOver(name, method)
##########
dictionary_for_type_call.update(interface_methods_dict)
# Get the methods that are defined in the scope of the composite # Get the methods that are defined in the scope of the composite
# class and override any previous definition # class and override any previous definition
########## dictionary_for_type_call.update(
# python@2.7: cls_method = {name: method for name, method in (name, method) for name, method in inspect.getmembers(
# inspect.getmembers(cls, predicate=inspect.ismethod)} cls, predicate=inspect.ismethod))
cls_method = {}
for name, method in inspect.getmembers(
cls, predicate=inspect.ismethod):
cls_method[name] = method
##########
dictionary_for_type_call.update(cls_method)
# Generate the new class on the fly and return it # Generate the new class on the fly and return it
# FIXME : inherit from interface if we start to use ABC classes? # FIXME : inherit from interface if we start to use ABC classes?
wrapper_class = type(cls.__name__, (cls, container), wrapper_class = type(cls.__name__, (cls, container),
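
The predicate change above is the Composite cleanup from the commit message: on Python 3, a function defined in a class body is a plain function until it is bound, so inspect.ismethod() finds nothing when the class itself is inspected; callable() matches on both interpreters. A sketch with a throwaway interface class:

    import inspect

    class Interface(object):
        def add(self):
            pass

    # Python 2: unbound method, so ismethod() is True.
    # Python 3: plain function, so ismethod() is False.
    print(inspect.ismethod(Interface.add))

    def no_special_no_private(x):
        return callable(x) and not x.__name__.startswith('_')

    names = [name for name, _ in
             inspect.getmembers(Interface, predicate=no_special_no_private)]
    assert 'add' in names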


@ -35,11 +35,11 @@ class Prefix(str):
For example, you can do something like this:: For example, you can do something like this::
prefix = Prefix('/usr') prefix = Prefix('/usr')
print prefix.lib print(prefix.lib)
print prefix.lib64 print(prefix.lib64)
print prefix.bin print(prefix.bin)
print prefix.share print(prefix.share)
print prefix.man4 print(prefix.man4)
This program would print: This program would print:
@ -52,7 +52,7 @@ class Prefix(str):
Prefix objects behave identically to strings. In fact, they Prefix objects behave identically to strings. In fact, they
subclass str. So operators like + are legal: subclass str. So operators like + are legal:
print "foobar " + prefix print("foobar " + prefix)
This prints 'foobar /usr". All of this is meant to make custom This prints 'foobar /usr". All of this is meant to make custom
installs easy. installs easy.


@ -24,6 +24,9 @@
############################################################################## ##############################################################################
"""Simple wrapper around JSON to guarantee consistent use of load/dump. """ """Simple wrapper around JSON to guarantee consistent use of load/dump. """
import json import json
from six import string_types
from six import iteritems
import spack.error import spack.error
__all__ = ['load', 'dump', 'SpackJSONError'] __all__ = ['load', 'dump', 'SpackJSONError']
@ -36,7 +39,7 @@
def load(stream): def load(stream):
"""Spack JSON needs to be ordered to support specs.""" """Spack JSON needs to be ordered to support specs."""
if isinstance(stream, basestring): if isinstance(stream, string_types):
return _byteify(json.loads(stream, object_hook=_byteify), return _byteify(json.loads(stream, object_hook=_byteify),
ignore_dicts=True) ignore_dicts=True)
else: else:
@ -64,7 +67,7 @@ def _byteify(data, ignore_dicts=False):
if isinstance(data, dict) and not ignore_dicts: if isinstance(data, dict) and not ignore_dicts:
return dict((_byteify(key, ignore_dicts=True), return dict((_byteify(key, ignore_dicts=True),
_byteify(value, ignore_dicts=True)) for key, value in _byteify(value, ignore_dicts=True)) for key, value in
data.iteritems()) iteritems(data))
# if it's anything else, return it in its original form # if it's anything else, return it in its original form
return data return data


@ -137,7 +137,7 @@ def construct_mapping(self, node, deep=False):
key = self.construct_object(key_node, deep=deep) key = self.construct_object(key_node, deep=deep)
try: try:
hash(key) hash(key)
except TypeError, exc: except TypeError as exc:
raise ConstructorError( raise ConstructorError(
"while constructing a mapping", node.start_mark, "while constructing a mapping", node.start_mark,
"found unacceptable key (%s)" % exc, key_node.start_mark) "found unacceptable key (%s)" % exc, key_node.start_mark)


@ -25,10 +25,20 @@
import re import re
import os import os
import sys import sys
import urllib2
import urlparse from six.moves.urllib.request import urlopen, Request
from six.moves.urllib.error import URLError
from multiprocessing import Pool from multiprocessing import Pool
from HTMLParser import HTMLParser, HTMLParseError
try:
# Python 2 had these in the HTMLParser package.
from HTMLParser import HTMLParser, HTMLParseError
except ImportError:
# In Python 3, things moved to html.parser
from html.parser import HTMLParser
# Also, HTMLParseError is deprecated and never raised.
class HTMLParseError:
pass
import llnl.util.tty as tty import llnl.util.tty as tty
@ -80,9 +90,9 @@ def _spider(args):
# It would be nice to do this with the HTTP Accept header to avoid # It would be nice to do this with the HTTP Accept header to avoid
# one round-trip. However, most servers seem to ignore the header # one round-trip. However, most servers seem to ignore the header
# if you ask for a tarball with Accept: text/html. # if you ask for a tarball with Accept: text/html.
req = urllib2.Request(url) req = Request(url)
req.get_method = lambda: "HEAD" req.get_method = lambda: "HEAD"
resp = urllib2.urlopen(req, timeout=TIMEOUT) resp = urlopen(req, timeout=TIMEOUT)
if "Content-type" not in resp.headers: if "Content-type" not in resp.headers:
tty.debug("ignoring page " + url) tty.debug("ignoring page " + url)
@ -95,7 +105,7 @@ def _spider(args):
# Do the real GET request when we know it's just HTML. # Do the real GET request when we know it's just HTML.
req.get_method = lambda: "GET" req.get_method = lambda: "GET"
response = urllib2.urlopen(req, timeout=TIMEOUT) response = urlopen(req, timeout=TIMEOUT)
response_url = response.geturl() response_url = response.geturl()
# Read the page and stick it in the map we'll return # Read the page and stick it in the map we'll return
@ -142,7 +152,7 @@ def _spider(args):
pool.terminate() pool.terminate()
pool.join() pool.join()
except urllib2.URLError as e: except URLError as e:
tty.debug(e) tty.debug(e)
if raise_on_error: if raise_on_error:
raise spack.error.NoNetworkConnectionError(str(e), url) raise spack.error.NoNetworkConnectionError(str(e), url)
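
The web.py changes above move urllib2 usage to the six.moves locations and add a fallback import for HTMLParser. A self-contained sketch of the import pattern, assuming six is installed; the URL below is a placeholder and no request is actually sent:

    # urlopen and URLError are imported only to show their portable locations.
    from six.moves.urllib.request import urlopen, Request
    from six.moves.urllib.error import URLError

    try:
        from HTMLParser import HTMLParser      # Python 2 location
    except ImportError:
        from html.parser import HTMLParser     # Python 3 location

    class LinkParser(HTMLParser):
        """Collect href attributes from anchor tags."""
        def __init__(self):
            HTMLParser.__init__(self)
            self.links = []

        def handle_starttag(self, tag, attrs):
            if tag == 'a':
                self.links.extend(val for name, val in attrs if name == 'href')

    parser = LinkParser()
    parser.feed('<a href="foo-1.0.tar.gz">foo-1.0</a>')
    assert parser.links == ['foo-1.0.tar.gz']

    req = Request('http://example.com')        # placeholder; never opened here
    req.get_method = lambda: 'HEAD'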


@ -47,6 +47,7 @@
import numbers import numbers
from bisect import bisect_left from bisect import bisect_left
from functools import wraps from functools import wraps
from six import string_types
from functools_backport import total_ordering from functools_backport import total_ordering
from spack.util.spack_yaml import syaml_dict from spack.util.spack_yaml import syaml_dict
@ -216,7 +217,7 @@ def a_or_n(seg):
segments = [a_or_n(seg) for seg in version] segments = [a_or_n(seg) for seg in version]
wc = segments[0] wc = segments[0]
for i in xrange(1, len(separators)): for i in range(1, len(separators)):
wc += '(?:' + separators[i] + segments[i] wc += '(?:' + separators[i] + segments[i]
# Add possible alpha or beta indicator at the end of each segment # Add possible alpha or beta indicator at the end of each segment
@ -229,18 +230,24 @@ def __iter__(self):
def __getitem__(self, idx): def __getitem__(self, idx):
cls = type(self) cls = type(self)
if isinstance(idx, numbers.Integral): if isinstance(idx, numbers.Integral):
return self.version[idx] return self.version[idx]
elif isinstance(idx, slice): elif isinstance(idx, slice):
# Currently len(self.separators) == len(self.version) - 1 # Currently len(self.separators) == len(self.version) - 1
extendend_separators = self.separators + ('',) extendend_separators = self.separators + ('',)
string_arg = [] string_arg = []
for token, sep in zip(self.version, extendend_separators)[idx]:
pairs = zip(self.version[idx], extendend_separators[idx])
for token, sep in pairs:
string_arg.append(str(token)) string_arg.append(str(token))
string_arg.append(str(sep)) string_arg.append(str(sep))
string_arg.pop() # We don't need the last separator string_arg.pop() # We don't need the last separator
string_arg = ''.join(string_arg) string_arg = ''.join(string_arg)
return cls(string_arg) return cls(string_arg)
message = '{cls.__name__} indices must be integers' message = '{cls.__name__} indices must be integers'
raise TypeError(message.format(cls=cls)) raise TypeError(message.format(cls=cls))
@ -375,9 +382,9 @@ def intersection(self, other):
class VersionRange(object): class VersionRange(object):
def __init__(self, start, end): def __init__(self, start, end):
if isinstance(start, basestring): if isinstance(start, string_types):
start = Version(start) start = Version(start)
if isinstance(end, basestring): if isinstance(end, string_types):
end = Version(end) end = Version(end)
self.start = start self.start = start
@ -568,7 +575,7 @@ class VersionList(object):
def __init__(self, vlist=None): def __init__(self, vlist=None):
self.versions = [] self.versions = []
if vlist is not None: if vlist is not None:
if isinstance(vlist, basestring): if isinstance(vlist, string_types):
vlist = _string_to_version(vlist) vlist = _string_to_version(vlist)
if type(vlist) == VersionList: if type(vlist) == VersionList:
self.versions = vlist.versions self.versions = vlist.versions
@ -796,7 +803,7 @@ def ver(obj):
""" """
if isinstance(obj, (list, tuple)): if isinstance(obj, (list, tuple)):
return VersionList(obj) return VersionList(obj)
elif isinstance(obj, basestring): elif isinstance(obj, string_types):
return _string_to_version(obj) return _string_to_version(obj)
elif isinstance(obj, (int, float)): elif isinstance(obj, (int, float)):
return _string_to_version(str(obj)) return _string_to_version(str(obj))
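
The __getitem__ change above slices the version tokens and separators before zipping them, because a zip() object cannot be sliced on Python 3. A sketch of the pairing logic on made-up token data:

    version = (1, 2, 0, 'b2')                # hypothetical parsed tokens
    separators = ('.', '.', '-')
    extended_separators = separators + ('',)

    idx = slice(1, 3)                        # e.g. v[1:3]

    # Old: zip(version, extended_separators)[idx] raises TypeError on Python 3.
    pairs = zip(version[idx], extended_separators[idx])

    string_arg = []
    for token, sep in pairs:
        string_arg.append(str(token))
        string_arg.append(str(sep))
    string_arg.pop()                         # drop the trailing separator
    assert ''.join(string_arg) == '2.0'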