Convert Python 2 idioms to Python 2/3-compatible ones.
- convert print, StringIO, except as, octals, izip - convert print statement to print function - convert StringIO to six.StringIO - remove usage of csv reader in Spec, in favor of simple regex - csv reader only does byte strings - convert 0755 octal literals to 0o755 - convert `except Foo, e` to `except Foo as e` - fix a few places `str` is used. - may need to switch everything to str later. - convert iteritems usages to use six.iteritems - fix urllib and HTMLParser - port metaclasses to use six.with_metaclass - More octal literal conversions for Python 2/3 - Fix a new octal literal. - Convert `basestring` to `six.string_types` - Convert xrange -> range - Fix various issues with encoding, iteritems, and Python3 semantics. - Convert contextlib.nested to explicitly nested context managers. - Convert use of filter() to list comprehensions. - Replace reduce() with list comprehensions. - Clean up composite: replace inspect.ismethod() with callable() - Python 3 doesn't have "method" objects; inspect.ismethod returns False. - Need to use callable in Composite to make it work. - Update colify to use future division. - Fix zip() usages that need to be lists. - Python3: Use line-buffered logging instead of unbuffered. - Python3 raises an error with unbuffered I/O - See https://bugs.python.org/issue17404
This commit is contained in:
parent
0331b08c64
commit
1d1a14dbe9
74 changed files with 396 additions and 323 deletions
|
@ -24,6 +24,8 @@
|
|||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from __future__ import print_function
|
||||
|
||||
import sys
|
||||
if (sys.version_info[0] > 2) or (sys.version_info[:2] < (2, 6)):
|
||||
v_info = sys.version_info[:3]
|
||||
|
@ -74,8 +76,8 @@ for pyc_file in orphaned_pyc_files:
|
|||
try:
|
||||
os.remove(pyc_file)
|
||||
except OSError as e:
|
||||
print ("WARNING: Spack may fail mysteriously. "
|
||||
"Couldn't remove orphaned .pyc file: %s" % pyc_file)
|
||||
print("WARNING: Spack may fail mysteriously. "
|
||||
"Couldn't remove orphaned .pyc file: %s" % pyc_file)
|
||||
|
||||
# If there is no working directory, use the spack prefix.
|
||||
try:
|
||||
|
|
|
@ -175,9 +175,9 @@ def change_sed_delimiter(old_delim, new_delim, *filenames):
|
|||
def set_install_permissions(path):
|
||||
"""Set appropriate permissions on the installed file."""
|
||||
if os.path.isdir(path):
|
||||
os.chmod(path, 0755)
|
||||
os.chmod(path, 0o755)
|
||||
else:
|
||||
os.chmod(path, 0644)
|
||||
os.chmod(path, 0o644)
|
||||
|
||||
|
||||
def copy_mode(src, dest):
|
||||
|
|
|
@ -27,6 +27,7 @@
|
|||
import functools
|
||||
import collections
|
||||
import inspect
|
||||
from six import string_types
|
||||
|
||||
# Ignore emacs backups when listing modules
|
||||
ignore_modules = [r'^\.#', '~$']
|
||||
|
@ -80,7 +81,7 @@ def index_by(objects, *funcs):
|
|||
return objects
|
||||
|
||||
f = funcs[0]
|
||||
if isinstance(f, basestring):
|
||||
if isinstance(f, str):
|
||||
f = lambda x: getattr(x, funcs[0])
|
||||
elif isinstance(f, tuple):
|
||||
f = lambda x: tuple(getattr(x, p) for p in funcs[0])
|
||||
|
@ -326,7 +327,7 @@ def match_predicate(*args):
|
|||
"""
|
||||
def match(string):
|
||||
for arg in args:
|
||||
if isinstance(arg, basestring):
|
||||
if isinstance(arg, string_types):
|
||||
if re.search(arg, string):
|
||||
return True
|
||||
elif isinstance(arg, list) or isinstance(arg, tuple):
|
||||
|
|
|
@ -29,7 +29,7 @@
|
|||
import termios
|
||||
import struct
|
||||
import traceback
|
||||
from StringIO import StringIO
|
||||
from six import StringIO
|
||||
|
||||
from llnl.util.tty.color import *
|
||||
|
||||
|
@ -93,7 +93,7 @@ def msg(message, *args, **kwargs):
|
|||
else:
|
||||
cwrite("@*b{%s==>} %s" % (st_text, cescape(message)))
|
||||
for arg in args:
|
||||
print indent + str(arg)
|
||||
print(indent + str(arg))
|
||||
|
||||
|
||||
def info(message, *args, **kwargs):
|
||||
|
@ -201,7 +201,7 @@ def get_yes_or_no(prompt, **kwargs):
|
|||
if not ans:
|
||||
result = default_value
|
||||
if result is None:
|
||||
print "Please enter yes or no."
|
||||
print("Please enter yes or no.")
|
||||
else:
|
||||
if ans == 'y' or ans == 'yes':
|
||||
result = True
|
||||
|
@ -239,7 +239,7 @@ def hline(label=None, **kwargs):
|
|||
out.write(label)
|
||||
out.write(suffix)
|
||||
|
||||
print out.getvalue()
|
||||
print(out.getvalue())
|
||||
|
||||
|
||||
def terminal_size():
|
||||
|
|
|
@ -25,9 +25,11 @@
|
|||
"""
|
||||
Routines for printing columnar output. See colify() for more information.
|
||||
"""
|
||||
from __future__ import division
|
||||
|
||||
import os
|
||||
import sys
|
||||
from StringIO import StringIO
|
||||
from six import StringIO
|
||||
|
||||
from llnl.util.tty import terminal_size
|
||||
from llnl.util.tty.color import clen, cextra
|
||||
|
@ -64,18 +66,18 @@ def config_variable_cols(elts, console_width, padding, cols=0):
|
|||
# Get a bound on the most columns we could possibly have.
|
||||
# 'clen' ignores length of ansi color sequences.
|
||||
lengths = [clen(e) for e in elts]
|
||||
max_cols = max(1, console_width / (min(lengths) + padding))
|
||||
max_cols = max(1, console_width // (min(lengths) + padding))
|
||||
max_cols = min(len(elts), max_cols)
|
||||
|
||||
# Range of column counts to try. If forced, use the supplied value.
|
||||
col_range = [cols] if cols else xrange(1, max_cols + 1)
|
||||
col_range = [cols] if cols else range(1, max_cols + 1)
|
||||
|
||||
# Determine the most columns possible for the console width.
|
||||
configs = [ColumnConfig(c) for c in col_range]
|
||||
for i, length in enumerate(lengths):
|
||||
for conf in configs:
|
||||
if conf.valid:
|
||||
col = i / ((len(elts) + conf.cols - 1) / conf.cols)
|
||||
col = i // ((len(elts) + conf.cols - 1) // conf.cols)
|
||||
p = padding if col < (conf.cols - 1) else 0
|
||||
|
||||
if conf.widths[col] < (length + p):
|
||||
|
@ -107,7 +109,7 @@ def config_uniform_cols(elts, console_width, padding, cols=0):
|
|||
# 'clen' ignores length of ansi color sequences.
|
||||
max_len = max(clen(e) for e in elts) + padding
|
||||
if cols == 0:
|
||||
cols = max(1, console_width / max_len)
|
||||
cols = max(1, console_width // max_len)
|
||||
cols = min(len(elts), cols)
|
||||
|
||||
config = ColumnConfig(cols)
|
||||
|
@ -193,12 +195,12 @@ def colify(elts, **options):
|
|||
raise ValueError("method must be one of: " + allowed_methods)
|
||||
|
||||
cols = config.cols
|
||||
rows = (len(elts) + cols - 1) / cols
|
||||
rows = (len(elts) + cols - 1) // cols
|
||||
rows_last_col = len(elts) % rows
|
||||
|
||||
for row in xrange(rows):
|
||||
for row in range(rows):
|
||||
output.write(" " * indent)
|
||||
for col in xrange(cols):
|
||||
for col in range(cols):
|
||||
elt = col * rows + row
|
||||
width = config.widths[col] + cextra(elts[elt])
|
||||
if col < cols - 1:
|
||||
|
@ -233,7 +235,7 @@ def colify_table(table, **options):
|
|||
columns = len(table[0])
|
||||
|
||||
def transpose():
|
||||
for i in xrange(columns):
|
||||
for i in range(columns):
|
||||
for row in table:
|
||||
yield row[i]
|
||||
|
||||
|
|
|
@ -165,8 +165,12 @@ def __exit__(self, exc_type, exc_val, exc_tb):
|
|||
self.p.join(60.0) # 1 minute to join the child
|
||||
|
||||
def _spawn_writing_daemon(self, read, input_stream):
|
||||
# Parent: read from child, skip the with block.
|
||||
read_file = os.fdopen(read, 'r', 0)
|
||||
# This is the Parent: read from child, skip the with block.
|
||||
|
||||
# Use line buffering (3rd param = 1) since Python 3 has a bug
|
||||
# that prevents unbuffered text I/O.
|
||||
read_file = os.fdopen(read, 'r', 1)
|
||||
|
||||
with open(self.filename, 'w') as log_file:
|
||||
with keyboard_input(input_stream):
|
||||
while True:
|
||||
|
|
|
@ -96,7 +96,7 @@
|
|||
try:
|
||||
repo = spack.repository.RepoPath()
|
||||
sys.meta_path.append(repo)
|
||||
except spack.error.SpackError, e:
|
||||
except spack.error.SpackError as e:
|
||||
tty.die('while initializing Spack RepoPath:', e.message)
|
||||
|
||||
|
||||
|
|
|
@ -287,7 +287,7 @@ def find_compilers(self, *paths):
|
|||
|
||||
# ensure all the version calls we made are cached in the parent
|
||||
# process, as well. This speeds up Spack a lot.
|
||||
clist = reduce(lambda x, y: x + y, compiler_lists)
|
||||
clist = [comp for cl in compiler_lists for comp in cl]
|
||||
return clist
|
||||
|
||||
def find_compiler(self, cmp_cls, *path):
|
||||
|
|
|
@ -57,6 +57,7 @@
|
|||
import shutil
|
||||
import sys
|
||||
import traceback
|
||||
from six import iteritems
|
||||
|
||||
import llnl.util.lang as lang
|
||||
import llnl.util.tty as tty
|
||||
|
@ -310,7 +311,7 @@ def set_build_environment_variables(pkg, env, dirty=False):
|
|||
environment = compiler.environment
|
||||
if 'set' in environment:
|
||||
env_to_set = environment['set']
|
||||
for key, value in env_to_set.iteritems():
|
||||
for key, value in iteritems(env_to_set):
|
||||
env.set('SPACK_ENV_SET_%s' % key, value)
|
||||
env.set('%s' % key, value)
|
||||
# Let shell know which variables to set
|
||||
|
@ -322,8 +323,9 @@ def set_build_environment_variables(pkg, env, dirty=False):
|
|||
env.set('SPACK_COMPILER_EXTRA_RPATHS', extra_rpaths)
|
||||
|
||||
# Add bin directories from dependencies to the PATH for the build.
|
||||
bin_dirs = reversed(filter(os.path.isdir, [
|
||||
'%s/bin' % d.prefix for d in pkg.spec.dependencies(deptype='build')]))
|
||||
bin_dirs = reversed(
|
||||
[d.prefix.bin for d in pkg.spec.dependencies(deptype='build')
|
||||
if os.path.isdir(d.prefix.bin)])
|
||||
bin_dirs = filter_system_bin_paths(bin_dirs)
|
||||
for item in bin_dirs:
|
||||
env.prepend_path('PATH', item)
|
||||
|
|
|
@ -146,7 +146,7 @@ def _do_patch_config_guess(self):
|
|||
if config_guess is not None:
|
||||
try:
|
||||
check_call([config_guess], stdout=PIPE, stderr=PIPE)
|
||||
mod = stat(my_config_guess).st_mode & 0777 | S_IWUSR
|
||||
mod = stat(my_config_guess).st_mode & 0o777 | S_IWUSR
|
||||
os.chmod(my_config_guess, mod)
|
||||
shutil.copyfile(config_guess, my_config_guess)
|
||||
return True
|
||||
|
|
|
@ -22,6 +22,8 @@
|
|||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
@ -186,7 +188,7 @@ def display_specs(specs, **kwargs):
|
|||
# Traverse the index and print out each package
|
||||
for i, (architecture, compiler) in enumerate(sorted(index)):
|
||||
if i > 0:
|
||||
print
|
||||
print()
|
||||
|
||||
header = "%s{%s} / %s{%s}" % (spack.spec.architecture_color,
|
||||
architecture, spack.spec.compiler_color,
|
||||
|
@ -205,7 +207,7 @@ def display_specs(specs, **kwargs):
|
|||
|
||||
for abbrv, spec in zip(abbreviated, specs):
|
||||
prefix = gray_hash(spec, hlen) if hashes else ''
|
||||
print prefix + (format % (abbrv, spec.prefix))
|
||||
print(prefix + (format % (abbrv, spec.prefix)))
|
||||
|
||||
elif mode == 'deps':
|
||||
for spec in specs:
|
||||
|
|
|
@ -22,6 +22,8 @@
|
|||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from __future__ import print_function
|
||||
|
||||
import spack.architecture as architecture
|
||||
|
||||
description = "print architecture information about this machine"
|
||||
|
@ -36,6 +38,6 @@ def setup_parser(subparser):
|
|||
|
||||
def arch(parser, args):
|
||||
if args.platform:
|
||||
print architecture.platform()
|
||||
print(architecture.platform())
|
||||
else:
|
||||
print architecture.sys_type()
|
||||
print(architecture.sys_type())
|
||||
|
|
|
@ -81,7 +81,7 @@ def _specs(self, **kwargs):
|
|||
_arguments['module_type'] = Args(
|
||||
'-m', '--module-type',
|
||||
choices=spack.modules.module_types.keys(),
|
||||
default=spack.modules.module_types.keys()[0],
|
||||
default=list(spack.modules.module_types.keys())[0],
|
||||
help='type of module files [default: %(default)s]')
|
||||
|
||||
_arguments['yes_to_all'] = Args(
|
||||
|
|
|
@ -22,8 +22,11 @@
|
|||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
from six import iteritems
|
||||
|
||||
import llnl.util.tty as tty
|
||||
import spack.compilers
|
||||
|
@ -142,36 +145,36 @@ def compiler_info(args):
|
|||
tty.error("No compilers match spec %s" % cspec)
|
||||
else:
|
||||
for c in compilers:
|
||||
print str(c.spec) + ":"
|
||||
print "\tpaths:"
|
||||
print(str(c.spec) + ":")
|
||||
print("\tpaths:")
|
||||
for cpath in ['cc', 'cxx', 'f77', 'fc']:
|
||||
print "\t\t%s = %s" % (cpath, getattr(c, cpath, None))
|
||||
print("\t\t%s = %s" % (cpath, getattr(c, cpath, None)))
|
||||
if c.flags:
|
||||
print "\tflags:"
|
||||
for flag, flag_value in c.flags.iteritems():
|
||||
print "\t\t%s = %s" % (flag, flag_value)
|
||||
print("\tflags:")
|
||||
for flag, flag_value in iteritems(c.flags):
|
||||
print("\t\t%s = %s" % (flag, flag_value))
|
||||
if len(c.environment) != 0:
|
||||
if len(c.environment['set']) != 0:
|
||||
print "\tenvironment:"
|
||||
print "\t set:"
|
||||
for key, value in c.environment['set'].iteritems():
|
||||
print "\t %s = %s" % (key, value)
|
||||
print("\tenvironment:")
|
||||
print("\t set:")
|
||||
for key, value in iteritems(c.environment['set']):
|
||||
print("\t %s = %s" % (key, value))
|
||||
if c.extra_rpaths:
|
||||
print "\tExtra rpaths:"
|
||||
print("\tExtra rpaths:")
|
||||
for extra_rpath in c.extra_rpaths:
|
||||
print "\t\t%s" % extra_rpath
|
||||
print "\tmodules = %s" % c.modules
|
||||
print "\toperating system = %s" % c.operating_system
|
||||
print("\t\t%s" % extra_rpath)
|
||||
print("\tmodules = %s" % c.modules)
|
||||
print("\toperating system = %s" % c.operating_system)
|
||||
|
||||
|
||||
def compiler_list(args):
|
||||
tty.msg("Available compilers")
|
||||
index = index_by(spack.compilers.all_compilers(scope=args.scope),
|
||||
lambda c: (c.spec.name, c.operating_system, c.target))
|
||||
ordered_sections = sorted(index.items(), key=lambda (k, v): k)
|
||||
ordered_sections = sorted(index.items(), key=lambda item: item[0])
|
||||
for i, (key, compilers) in enumerate(ordered_sections):
|
||||
if i >= 1:
|
||||
print
|
||||
print()
|
||||
name, os, target = key
|
||||
os_str = os
|
||||
if target:
|
||||
|
|
|
@ -22,7 +22,6 @@
|
|||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
|
||||
import argparse
|
||||
|
||||
import llnl.util.tty as tty
|
||||
|
|
|
@ -50,4 +50,4 @@ def dependents(parser, args):
|
|||
if deps:
|
||||
spack.cmd.display_specs(deps)
|
||||
else:
|
||||
print "No dependents"
|
||||
print("No dependents")
|
||||
|
|
|
@ -22,8 +22,11 @@
|
|||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import argparse
|
||||
|
||||
import llnl.util.tty as tty
|
||||
import spack.cmd
|
||||
import spack.build_environment as build_env
|
||||
|
@ -64,7 +67,7 @@ def env(parser, args):
|
|||
if not cmd:
|
||||
# If no command act like the "env" command and print out env vars.
|
||||
for key, val in os.environ.items():
|
||||
print "%s=%s" % (key, val)
|
||||
print("%s=%s" % (key, val))
|
||||
|
||||
else:
|
||||
# Otherwise execute the command with the new environment
|
||||
|
|
|
@ -22,6 +22,8 @@
|
|||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from __future__ import print_function
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
|
@ -175,12 +177,12 @@ def prefix_relative(path):
|
|||
file_list = changed_files()
|
||||
shutil.copy('.flake8', os.path.join(temp, '.flake8'))
|
||||
|
||||
print '======================================================='
|
||||
print 'flake8: running flake8 code checks on spack.'
|
||||
print
|
||||
print 'Modified files:'
|
||||
print('=======================================================')
|
||||
print('flake8: running flake8 code checks on spack.')
|
||||
print()
|
||||
print('Modified files:')
|
||||
for filename in file_list:
|
||||
print " %s" % filename.strip()
|
||||
print(" %s" % filename.strip())
|
||||
print('=======================================================')
|
||||
|
||||
# filter files into a temporary directory with exemptions added.
|
||||
|
@ -196,7 +198,7 @@ def prefix_relative(path):
|
|||
|
||||
if args.root_relative:
|
||||
# print results relative to repo root.
|
||||
print output
|
||||
print(output)
|
||||
else:
|
||||
# print results relative to current working directory
|
||||
def cwd_relative(path):
|
||||
|
@ -204,16 +206,16 @@ def cwd_relative(path):
|
|||
os.path.join(spack.prefix, path.group(1)), os.getcwd())
|
||||
|
||||
for line in output.split('\n'):
|
||||
print re.sub(r'^(.*): \[', cwd_relative, line)
|
||||
print(re.sub(r'^(.*): \[', cwd_relative, line))
|
||||
|
||||
if flake8.returncode != 0:
|
||||
print "Flake8 found errors."
|
||||
print("Flake8 found errors.")
|
||||
sys.exit(1)
|
||||
else:
|
||||
print "Flake8 checks were clean."
|
||||
print("Flake8 checks were clean.")
|
||||
|
||||
finally:
|
||||
if args.keep_temp:
|
||||
print "temporary files are in ", temp
|
||||
print("temporary files are in ", temp)
|
||||
else:
|
||||
shutil.rmtree(temp, ignore_errors=True)
|
||||
|
|
|
@ -22,8 +22,9 @@
|
|||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
import argparse
|
||||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack
|
||||
|
@ -96,5 +97,5 @@ def graph(parser, args):
|
|||
elif specs: # ascii is default: user doesn't need to provide it explicitly
|
||||
graph_ascii(specs[0], debug=spack.debug, deptype=deptype)
|
||||
for spec in specs[1:]:
|
||||
print # extra line bt/w independent graphs
|
||||
print() # extra line bt/w independent graphs
|
||||
graph_ascii(spec, debug=spack.debug)
|
||||
|
|
|
@ -22,7 +22,10 @@
|
|||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from __future__ import print_function
|
||||
|
||||
import textwrap
|
||||
|
||||
from llnl.util.tty.colify import *
|
||||
import spack
|
||||
import spack.fetch_strategy as fs
|
||||
|
@ -50,12 +53,12 @@ def print_text_info(pkg):
|
|||
"""Print out a plain text description of a package."""
|
||||
header = "{0}: ".format(pkg.build_system_class)
|
||||
|
||||
print header, pkg.name
|
||||
print(header, pkg.name)
|
||||
whitespaces = ''.join([' '] * (len(header) - len("Homepage: ")))
|
||||
print "Homepage:", whitespaces, pkg.homepage
|
||||
print("Homepage:", whitespaces, pkg.homepage)
|
||||
|
||||
print
|
||||
print "Safe versions: "
|
||||
print()
|
||||
print("Safe versions: ")
|
||||
|
||||
if not pkg.versions:
|
||||
print(" None")
|
||||
|
@ -63,20 +66,20 @@ def print_text_info(pkg):
|
|||
pad = padder(pkg.versions, 4)
|
||||
for v in reversed(sorted(pkg.versions)):
|
||||
f = fs.for_package_version(pkg, v)
|
||||
print " %s%s" % (pad(v), str(f))
|
||||
print(" %s%s" % (pad(v), str(f)))
|
||||
|
||||
print
|
||||
print "Variants:"
|
||||
print()
|
||||
print("Variants:")
|
||||
if not pkg.variants:
|
||||
print " None"
|
||||
print(" None")
|
||||
else:
|
||||
pad = padder(pkg.variants, 4)
|
||||
|
||||
maxv = max(len(v) for v in sorted(pkg.variants))
|
||||
fmt = "%%-%ss%%-10s%%s" % (maxv + 4)
|
||||
|
||||
print " " + fmt % ('Name', 'Default', 'Description')
|
||||
print
|
||||
print(" " + fmt % ('Name', 'Default', 'Description'))
|
||||
print()
|
||||
for name in sorted(pkg.variants):
|
||||
v = pkg.variants[name]
|
||||
default = 'on' if v.default else 'off'
|
||||
|
@ -85,26 +88,26 @@ def print_text_info(pkg):
|
|||
lines[1:] = [" " + (" " * maxv) + l for l in lines[1:]]
|
||||
desc = "\n".join(lines)
|
||||
|
||||
print " " + fmt % (name, default, desc)
|
||||
print(" " + fmt % (name, default, desc))
|
||||
|
||||
print
|
||||
print "Installation Phases:"
|
||||
print()
|
||||
print("Installation Phases:")
|
||||
phase_str = ''
|
||||
for phase in pkg.phases:
|
||||
phase_str += " {0}".format(phase)
|
||||
print phase_str
|
||||
print(phase_str)
|
||||
|
||||
for deptype in ('build', 'link', 'run'):
|
||||
print
|
||||
print "%s Dependencies:" % deptype.capitalize()
|
||||
print()
|
||||
print("%s Dependencies:" % deptype.capitalize())
|
||||
deps = sorted(pkg.dependencies_of_type(deptype))
|
||||
if deps:
|
||||
colify(deps, indent=4)
|
||||
else:
|
||||
print " None"
|
||||
print(" None")
|
||||
|
||||
print
|
||||
print "Virtual Packages: "
|
||||
print()
|
||||
print("Virtual Packages: ")
|
||||
if pkg.provided:
|
||||
inverse_map = {}
|
||||
for spec, whens in pkg.provided.items():
|
||||
|
@ -113,17 +116,17 @@ def print_text_info(pkg):
|
|||
inverse_map[when] = set()
|
||||
inverse_map[when].add(spec)
|
||||
for when, specs in reversed(sorted(inverse_map.items())):
|
||||
print " %s provides %s" % (
|
||||
when, ', '.join(str(s) for s in specs))
|
||||
print(" %s provides %s" % (
|
||||
when, ', '.join(str(s) for s in specs)))
|
||||
else:
|
||||
print " None"
|
||||
print(" None")
|
||||
|
||||
print
|
||||
print "Description:"
|
||||
print()
|
||||
print("Description:")
|
||||
if pkg.__doc__:
|
||||
print pkg.format_doc(indent=4)
|
||||
print(pkg.format_doc(indent=4))
|
||||
else:
|
||||
print " None"
|
||||
print(" None")
|
||||
|
||||
|
||||
def info(parser, args):
|
||||
|
|
|
@ -22,12 +22,14 @@
|
|||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
import cgi
|
||||
import fnmatch
|
||||
import re
|
||||
import sys
|
||||
from StringIO import StringIO
|
||||
from six import StringIO
|
||||
|
||||
import llnl.util.tty as tty
|
||||
import spack
|
||||
|
@ -123,42 +125,42 @@ def rst_table(elts):
|
|||
pkgs = [spack.repo.get(name) for name in pkg_names]
|
||||
|
||||
print('.. _package-list:')
|
||||
print('')
|
||||
print()
|
||||
print('============')
|
||||
print('Package List')
|
||||
print('============')
|
||||
print('')
|
||||
print()
|
||||
print('This is a list of things you can install using Spack. It is')
|
||||
print('automatically generated based on the packages in the latest Spack')
|
||||
print('release.')
|
||||
print('')
|
||||
print()
|
||||
print('Spack currently has %d mainline packages:' % len(pkgs))
|
||||
print('')
|
||||
print()
|
||||
print(rst_table('`%s`_' % p for p in pkg_names))
|
||||
print('')
|
||||
print()
|
||||
|
||||
# Output some text for each package.
|
||||
for pkg in pkgs:
|
||||
print('-----')
|
||||
print('')
|
||||
print()
|
||||
print('.. _%s:' % pkg.name)
|
||||
print('')
|
||||
print()
|
||||
# Must be at least 2 long, breaks for single letter packages like R.
|
||||
print('-' * max(len(pkg.name), 2))
|
||||
print(pkg.name)
|
||||
print('-' * max(len(pkg.name), 2))
|
||||
print('')
|
||||
print()
|
||||
print('Homepage:')
|
||||
print(' * `%s <%s>`__' % (cgi.escape(pkg.homepage), pkg.homepage))
|
||||
print('')
|
||||
print()
|
||||
print('Spack package:')
|
||||
print(' * `%s/package.py <%s>`__' % (pkg.name, github_url(pkg)))
|
||||
print('')
|
||||
print()
|
||||
if pkg.versions:
|
||||
print('Versions:')
|
||||
print(' ' + ', '.join(str(v) for v in
|
||||
reversed(sorted(pkg.versions))))
|
||||
print('')
|
||||
print()
|
||||
|
||||
for deptype in spack.alldeps:
|
||||
deps = pkg.dependencies_of_type(deptype)
|
||||
|
@ -166,11 +168,11 @@ def rst_table(elts):
|
|||
print('%s Dependencies' % deptype.capitalize())
|
||||
print(' ' + ', '.join('%s_' % d if d in pkg_names
|
||||
else d for d in deps))
|
||||
print('')
|
||||
print()
|
||||
|
||||
print('Description:')
|
||||
print(pkg.format_doc(indent=2))
|
||||
print('')
|
||||
print()
|
||||
|
||||
|
||||
def list(parser, args):
|
||||
|
|
|
@ -22,8 +22,9 @@
|
|||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
import argparse
|
||||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack
|
||||
|
@ -70,16 +71,16 @@ def setup_parser(subparser):
|
|||
|
||||
def location(parser, args):
|
||||
if args.module_dir:
|
||||
print spack.module_path
|
||||
print(spack.module_path)
|
||||
|
||||
elif args.spack_root:
|
||||
print spack.prefix
|
||||
print(spack.prefix)
|
||||
|
||||
elif args.packages:
|
||||
print spack.repo.first_repo().root
|
||||
print(spack.repo.first_repo().root)
|
||||
|
||||
elif args.stages:
|
||||
print spack.stage_path
|
||||
print(spack.stage_path)
|
||||
|
||||
else:
|
||||
specs = spack.cmd.parse_specs(args.spec)
|
||||
|
@ -91,14 +92,14 @@ def location(parser, args):
|
|||
if args.install_dir:
|
||||
# install_dir command matches against installed specs.
|
||||
spec = spack.cmd.disambiguate_spec(specs[0])
|
||||
print spec.prefix
|
||||
print(spec.prefix)
|
||||
|
||||
else:
|
||||
spec = specs[0]
|
||||
|
||||
if args.package_dir:
|
||||
# This one just needs the spec name.
|
||||
print spack.repo.dirname_for_package_name(spec.name)
|
||||
print(spack.repo.dirname_for_package_name(spec.name))
|
||||
|
||||
else:
|
||||
# These versions need concretized specs.
|
||||
|
@ -106,11 +107,11 @@ def location(parser, args):
|
|||
pkg = spack.repo.get(spec)
|
||||
|
||||
if args.stage_dir:
|
||||
print pkg.stage.path
|
||||
print(pkg.stage.path)
|
||||
|
||||
else: # args.build_dir is the default.
|
||||
if not pkg.stage.source_path:
|
||||
tty.die("Build directory does not exist yet. "
|
||||
"Run this to create it:",
|
||||
"spack stage " + " ".join(args.spec))
|
||||
print pkg.stage.source_path
|
||||
print(pkg.stage.source_path)
|
||||
|
|
|
@ -25,7 +25,7 @@
|
|||
import argparse
|
||||
import hashlib
|
||||
import os
|
||||
from urlparse import urlparse
|
||||
from six.moves.urllib.parse import urlparse
|
||||
|
||||
import llnl.util.tty as tty
|
||||
import spack.util.crypto
|
||||
|
|
|
@ -141,7 +141,7 @@ def mirror_list(args):
|
|||
fmt = "%%-%ds%%s" % (max_len + 4)
|
||||
|
||||
for name in mirrors:
|
||||
print fmt % (name, mirrors[name])
|
||||
print(fmt % (name, mirrors[name]))
|
||||
|
||||
|
||||
def _read_specs_from_file(filename):
|
||||
|
@ -152,7 +152,7 @@ def _read_specs_from_file(filename):
|
|||
s = Spec(string)
|
||||
s.package
|
||||
specs.append(s)
|
||||
except SpackError, e:
|
||||
except SpackError as e:
|
||||
tty.die("Parse error in %s, line %d:" % (args.file, i + 1),
|
||||
">>> " + string, str(e))
|
||||
return specs
|
||||
|
|
|
@ -22,6 +22,8 @@
|
|||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
|
||||
import argparse
|
||||
|
@ -118,13 +120,13 @@ def pkg_diff(args):
|
|||
u1, u2 = diff_packages(args.rev1, args.rev2)
|
||||
|
||||
if u1:
|
||||
print "%s:" % args.rev1
|
||||
print("%s:" % args.rev1)
|
||||
colify(sorted(u1), indent=4)
|
||||
if u1:
|
||||
print
|
||||
print()
|
||||
|
||||
if u2:
|
||||
print "%s:" % args.rev2
|
||||
print("%s:" % args.rev2)
|
||||
colify(sorted(u2), indent=4)
|
||||
|
||||
|
||||
|
|
|
@ -22,6 +22,8 @@
|
|||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
|
||||
import llnl.util.tty as tty
|
||||
|
@ -161,7 +163,7 @@ def repo_list(args):
|
|||
max_ns_len = max(len(r.namespace) for r in repos)
|
||||
for repo in repos:
|
||||
fmt = "%%-%ds%%s" % (max_ns_len + 4)
|
||||
print fmt % (repo.namespace, repo.root)
|
||||
print(fmt % (repo.namespace, repo.root))
|
||||
|
||||
|
||||
def repo(parser, args):
|
||||
|
|
|
@ -22,8 +22,9 @@
|
|||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
import argparse
|
||||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
import spack
|
||||
import spack.cmd
|
||||
import spack.cmd.common.arguments as arguments
|
||||
|
@ -69,20 +70,20 @@ def spec(parser, args):
|
|||
# With -y, just print YAML to output.
|
||||
if args.yaml:
|
||||
spec.concretize()
|
||||
print spec.to_yaml()
|
||||
print(spec.to_yaml())
|
||||
continue
|
||||
|
||||
# Print some diagnostic info by default.
|
||||
print "Input spec"
|
||||
print "--------------------------------"
|
||||
print spec.tree(**kwargs)
|
||||
print("Input spec")
|
||||
print("--------------------------------")
|
||||
print(spec.tree(**kwargs))
|
||||
|
||||
print "Normalized"
|
||||
print "--------------------------------"
|
||||
print("Normalized")
|
||||
print("--------------------------------")
|
||||
spec.normalize()
|
||||
print spec.tree(**kwargs)
|
||||
print(spec.tree(**kwargs))
|
||||
|
||||
print "Concretized"
|
||||
print "--------------------------------"
|
||||
print("Concretized")
|
||||
print("--------------------------------")
|
||||
spec.concretize()
|
||||
print spec.tree(**kwargs)
|
||||
print(spec.tree(**kwargs))
|
||||
|
|
|
@ -22,12 +22,14 @@
|
|||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from __future__ import print_function
|
||||
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
import argparse
|
||||
import pytest
|
||||
from StringIO import StringIO
|
||||
from six import StringIO
|
||||
|
||||
from llnl.util.filesystem import *
|
||||
from llnl.util.tty.colify import colify
|
||||
|
@ -79,7 +81,7 @@ def do_list(args, unknown_args):
|
|||
output_lines.append(
|
||||
os.path.basename(name).replace('.py', ''))
|
||||
else:
|
||||
print indent + name
|
||||
print(indent + name)
|
||||
|
||||
if args.list:
|
||||
colify(output_lines)
|
||||
|
|
|
@ -22,6 +22,8 @@
|
|||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from __future__ import print_function
|
||||
|
||||
from llnl.util.tty.colify import colify
|
||||
import llnl.util.tty as tty
|
||||
import spack
|
||||
|
@ -47,10 +49,10 @@ def versions(parser, args):
|
|||
tty.msg("Remote versions (not yet checksummed):")
|
||||
if not remote_versions:
|
||||
if not fetched_versions:
|
||||
print " Found no versions for %s" % pkg.name
|
||||
print(" Found no versions for %s" % pkg.name)
|
||||
tty.debug("Check the list_url and list_depth attribute on the "
|
||||
"package to help Spack find versions.")
|
||||
else:
|
||||
print " Found no unckecksummed versions for %s" % pkg.name
|
||||
print(" Found no unckecksummed versions for %s" % pkg.name)
|
||||
else:
|
||||
colify(sorted(remote_versions, reverse=True), indent=2)
|
||||
|
|
|
@ -265,11 +265,11 @@ def check(key):
|
|||
full_path, prefix, suffix = key
|
||||
version = detect_version(full_path)
|
||||
return (version, prefix, suffix, full_path)
|
||||
except ProcessError, e:
|
||||
except ProcessError as e:
|
||||
tty.debug(
|
||||
"Couldn't get version for compiler %s" % full_path, e)
|
||||
return None
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
# Catching "Exception" here is fine because it just
|
||||
# means something went wrong running a candidate executable.
|
||||
tty.debug("Error while executing candidate compiler %s"
|
||||
|
|
|
@ -335,7 +335,7 @@ def get_compiler_duplicates(compiler_spec, arch_spec):
|
|||
scope_to_compilers[scope] = compilers
|
||||
|
||||
cfg_file_to_duplicates = dict()
|
||||
for scope, compilers in scope_to_compilers.iteritems():
|
||||
for scope, compilers in scope_to_compilers.items():
|
||||
config_file = config_scopes[scope].get_section_filename('compilers')
|
||||
cfg_file_to_duplicates[config_file] = compilers
|
||||
|
||||
|
@ -401,7 +401,7 @@ def __init__(self, compiler_spec, arch_spec):
|
|||
config_file_to_duplicates = get_compiler_duplicates(
|
||||
compiler_spec, arch_spec)
|
||||
duplicate_table = list(
|
||||
(x, len(y)) for x, y in config_file_to_duplicates.iteritems())
|
||||
(x, len(y)) for x, y in config_file_to_duplicates.items())
|
||||
descriptor = lambda num: 'time' if num == 1 else 'times'
|
||||
duplicate_msg = (
|
||||
lambda cfgfile, count: "{0}: {1} {2}".format(
|
||||
|
|
|
@ -34,6 +34,8 @@
|
|||
concretization policies.
|
||||
"""
|
||||
from __future__ import print_function
|
||||
from six import iteritems
|
||||
|
||||
import spack
|
||||
import spack.spec
|
||||
import spack.compilers
|
||||
|
@ -241,7 +243,7 @@ def concretize_version(self, spec):
|
|||
|
||||
def concretize_architecture(self, spec):
|
||||
"""If the spec is empty provide the defaults of the platform. If the
|
||||
architecture is not a basestring, then check if either the platform,
|
||||
architecture is not a string type, then check if either the platform,
|
||||
target or operating system are concretized. If any of the fields are
|
||||
changed then return True. If everything is concretized (i.e the
|
||||
architecture attribute is a namedtuple of classes) then return False.
|
||||
|
@ -262,7 +264,7 @@ def concretize_architecture(self, spec):
|
|||
while not spec.architecture.concrete and default_archs:
|
||||
arch = default_archs.pop(0)
|
||||
|
||||
replacement_fields = [k for k, v in arch.to_cmp_dict().iteritems()
|
||||
replacement_fields = [k for k, v in iteritems(arch.to_cmp_dict())
|
||||
if v and not getattr(spec.architecture, k)]
|
||||
for field in replacement_fields:
|
||||
setattr(spec.architecture, field, getattr(arch, field))
|
||||
|
|
|
@ -52,6 +52,8 @@
|
|||
import os
|
||||
import re
|
||||
import sys
|
||||
from six import string_types
|
||||
from six import iteritems
|
||||
|
||||
import yaml
|
||||
import jsonschema
|
||||
|
@ -108,7 +110,7 @@ def extend_with_default(validator_class):
|
|||
"patternProperties"]
|
||||
|
||||
def set_defaults(validator, properties, instance, schema):
|
||||
for property, subschema in properties.iteritems():
|
||||
for property, subschema in iteritems(properties):
|
||||
if "default" in subschema:
|
||||
instance.setdefault(property, subschema["default"])
|
||||
for err in validate_properties(
|
||||
|
@ -116,10 +118,10 @@ def set_defaults(validator, properties, instance, schema):
|
|||
yield err
|
||||
|
||||
def set_pp_defaults(validator, properties, instance, schema):
|
||||
for property, subschema in properties.iteritems():
|
||||
for property, subschema in iteritems(properties):
|
||||
if "default" in subschema:
|
||||
if isinstance(instance, dict):
|
||||
for key, val in instance.iteritems():
|
||||
for key, val in iteritems(instance):
|
||||
if re.match(property, key) and val is None:
|
||||
instance[key] = subschema["default"]
|
||||
|
||||
|
@ -306,8 +308,8 @@ def _mark_overrides(data):
|
|||
|
||||
elif isinstance(data, dict):
|
||||
marked = {}
|
||||
for key, val in data.iteritems():
|
||||
if isinstance(key, basestring) and key.endswith(':'):
|
||||
for key, val in iteritems(data):
|
||||
if isinstance(key, string_types) and key.endswith(':'):
|
||||
key = syaml.syaml_str(key[:-1])
|
||||
key.override = True
|
||||
marked[key] = _mark_overrides(val)
|
||||
|
@ -348,7 +350,7 @@ def they_are(t):
|
|||
|
||||
# Source dict is merged into dest.
|
||||
elif they_are(dict):
|
||||
for sk, sv in source.iteritems():
|
||||
for sk, sv in iteritems(source):
|
||||
if override(sk) or sk not in dest:
|
||||
# if sk ended with ::, or if it's new, completely override
|
||||
dest[sk] = copy.copy(sv)
|
||||
|
|
|
@ -41,6 +41,8 @@
|
|||
"""
|
||||
import os
|
||||
import socket
|
||||
from six import string_types
|
||||
from six import iteritems
|
||||
|
||||
from yaml.error import MarkedYAMLError, YAMLError
|
||||
|
||||
|
@ -260,7 +262,7 @@ def _read_from_file(self, stream, format='json'):
|
|||
raise ValueError("Invalid database format: %s" % format)
|
||||
|
||||
try:
|
||||
if isinstance(stream, basestring):
|
||||
if isinstance(stream, string_types):
|
||||
with open(stream, 'r') as f:
|
||||
fdata = load(f)
|
||||
else:
|
||||
|
@ -511,7 +513,7 @@ def _add(self, spec, directory_layout=None, explicit=False):
|
|||
new_spec, path, installed, ref_count=0, explicit=explicit)
|
||||
|
||||
# Connect dependencies from the DB to the new copy.
|
||||
for name, dep in spec.dependencies_dict(_tracked_deps).iteritems():
|
||||
for name, dep in iteritems(spec.dependencies_dict(_tracked_deps)):
|
||||
dkey = dep.spec.dag_hash()
|
||||
new_spec._add_dependency(self._data[dkey].spec, dep.deptypes)
|
||||
self._data[dkey].ref_count += 1
|
||||
|
|
|
@ -51,6 +51,7 @@ class OpenMpi(Package):
|
|||
import inspect
|
||||
import os.path
|
||||
import re
|
||||
from six import string_types
|
||||
|
||||
import llnl.util.lang
|
||||
import spack
|
||||
|
@ -174,7 +175,7 @@ class Foo(Package):
|
|||
"""
|
||||
global __all__
|
||||
|
||||
if isinstance(dicts, basestring):
|
||||
if isinstance(dicts, string_types):
|
||||
dicts = (dicts, )
|
||||
if not isinstance(dicts, collections.Sequence):
|
||||
message = "dicts arg must be list, tuple, or string. Found {0}"
|
||||
|
|
|
@ -23,7 +23,6 @@
|
|||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
import os
|
||||
import exceptions
|
||||
import shutil
|
||||
import glob
|
||||
import tempfile
|
||||
|
@ -137,7 +136,7 @@ def remove_install_directory(self, spec):
|
|||
if os.path.exists(path):
|
||||
try:
|
||||
shutil.rmtree(path)
|
||||
except exceptions.OSError as e:
|
||||
except OSError as e:
|
||||
raise RemoveFailedError(spec, path, e)
|
||||
|
||||
path = os.path.dirname(path)
|
||||
|
|
|
@ -291,7 +291,7 @@ def from_sourcing_files(*args, **kwargs):
|
|||
shell_options = '{shell_options}'.format(**info)
|
||||
source_file = '{source_command} {file} {concatenate_on_success}'
|
||||
|
||||
dump_cmd = "import os, json; print json.dumps(dict(os.environ))"
|
||||
dump_cmd = "import os, json; print(json.dumps(dict(os.environ)))"
|
||||
dump_environment = 'python -c "%s"' % dump_cmd
|
||||
|
||||
# Construct the command that will be executed
|
||||
|
|
|
@ -22,8 +22,11 @@
|
|||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
import llnl.util.tty as tty
|
||||
import spack
|
||||
import inspect
|
||||
|
|
|
@ -46,6 +46,9 @@
|
|||
import shutil
|
||||
import copy
|
||||
from functools import wraps
|
||||
from six import string_types
|
||||
from six import with_metaclass
|
||||
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import *
|
||||
import spack
|
||||
|
@ -74,20 +77,19 @@ def wrapper(self, *args, **kwargs):
|
|||
return wrapper
|
||||
|
||||
|
||||
class FetchStrategy(object):
|
||||
class FSMeta(type):
|
||||
"""This metaclass registers all fetch strategies in a list."""
|
||||
def __init__(cls, name, bases, dict):
|
||||
type.__init__(cls, name, bases, dict)
|
||||
if cls.enabled:
|
||||
all_strategies.append(cls)
|
||||
|
||||
|
||||
class FetchStrategy(with_metaclass(FSMeta, object)):
|
||||
"""Superclass of all fetch strategies."""
|
||||
enabled = False # Non-abstract subclasses should be enabled.
|
||||
required_attributes = None # Attributes required in version() args.
|
||||
|
||||
class __metaclass__(type):
|
||||
|
||||
"""This metaclass registers all fetch strategies in a list."""
|
||||
|
||||
def __init__(cls, name, bases, dict):
|
||||
type.__init__(cls, name, bases, dict)
|
||||
if cls.enabled:
|
||||
all_strategies.append(cls)
|
||||
|
||||
def __init__(self):
|
||||
# The stage is initialized late, so that fetch strategies can be
|
||||
|
@ -319,7 +321,7 @@ def expand(self):
|
|||
# top-level directory. We ignore hidden files to accomodate
|
||||
# these "semi-exploding" tarballs.
|
||||
files = os.listdir(tarball_container)
|
||||
non_hidden = filter(lambda f: not f.startswith('.'), files)
|
||||
non_hidden = [f for f in files if not f.startswith('.')]
|
||||
if len(non_hidden) == 1:
|
||||
expanded_dir = os.path.join(tarball_container, non_hidden[0])
|
||||
if os.path.isdir(expanded_dir):
|
||||
|
@ -461,7 +463,7 @@ def archive(self, destination, **kwargs):
|
|||
|
||||
patterns = kwargs.get('exclude', None)
|
||||
if patterns is not None:
|
||||
if isinstance(patterns, basestring):
|
||||
if isinstance(patterns, string_types):
|
||||
patterns = [patterns]
|
||||
for p in patterns:
|
||||
tar.add_default_arg('--exclude=%s' % p)
|
||||
|
|
|
@ -63,6 +63,7 @@
|
|||
"""
|
||||
|
||||
from heapq import *
|
||||
from six import iteritems
|
||||
|
||||
from llnl.util.lang import *
|
||||
from llnl.util.tty.color import *
|
||||
|
@ -562,7 +563,7 @@ def label(key, label):
|
|||
continue
|
||||
|
||||
# Add edges for each depends_on in the package.
|
||||
for dep_name, dep in spec.package.dependencies.iteritems():
|
||||
for dep_name, dep in iteritems(spec.package.dependencies):
|
||||
deps.add((spec.name, dep_name))
|
||||
|
||||
# If the package provides something, add an edge for that.
|
||||
|
|
|
@ -23,6 +23,7 @@
|
|||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
import re
|
||||
import platform
|
||||
|
|
|
@ -23,15 +23,16 @@
|
|||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
import spack.modules
|
||||
from six import iteritems
|
||||
|
||||
|
||||
def post_install(pkg):
|
||||
for item, cls in spack.modules.module_types.iteritems():
|
||||
for item, cls in iteritems(spack.modules.module_types):
|
||||
generator = cls(pkg.spec)
|
||||
generator.write()
|
||||
|
||||
|
||||
def post_uninstall(pkg):
|
||||
for item, cls in spack.modules.module_types.iteritems():
|
||||
for item, cls in iteritems(spack.modules.module_types):
|
||||
generator = cls(pkg.spec)
|
||||
generator.remove()
|
||||
|
|
|
@ -46,6 +46,8 @@
|
|||
import re
|
||||
import string
|
||||
import textwrap
|
||||
from six import iteritems
|
||||
from six import with_metaclass
|
||||
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import join_path, mkdirp
|
||||
|
@ -213,7 +215,7 @@ def process_arglist(arglist):
|
|||
for x in arglist:
|
||||
yield (x, )
|
||||
else:
|
||||
for x in arglist.iteritems():
|
||||
for x in iteritems(arglist):
|
||||
yield x
|
||||
|
||||
for method, arglist in environment_actions.items():
|
||||
|
@ -246,18 +248,18 @@ def format_env_var_name(name):
|
|||
return name.replace('-', '_').upper()
|
||||
|
||||
|
||||
class EnvModule(object):
|
||||
class ModuleMeta(type):
|
||||
"""Metaclass registers modules in themodule_types dict."""
|
||||
def __init__(cls, name, bases, dict):
|
||||
type.__init__(cls, name, bases, dict)
|
||||
if cls.name != 'env_module' and cls.name in _module_config['enable']:
|
||||
module_types[cls.name] = cls
|
||||
|
||||
|
||||
class EnvModule(with_metaclass(ModuleMeta, object)):
|
||||
name = 'env_module'
|
||||
formats = {}
|
||||
|
||||
class __metaclass__(type):
|
||||
|
||||
def __init__(cls, name, bases, dict):
|
||||
type.__init__(cls, name, bases, dict)
|
||||
if cls.name != 'env_module' and cls.name in _module_config[
|
||||
'enable']:
|
||||
module_types[cls.name] = cls
|
||||
|
||||
def __init__(self, spec=None):
|
||||
self.spec = spec
|
||||
self.pkg = spec.package # Just stored for convenience
|
||||
|
|
|
@ -54,7 +54,7 @@ def find_compilers(self, *paths):
|
|||
|
||||
# ensure all the version calls we made are cached in the parent
|
||||
# process, as well. This speeds up Spack a lot.
|
||||
clist = reduce(lambda x, y: x + y, compiler_lists)
|
||||
clist = [comp for cl in compiler_lists for comp in cl]
|
||||
return clist
|
||||
|
||||
def find_compiler(self, cmp_cls, *paths):
|
||||
|
|
|
@ -42,6 +42,9 @@
|
|||
import sys
|
||||
import textwrap
|
||||
import time
|
||||
from six import StringIO
|
||||
from six import string_types
|
||||
from six import with_metaclass
|
||||
|
||||
import llnl.util.lock
|
||||
import llnl.util.tty as tty
|
||||
|
@ -56,7 +59,7 @@
|
|||
import spack.repository
|
||||
import spack.url
|
||||
import spack.util.web
|
||||
from StringIO import StringIO
|
||||
|
||||
from llnl.util.filesystem import *
|
||||
from llnl.util.lang import *
|
||||
from llnl.util.link_tree import LinkTree
|
||||
|
@ -238,7 +241,7 @@ def _wrapper(instance, *args, **kwargs):
|
|||
return _execute_under_condition
|
||||
|
||||
|
||||
class PackageBase(object):
|
||||
class PackageBase(with_metaclass(PackageMeta, object)):
|
||||
"""This is the superclass for all spack packages.
|
||||
|
||||
***The Package class***
|
||||
|
@ -475,7 +478,6 @@ class SomePackage(Package):
|
|||
Package creators override functions like install() (all of them do this),
|
||||
clean() (some of them do this), and others to provide custom behavior.
|
||||
"""
|
||||
__metaclass__ = PackageMeta
|
||||
#
|
||||
# These are default values for instance variables.
|
||||
#
|
||||
|
@ -1115,6 +1117,13 @@ def _prefix_write_lock(self):
|
|||
finally:
|
||||
self.prefix_lock.release_write()
|
||||
|
||||
@contextlib.contextmanager
|
||||
def _stage_and_write_lock(self):
|
||||
"""Prefix lock nested in a stage."""
|
||||
with self.stage:
|
||||
with self._prefix_write_lock():
|
||||
yield
|
||||
|
||||
def do_install(self,
|
||||
keep_prefix=False,
|
||||
keep_stage=False,
|
||||
|
@ -1233,7 +1242,7 @@ def build_process(input_stream):
|
|||
|
||||
self.stage.keep = keep_stage
|
||||
|
||||
with contextlib.nested(self.stage, self._prefix_write_lock()):
|
||||
with self._stage_and_write_lock():
|
||||
# Run the pre-install hook in the child process after
|
||||
# the directory is created.
|
||||
spack.hooks.pre_install(self)
|
||||
|
@ -1265,9 +1274,10 @@ def build_process(input_stream):
|
|||
input_stream=input_stream
|
||||
)
|
||||
with redirection_context as log_redirection:
|
||||
for phase_name, phase in zip(self.phases, self._InstallPhase_phases): # NOQA: ignore=E501
|
||||
for phase_name, phase in zip(
|
||||
self.phases, self._InstallPhase_phases):
|
||||
tty.msg(
|
||||
'Executing phase : \'{0}\''.format(phase_name) # NOQA: ignore=E501
|
||||
'Executing phase : \'{0}\''.format(phase_name)
|
||||
)
|
||||
# Redirect stdout and stderr to daemon pipe
|
||||
with log_redirection:
|
||||
|
@ -1355,7 +1365,7 @@ def sanity_check_prefix(self):
|
|||
"""This function checks whether install succeeded."""
|
||||
|
||||
def check_paths(path_list, filetype, predicate):
|
||||
if isinstance(path_list, basestring):
|
||||
if isinstance(path_list, string_types):
|
||||
path_list = [path_list]
|
||||
|
||||
for path in path_list:
|
||||
|
|
|
@ -22,6 +22,8 @@
|
|||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from six import string_types
|
||||
from six import iteritems
|
||||
|
||||
import spack
|
||||
import spack.error
|
||||
|
@ -180,7 +182,7 @@ def spec_preferred_variants(self, pkgname):
|
|||
variants = self.preferred.get(pkg, {}).get('variants', '')
|
||||
if variants:
|
||||
break
|
||||
if not isinstance(variants, basestring):
|
||||
if not isinstance(variants, string_types):
|
||||
variants = " ".join(variants)
|
||||
pkg = spack.repo.get(pkgname)
|
||||
spec = spack.spec.Spec("%s %s" % (pkgname, variants))
|
||||
|
@ -233,7 +235,7 @@ def spec_externals(spec):
|
|||
if (not pkg_paths) and (not pkg_modules):
|
||||
return []
|
||||
|
||||
for external_spec, path in pkg_paths.iteritems():
|
||||
for external_spec, path in iteritems(pkg_paths):
|
||||
if not path:
|
||||
# skip entries without paths (avoid creating extra Specs)
|
||||
continue
|
||||
|
@ -242,7 +244,7 @@ def spec_externals(spec):
|
|||
if external_spec.satisfies(spec):
|
||||
external_specs.append(external_spec)
|
||||
|
||||
for external_spec, module in pkg_modules.iteritems():
|
||||
for external_spec, module in iteritems(pkg_modules):
|
||||
if not module:
|
||||
continue
|
||||
|
||||
|
|
|
@ -45,15 +45,15 @@ def compile_c_and_execute(source_file, include_flags, link_flags):
|
|||
def compare_output(current_output, blessed_output):
|
||||
"""Compare blessed and current output of executables."""
|
||||
if not (current_output == blessed_output):
|
||||
print "Produced output does not match expected output."
|
||||
print "Expected output:"
|
||||
print '-' * 80
|
||||
print blessed_output
|
||||
print '-' * 80
|
||||
print "Produced output:"
|
||||
print '-' * 80
|
||||
print current_output
|
||||
print '-' * 80
|
||||
print("Produced output does not match expected output.")
|
||||
print("Expected output:")
|
||||
print('-' * 80)
|
||||
print(blessed_output)
|
||||
print('-' * 80)
|
||||
print("Produced output:")
|
||||
print('-' * 80)
|
||||
print(current_output)
|
||||
print('-' * 80)
|
||||
raise RuntimeError("Ouput check failed.",
|
||||
"See spack_output.log for details")
|
||||
|
||||
|
|
|
@ -25,6 +25,8 @@
|
|||
import re
|
||||
import shlex
|
||||
import itertools
|
||||
from six import string_types
|
||||
|
||||
import spack.error
|
||||
|
||||
|
||||
|
@ -118,7 +120,7 @@ def __init__(self, lexer):
|
|||
def gettok(self):
|
||||
"""Puts the next token in the input stream into self.next."""
|
||||
try:
|
||||
self.next = self.tokens.next()
|
||||
self.next = next(self.tokens)
|
||||
except StopIteration:
|
||||
self.next = None
|
||||
|
||||
|
@ -159,7 +161,7 @@ def expect(self, id):
|
|||
sys.exit(1)
|
||||
|
||||
def setup(self, text):
|
||||
if isinstance(text, basestring):
|
||||
if isinstance(text, string_types):
|
||||
text = shlex.split(text)
|
||||
self.text = text
|
||||
self.push_tokens(self.lexer.lex(text))
|
||||
|
|
|
@ -26,6 +26,7 @@
|
|||
The ``virtual`` module contains utility classes for virtual dependencies.
|
||||
"""
|
||||
from itertools import product as iproduct
|
||||
from six import iteritems
|
||||
from pprint import pformat
|
||||
|
||||
import spack.util.spack_yaml as syaml
|
||||
|
@ -97,7 +98,7 @@ def update(self, spec):
|
|||
assert(not spec.virtual)
|
||||
|
||||
pkg = spec.package
|
||||
for provided_spec, provider_specs in pkg.provided.iteritems():
|
||||
for provided_spec, provider_specs in iteritems(pkg.provided):
|
||||
for provider_spec in provider_specs:
|
||||
# TODO: fix this comment.
|
||||
# We want satisfaction other than flags
|
||||
|
@ -201,7 +202,7 @@ def to_yaml(self, stream=None):
|
|||
def from_yaml(stream):
|
||||
try:
|
||||
yfile = syaml.load(stream)
|
||||
except MarkedYAMLError, e:
|
||||
except MarkedYAMLError as e:
|
||||
raise spack.spec.SpackYAMLError(
|
||||
"error parsing YAML ProviderIndex cache:", str(e))
|
||||
|
||||
|
@ -288,7 +289,7 @@ def _transform(providers, transform_fun, out_mapping_type=dict):
|
|||
"""
|
||||
def mapiter(mappings):
|
||||
if isinstance(mappings, dict):
|
||||
return mappings.iteritems()
|
||||
return iteritems(mappings)
|
||||
else:
|
||||
return iter(mappings)
|
||||
|
||||
|
|
|
@ -26,7 +26,6 @@
|
|||
import stat
|
||||
import shutil
|
||||
import errno
|
||||
import exceptions
|
||||
import sys
|
||||
import inspect
|
||||
import imp
|
||||
|
@ -558,7 +557,7 @@ def _read_config(self):
|
|||
|
||||
return yaml_data['repo']
|
||||
|
||||
except exceptions.IOError:
|
||||
except IOError:
|
||||
tty.die("Error reading %s when opening %s"
|
||||
% (self.config_file, self.root))
|
||||
|
||||
|
|
|
@ -97,13 +97,14 @@
|
|||
"""
|
||||
import base64
|
||||
import collections
|
||||
import csv
|
||||
import ctypes
|
||||
import hashlib
|
||||
import itertools
|
||||
from operator import attrgetter
|
||||
from six import StringIO
|
||||
from six import string_types
|
||||
from six import iteritems
|
||||
|
||||
import cStringIO
|
||||
import llnl.util.tty as tty
|
||||
import spack
|
||||
import spack.architecture
|
||||
|
@ -113,7 +114,7 @@
|
|||
import spack.store
|
||||
import spack.util.spack_json as sjson
|
||||
import spack.util.spack_yaml as syaml
|
||||
from cStringIO import StringIO
|
||||
|
||||
from llnl.util.filesystem import find_libraries
|
||||
from llnl.util.lang import *
|
||||
from llnl.util.tty.color import *
|
||||
|
@ -222,7 +223,7 @@ def canonical_deptype(deptype):
|
|||
if deptype is None:
|
||||
return alldeps
|
||||
|
||||
elif isinstance(deptype, str):
|
||||
elif isinstance(deptype, string_types):
|
||||
return special_types.get(deptype, (deptype,))
|
||||
|
||||
elif isinstance(deptype, (tuple, list)):
|
||||
|
@ -270,7 +271,7 @@ def __init__(self, *args):
|
|||
spec_like = args[0]
|
||||
if isinstance(spec_like, ArchSpec):
|
||||
self._dup(spec_like)
|
||||
elif isinstance(spec_like, basestring):
|
||||
elif isinstance(spec_like, string_types):
|
||||
spec_fields = spec_like.split("-")
|
||||
|
||||
if len(spec_fields) == 3:
|
||||
|
@ -391,7 +392,7 @@ def constrain(self, other):
|
|||
raise UnsatisfiableArchitectureSpecError(self, other)
|
||||
|
||||
constrained = False
|
||||
for attr, svalue in self.to_cmp_dict().iteritems():
|
||||
for attr, svalue in iteritems(self.to_cmp_dict()):
|
||||
ovalue = getattr(other, attr)
|
||||
if svalue is None and ovalue is not None:
|
||||
setattr(self, attr, ovalue)
|
||||
|
@ -406,7 +407,7 @@ def copy(self):
|
|||
|
||||
@property
|
||||
def concrete(self):
|
||||
return all(v for k, v in self.to_cmp_dict().iteritems())
|
||||
return all(v for k, v in iteritems(self.to_cmp_dict()))
|
||||
|
||||
def to_cmp_dict(self):
|
||||
"""Returns a dictionary that can be used for field comparison."""
|
||||
|
@ -464,7 +465,7 @@ def __init__(self, *args):
|
|||
arg = args[0]
|
||||
# If there is one argument, it's either another CompilerSpec
|
||||
# to copy or a string to parse
|
||||
if isinstance(arg, basestring):
|
||||
if isinstance(arg, string_types):
|
||||
c = SpecParser().parse_compiler(arg)
|
||||
self.name = c.name
|
||||
self.versions = c.versions
|
||||
|
@ -728,7 +729,7 @@ def copy(self):
|
|||
return clone
|
||||
|
||||
def _cmp_key(self):
|
||||
return tuple((k, tuple(v)) for k, v in sorted(self.iteritems()))
|
||||
return tuple((k, tuple(v)) for k, v in sorted(iteritems(self)))
|
||||
|
||||
def __str__(self):
|
||||
sorted_keys = filter(
|
||||
|
@ -918,7 +919,7 @@ def __init__(self, spec_like, *dep_like, **kwargs):
|
|||
return
|
||||
|
||||
# Parse if the spec_like is a string.
|
||||
if not isinstance(spec_like, basestring):
|
||||
if not isinstance(spec_like, string_types):
|
||||
raise TypeError("Can't make spec out of %s" % type(spec_like))
|
||||
|
||||
spec_list = SpecParser().parse(spec_like)
|
||||
|
@ -1018,9 +1019,9 @@ def _add_variant(self, name, value):
|
|||
if name in self.variants:
|
||||
raise DuplicateVariantError(
|
||||
"Cannot specify variant '%s' twice" % name)
|
||||
if isinstance(value, basestring) and value.upper() == 'TRUE':
|
||||
if isinstance(value, string_types) and value.upper() == 'TRUE':
|
||||
value = True
|
||||
elif isinstance(value, basestring) and value.upper() == 'FALSE':
|
||||
elif isinstance(value, string_types) and value.upper() == 'FALSE':
|
||||
value = False
|
||||
self.variants[name] = VariantSpec(name, value)
|
||||
|
||||
|
@ -1056,7 +1057,7 @@ def _set_architecture(self, **kwargs):
|
|||
new_vals = tuple(kwargs.get(arg, None) for arg in arch_attrs)
|
||||
self.architecture = ArchSpec(*new_vals)
|
||||
else:
|
||||
new_attrvals = [(a, v) for a, v in kwargs.iteritems()
|
||||
new_attrvals = [(a, v) for a, v in iteritems(kwargs)
|
||||
if a in arch_attrs]
|
||||
for new_attr, new_value in new_attrvals:
|
||||
if getattr(self.architecture, new_attr):
|
||||
|
@ -1219,7 +1220,7 @@ def traverse_edges(self, visited=None, d=0, deptype=None,
|
|||
# get initial values for kwargs
|
||||
depth = kwargs.get('depth', False)
|
||||
key_fun = kwargs.get('key', id)
|
||||
if isinstance(key_fun, basestring):
|
||||
if isinstance(key_fun, string_types):
|
||||
key_fun = attrgetter(key_fun)
|
||||
yield_root = kwargs.get('root', True)
|
||||
cover = kwargs.get('cover', 'nodes')
|
||||
|
@ -1314,7 +1315,7 @@ def dag_hash(self, length=None):
|
|||
else:
|
||||
yaml_text = syaml.dump(
|
||||
self.to_node_dict(), default_flow_style=True, width=maxint)
|
||||
sha = hashlib.sha1(yaml_text)
|
||||
sha = hashlib.sha1(yaml_text.encode('utf-8'))
|
||||
b32_hash = base64.b32encode(sha.digest()).lower()
|
||||
if self.concrete:
|
||||
self._hash = b32_hash
|
||||
|
@ -1421,7 +1422,7 @@ def read_yaml_dep_specs(dependency_dict):
|
|||
formats so that reindex will work on old specs/databases.
|
||||
"""
|
||||
for dep_name, elt in dependency_dict.items():
|
||||
if isinstance(elt, basestring):
|
||||
if isinstance(elt, string_types):
|
||||
# original format, elt is just the dependency hash.
|
||||
dag_hash, deptypes = elt, ['build', 'link']
|
||||
elif isinstance(elt, tuple):
|
||||
|
@ -2413,11 +2414,8 @@ def __getitem__(self, name):
|
|||
if query_parameters:
|
||||
# We have extra query parameters, which are comma separated
|
||||
# values
|
||||
f = cStringIO.StringIO(query_parameters.pop())
|
||||
try:
|
||||
query_parameters = next(csv.reader(f, skipinitialspace=True))
|
||||
except StopIteration:
|
||||
query_parameters = ['']
|
||||
csv = query_parameters.pop().strip()
|
||||
query_parameters = re.split(r'\s*,\s*', csv)
|
||||
|
||||
try:
|
||||
value = next(
|
||||
|
|
|
@ -29,18 +29,19 @@
|
|||
import shutil
|
||||
import tempfile
|
||||
import getpass
|
||||
from urlparse import urljoin
|
||||
from six import string_types
|
||||
from six import iteritems
|
||||
from six.moves.urllib.parse import urljoin
|
||||
|
||||
import llnl.util.tty as tty
|
||||
import llnl.util.lock
|
||||
from llnl.util.filesystem import *
|
||||
|
||||
import spack.util.pattern as pattern
|
||||
|
||||
import spack
|
||||
import spack.config
|
||||
import spack.fetch_strategy as fs
|
||||
import spack.error
|
||||
import spack.fetch_strategy as fs
|
||||
import spack.util.pattern as pattern
|
||||
from spack.version import *
|
||||
from spack.util.path import canonicalize_path
|
||||
from spack.util.crypto import prefix_bits, bit_length
|
||||
|
@ -84,7 +85,7 @@ def get_tmp_root():
|
|||
if _tmp_root is None:
|
||||
config = spack.config.get_config('config')
|
||||
candidates = config['build_stage']
|
||||
if isinstance(candidates, basestring):
|
||||
if isinstance(candidates, string_types):
|
||||
candidates = [candidates]
|
||||
|
||||
path = _first_accessible_path(candidates)
|
||||
|
@ -188,7 +189,7 @@ def __init__(
|
|||
"""
|
||||
# TODO: fetch/stage coupling needs to be reworked -- the logic
|
||||
# TODO: here is convoluted and not modular enough.
|
||||
if isinstance(url_or_fetch_strategy, basestring):
|
||||
if isinstance(url_or_fetch_strategy, string_types):
|
||||
self.fetcher = fs.from_url(url_or_fetch_strategy)
|
||||
elif isinstance(url_or_fetch_strategy, fs.FetchStrategy):
|
||||
self.fetcher = url_or_fetch_strategy
|
||||
|
@ -548,7 +549,7 @@ def expand_archive(self):
|
|||
if not isinstance(placement, dict):
|
||||
placement = {'': placement}
|
||||
# Make the paths in the dictionary absolute and link
|
||||
for key, value in placement.iteritems():
|
||||
for key, value in iteritems(placement):
|
||||
target_path = join_path(
|
||||
root_stage.source_path, resource.destination)
|
||||
destination_path = join_path(target_path, value)
|
||||
|
@ -661,7 +662,7 @@ def cache_local(self):
|
|||
def _get_mirrors():
|
||||
"""Get mirrors from spack configuration."""
|
||||
config = spack.config.get_config('mirrors')
|
||||
return [val for name, val in config.iteritems()]
|
||||
return [val for name, val in iteritems(config)]
|
||||
|
||||
|
||||
def ensure_access(file=spack.stage_path):
|
||||
|
|
|
@ -22,19 +22,19 @@
|
|||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
import StringIO
|
||||
import argparse
|
||||
import codecs
|
||||
import collections
|
||||
import contextlib
|
||||
import unittest
|
||||
from six import StringIO
|
||||
|
||||
import llnl.util.filesystem
|
||||
import spack
|
||||
import spack.cmd
|
||||
import spack.cmd.install as install
|
||||
|
||||
FILE_REGISTRY = collections.defaultdict(StringIO.StringIO)
|
||||
FILE_REGISTRY = collections.defaultdict(StringIO)
|
||||
|
||||
|
||||
# Monkey-patch open to write module files to a StringIO instance
|
||||
|
@ -44,7 +44,7 @@ def mock_open(filename, mode, *args):
|
|||
message = 'test.test_install : unexpected opening mode for mock_open'
|
||||
raise RuntimeError(message)
|
||||
|
||||
FILE_REGISTRY[filename] = StringIO.StringIO()
|
||||
FILE_REGISTRY[filename] = StringIO()
|
||||
|
||||
try:
|
||||
yield FILE_REGISTRY[filename]
|
||||
|
|
|
@ -23,6 +23,7 @@
|
|||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
import pytest
|
||||
from six import iteritems
|
||||
|
||||
import spack.spec
|
||||
import spack.compilers as compilers
|
||||
|
@ -38,11 +39,11 @@ def test_get_compiler_duplicates(self):
|
|||
cfg_file_to_duplicates = compilers.get_compiler_duplicates(
|
||||
'gcc@4.5.0', spack.spec.ArchSpec('cray-CNL-xeon'))
|
||||
assert len(cfg_file_to_duplicates) == 1
|
||||
cfg_file, duplicates = cfg_file_to_duplicates.iteritems().next()
|
||||
cfg_file, duplicates = next(iteritems(cfg_file_to_duplicates))
|
||||
assert len(duplicates) == 1
|
||||
|
||||
def test_all_compilers(self):
|
||||
all_compilers = compilers.all_compilers()
|
||||
filtered = list(x for x in all_compilers if str(x.spec) == 'clang@3.3')
|
||||
filtered = list(x for x in filtered if x.operating_system == 'SuSE11')
|
||||
filtered = [x for x in all_compilers if str(x.spec) == 'clang@3.3']
|
||||
filtered = [x for x in filtered if x.operating_system == 'SuSE11']
|
||||
assert len(filtered) == 1
|
||||
|
|
|
@ -27,11 +27,12 @@
|
|||
import os
|
||||
import re
|
||||
import shutil
|
||||
from six import StringIO
|
||||
|
||||
import cStringIO
|
||||
import llnl.util.filesystem
|
||||
import llnl.util.lang
|
||||
import ordereddict_backport
|
||||
|
||||
import py
|
||||
import pytest
|
||||
import spack
|
||||
|
@ -56,11 +57,8 @@ def no_stdin_duplication(monkeypatch):
|
|||
"""Duplicating stdin (or any other stream) returns an empty
|
||||
cStringIO object.
|
||||
"""
|
||||
monkeypatch.setattr(
|
||||
llnl.util.lang,
|
||||
'duplicate_stream',
|
||||
lambda x: cStringIO.StringIO()
|
||||
)
|
||||
monkeypatch.setattr(llnl.util.lang, 'duplicate_stream',
|
||||
lambda x: StringIO())
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
|
@ -181,6 +179,7 @@ def config(configuration_dir):
|
|||
spack.config.clear_config_caches()
|
||||
|
||||
|
||||
|
||||
@pytest.fixture(scope='module')
|
||||
def database(tmpdir_factory, builtin_mock, config):
|
||||
"""Creates a mock database with some packages installed note that
|
||||
|
@ -312,7 +311,7 @@ def mock_archive():
|
|||
"\ttouch $prefix/dummy_file\n"
|
||||
"EOF\n"
|
||||
)
|
||||
os.chmod(configure_path, 0755)
|
||||
os.chmod(configure_path, 0o755)
|
||||
# Archive it
|
||||
current = tmpdir.chdir()
|
||||
archive_name = '{0}.tar.gz'.format(repo_name)
|
||||
|
|
|
@ -22,7 +22,7 @@
|
|||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from StringIO import StringIO
|
||||
from six import StringIO
|
||||
|
||||
from spack.spec import Spec
|
||||
from spack.graph import AsciiGraph, topological_sort, graph_dot
|
||||
|
|
|
@ -283,7 +283,7 @@ def test_upgrade_read_to_write_fails_with_readonly_file(self):
|
|||
# ensure lock file exists the first time, so we open it read-only
|
||||
# to begin wtih.
|
||||
touch(self.lock_path)
|
||||
os.chmod(self.lock_path, 0444)
|
||||
os.chmod(self.lock_path, 0o444)
|
||||
|
||||
lock = Lock(self.lock_path)
|
||||
self.assertTrue(lock._reads == 0)
|
||||
|
|
|
@ -46,7 +46,7 @@ def setUp(self):
|
|||
with open(make_exe, 'w') as f:
|
||||
f.write('#!/bin/sh\n')
|
||||
f.write('echo "$@"')
|
||||
os.chmod(make_exe, 0700)
|
||||
os.chmod(make_exe, 0o700)
|
||||
|
||||
path_put_first('PATH', [self.tmpdir])
|
||||
|
||||
|
|
|
@ -24,14 +24,14 @@
|
|||
##############################################################################
|
||||
import collections
|
||||
import contextlib
|
||||
from six import StringIO
|
||||
|
||||
import cStringIO
|
||||
import pytest
|
||||
import spack.modules
|
||||
import spack.spec
|
||||
|
||||
# Our "filesystem" for the tests below
|
||||
FILE_REGISTRY = collections.defaultdict(cStringIO.StringIO)
|
||||
FILE_REGISTRY = collections.defaultdict(StringIO)
|
||||
# Spec strings that will be used throughout the tests
|
||||
mpich_spec_string = 'mpich@3.0.4'
|
||||
mpileaks_spec_string = 'mpileaks'
|
||||
|
@ -48,7 +48,7 @@ def _mock(filename, mode):
|
|||
if not mode == 'w':
|
||||
raise RuntimeError('unexpected opening mode for stringio_open')
|
||||
|
||||
FILE_REGISTRY[filename] = cStringIO.StringIO()
|
||||
FILE_REGISTRY[filename] = StringIO()
|
||||
|
||||
try:
|
||||
yield FILE_REGISTRY[filename]
|
||||
|
|
|
@ -86,7 +86,7 @@ def test_default_works(builtin_mock):
|
|||
|
||||
def test_target_match(builtin_mock):
|
||||
platform = spack.architecture.platform()
|
||||
targets = platform.targets.values()
|
||||
targets = list(platform.targets.values())
|
||||
for target in targets[:-1]:
|
||||
pkg = spack.repo.get('multimethod target=' + target.name)
|
||||
assert pkg.different_by_target() == target.name
|
||||
|
|
|
@ -86,6 +86,7 @@ class CompositeFromInterface:
|
|||
composite.append(self.Two())
|
||||
composite.add()
|
||||
self.assertEqual(self.Base.counter, 3)
|
||||
|
||||
composite.pop()
|
||||
composite.subtract()
|
||||
self.assertEqual(self.Base.counter, 2)
|
||||
|
|
|
@ -37,7 +37,8 @@
|
|||
mpi@:10.0: set([zmpi])},
|
||||
'stuff': {stuff: set([externalvirtual])}}
|
||||
"""
|
||||
import StringIO
|
||||
from six import StringIO
|
||||
|
||||
import spack
|
||||
from spack.provider_index import ProviderIndex
|
||||
from spack.spec import Spec
|
||||
|
@ -46,10 +47,10 @@
|
|||
def test_yaml_round_trip(builtin_mock):
|
||||
p = ProviderIndex(spack.repo.all_package_names())
|
||||
|
||||
ostream = StringIO.StringIO()
|
||||
ostream = StringIO()
|
||||
p.to_yaml(ostream)
|
||||
|
||||
istream = StringIO.StringIO(ostream.getvalue())
|
||||
istream = StringIO(ostream.getvalue())
|
||||
q = ProviderIndex.from_yaml(istream)
|
||||
|
||||
assert p == q
|
||||
|
|
|
@ -31,6 +31,8 @@
|
|||
default version. Once those go away, we can likely drop 2.6 and increase
|
||||
the minimum supported Python 3 version, as well.
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
|
|
|
@ -90,7 +90,7 @@ def test_preorder_node_traversal(self):
|
|||
|
||||
names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf',
|
||||
'zmpi', 'fake']
|
||||
pairs = zip([0, 1, 2, 3, 4, 2, 3], names)
|
||||
pairs = list(zip([0, 1, 2, 3, 4, 2, 3], names))
|
||||
|
||||
traversal = dag.traverse()
|
||||
assert [x.name for x in traversal] == names
|
||||
|
@ -104,7 +104,7 @@ def test_preorder_edge_traversal(self):
|
|||
|
||||
names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf',
|
||||
'libelf', 'zmpi', 'fake', 'zmpi']
|
||||
pairs = zip([0, 1, 2, 3, 4, 3, 2, 3, 1], names)
|
||||
pairs = list(zip([0, 1, 2, 3, 4, 3, 2, 3, 1], names))
|
||||
|
||||
traversal = dag.traverse(cover='edges')
|
||||
assert [x.name for x in traversal] == names
|
||||
|
@ -118,7 +118,7 @@ def test_preorder_path_traversal(self):
|
|||
|
||||
names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf',
|
||||
'libelf', 'zmpi', 'fake', 'zmpi', 'fake']
|
||||
pairs = zip([0, 1, 2, 3, 4, 3, 2, 3, 1, 2], names)
|
||||
pairs = list(zip([0, 1, 2, 3, 4, 3, 2, 3, 1, 2], names))
|
||||
|
||||
traversal = dag.traverse(cover='paths')
|
||||
assert [x.name for x in traversal] == names
|
||||
|
@ -132,7 +132,7 @@ def test_postorder_node_traversal(self):
|
|||
|
||||
names = ['libelf', 'libdwarf', 'dyninst', 'fake', 'zmpi',
|
||||
'callpath', 'mpileaks']
|
||||
pairs = zip([4, 3, 2, 3, 2, 1, 0], names)
|
||||
pairs = list(zip([4, 3, 2, 3, 2, 1, 0], names))
|
||||
|
||||
traversal = dag.traverse(order='post')
|
||||
assert [x.name for x in traversal] == names
|
||||
|
@ -146,7 +146,7 @@ def test_postorder_edge_traversal(self):
|
|||
|
||||
names = ['libelf', 'libdwarf', 'libelf', 'dyninst', 'fake', 'zmpi',
|
||||
'callpath', 'zmpi', 'mpileaks']
|
||||
pairs = zip([4, 3, 3, 2, 3, 2, 1, 1, 0], names)
|
||||
pairs = list(zip([4, 3, 3, 2, 3, 2, 1, 1, 0], names))
|
||||
|
||||
traversal = dag.traverse(cover='edges', order='post')
|
||||
assert [x.name for x in traversal] == names
|
||||
|
@ -160,7 +160,7 @@ def test_postorder_path_traversal(self):
|
|||
|
||||
names = ['libelf', 'libdwarf', 'libelf', 'dyninst', 'fake', 'zmpi',
|
||||
'callpath', 'fake', 'zmpi', 'mpileaks']
|
||||
pairs = zip([4, 3, 3, 2, 3, 2, 1, 2, 1, 0], names)
|
||||
pairs = list(zip([4, 3, 3, 2, 3, 2, 1, 2, 1, 0], names))
|
||||
|
||||
traversal = dag.traverse(cover='paths', order='post')
|
||||
assert [x.name for x in traversal] == names
|
||||
|
|
|
@ -46,8 +46,8 @@
|
|||
"""
|
||||
import os
|
||||
import re
|
||||
from StringIO import StringIO
|
||||
from urlparse import urlsplit, urlunsplit
|
||||
from six import StringIO
|
||||
from six.moves.urllib.parse import urlsplit, urlunsplit
|
||||
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.tty.color import *
|
||||
|
@ -486,7 +486,7 @@ def substitution_offsets(path):
|
|||
name_offsets = offsets[1::2]
|
||||
|
||||
ver_offsets = []
|
||||
for i in xrange(0, len(name_parts), 2):
|
||||
for i in range(0, len(name_parts), 2):
|
||||
vparts = re.split(ver, name_parts[i])
|
||||
voffsets = cumsum(vparts, offsets[i], len)
|
||||
ver_offsets.extend(voffsets[1::2])
|
||||
|
|
|
@ -22,10 +22,10 @@
|
|||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
from six import string_types
|
||||
|
||||
import llnl.util.tty as tty
|
||||
import spack
|
||||
|
@ -129,7 +129,7 @@ def __call__(self, *args, **kwargs):
|
|||
raise ValueError("Cannot use `str` as input stream.")
|
||||
|
||||
def streamify(arg, mode):
|
||||
if isinstance(arg, basestring):
|
||||
if isinstance(arg, string_types):
|
||||
return open(arg, mode), True
|
||||
elif arg is str:
|
||||
return subprocess.PIPE, False
|
||||
|
|
|
@ -28,7 +28,6 @@
|
|||
to pickle functions if they're passed indirectly as parameters.
|
||||
"""
|
||||
from multiprocessing import Process, Pipe, Semaphore, Value
|
||||
from itertools import izip
|
||||
|
||||
__all__ = ['spawn', 'parmap', 'Barrier']
|
||||
|
||||
|
@ -43,7 +42,7 @@ def fun(pipe, x):
|
|||
def parmap(f, X):
|
||||
pipe = [Pipe() for x in X]
|
||||
proc = [Process(target=spawn(f), args=(c, x))
|
||||
for x, (p, c) in izip(X, pipe)]
|
||||
for x, (p, c) in zip(X, pipe)]
|
||||
[p.start() for p in proc]
|
||||
[p.join() for p in proc]
|
||||
return [p.recv() for (p, c) in pipe]
|
||||
|
|
|
@ -27,7 +27,7 @@
|
|||
import string
|
||||
import itertools
|
||||
import re
|
||||
from StringIO import StringIO
|
||||
from six import StringIO
|
||||
|
||||
import spack
|
||||
|
||||
|
|
|
@ -61,7 +61,7 @@ def cls_decorator(cls):
|
|||
# Retrieve the base class of the composite. Inspect its methods and
|
||||
# decide which ones will be overridden
|
||||
def no_special_no_private(x):
|
||||
return inspect.ismethod(x) and not x.__name__.startswith('_')
|
||||
return callable(x) and not x.__name__.startswith('_')
|
||||
|
||||
# Patch the behavior of each of the methods in the previous list.
|
||||
# This is done associating an instance of the descriptor below to
|
||||
|
@ -90,42 +90,25 @@ def getter(*args, **kwargs):
|
|||
return getter
|
||||
|
||||
dictionary_for_type_call = {}
|
||||
|
||||
# Construct a dictionary with the methods explicitly passed as name
|
||||
if method_list is not None:
|
||||
# python@2.7: method_list_dict = {name: IterateOver(name) for name
|
||||
# in method_list}
|
||||
method_list_dict = {}
|
||||
for name in method_list:
|
||||
method_list_dict[name] = IterateOver(name)
|
||||
dictionary_for_type_call.update(method_list_dict)
|
||||
dictionary_for_type_call.update(
|
||||
(name, IterateOver(name)) for name in method_list)
|
||||
|
||||
# Construct a dictionary with the methods inspected from the interface
|
||||
if interface is not None:
|
||||
##########
|
||||
# python@2.7: interface_methods = {name: method for name, method in
|
||||
# inspect.getmembers(interface, predicate=no_special_no_private)}
|
||||
interface_methods = {}
|
||||
for name, method in inspect.getmembers(
|
||||
interface, predicate=no_special_no_private):
|
||||
interface_methods[name] = method
|
||||
##########
|
||||
# python@2.7: interface_methods_dict = {name: IterateOver(name,
|
||||
# method) for name, method in interface_methods.iteritems()}
|
||||
interface_methods_dict = {}
|
||||
for name, method in interface_methods.iteritems():
|
||||
interface_methods_dict[name] = IterateOver(name, method)
|
||||
##########
|
||||
dictionary_for_type_call.update(interface_methods_dict)
|
||||
dictionary_for_type_call.update(
|
||||
(name, IterateOver(name, method))
|
||||
for name, method in inspect.getmembers(
|
||||
interface, predicate=no_special_no_private))
|
||||
|
||||
# Get the methods that are defined in the scope of the composite
|
||||
# class and override any previous definition
|
||||
##########
|
||||
# python@2.7: cls_method = {name: method for name, method in
|
||||
# inspect.getmembers(cls, predicate=inspect.ismethod)}
|
||||
cls_method = {}
|
||||
for name, method in inspect.getmembers(
|
||||
cls, predicate=inspect.ismethod):
|
||||
cls_method[name] = method
|
||||
##########
|
||||
dictionary_for_type_call.update(cls_method)
|
||||
dictionary_for_type_call.update(
|
||||
(name, method) for name, method in inspect.getmembers(
|
||||
cls, predicate=inspect.ismethod))
|
||||
|
||||
# Generate the new class on the fly and return it
|
||||
# FIXME : inherit from interface if we start to use ABC classes?
|
||||
wrapper_class = type(cls.__name__, (cls, container),
|
||||
|
|
|
@ -35,11 +35,11 @@ class Prefix(str):
|
|||
For example, you can do something like this::
|
||||
|
||||
prefix = Prefix('/usr')
|
||||
print prefix.lib
|
||||
print prefix.lib64
|
||||
print prefix.bin
|
||||
print prefix.share
|
||||
print prefix.man4
|
||||
print(prefix.lib)
|
||||
print(prefix.lib64)
|
||||
print(prefix.bin)
|
||||
print(prefix.share)
|
||||
print(prefix.man4)
|
||||
|
||||
This program would print:
|
||||
|
||||
|
@ -52,7 +52,7 @@ class Prefix(str):
|
|||
Prefix objects behave identically to strings. In fact, they
|
||||
subclass str. So operators like + are legal:
|
||||
|
||||
print "foobar " + prefix
|
||||
print("foobar " + prefix)
|
||||
|
||||
This prints 'foobar /usr". All of this is meant to make custom
|
||||
installs easy.
|
||||
|
|
|
@ -24,6 +24,9 @@
|
|||
##############################################################################
|
||||
"""Simple wrapper around JSON to guarantee consistent use of load/dump. """
|
||||
import json
|
||||
from six import string_types
|
||||
from six import iteritems
|
||||
|
||||
import spack.error
|
||||
|
||||
__all__ = ['load', 'dump', 'SpackJSONError']
|
||||
|
@ -36,7 +39,7 @@
|
|||
|
||||
def load(stream):
|
||||
"""Spack JSON needs to be ordered to support specs."""
|
||||
if isinstance(stream, basestring):
|
||||
if isinstance(stream, string_types):
|
||||
return _byteify(json.loads(stream, object_hook=_byteify),
|
||||
ignore_dicts=True)
|
||||
else:
|
||||
|
@ -64,7 +67,7 @@ def _byteify(data, ignore_dicts=False):
|
|||
if isinstance(data, dict) and not ignore_dicts:
|
||||
return dict((_byteify(key, ignore_dicts=True),
|
||||
_byteify(value, ignore_dicts=True)) for key, value in
|
||||
data.iteritems())
|
||||
iteritems(data))
|
||||
# if it's anything else, return it in its original form
|
||||
return data
|
||||
|
||||
|
|
|
@ -137,7 +137,7 @@ def construct_mapping(self, node, deep=False):
|
|||
key = self.construct_object(key_node, deep=deep)
|
||||
try:
|
||||
hash(key)
|
||||
except TypeError, exc:
|
||||
except TypeError as exc:
|
||||
raise ConstructorError(
|
||||
"while constructing a mapping", node.start_mark,
|
||||
"found unacceptable key (%s)" % exc, key_node.start_mark)
|
||||
|
|
|
@ -25,10 +25,20 @@
|
|||
import re
|
||||
import os
|
||||
import sys
|
||||
import urllib2
|
||||
import urlparse
|
||||
|
||||
from six.moves.urllib.request import urlopen, Request
|
||||
from six.moves.urllib.error import URLError
|
||||
from multiprocessing import Pool
|
||||
from HTMLParser import HTMLParser, HTMLParseError
|
||||
|
||||
try:
|
||||
# Python 2 had these in the HTMLParser package.
|
||||
from HTMLParser import HTMLParser, HTMLParseError
|
||||
except ImportError:
|
||||
# In Python 3, things moved to html.parser
|
||||
from html.parser import HTMLParser
|
||||
# Also, HTMLParseError is deprecated and never raised.
|
||||
class HTMLParseError:
|
||||
pass
|
||||
|
||||
import llnl.util.tty as tty
|
||||
|
||||
|
@ -80,9 +90,9 @@ def _spider(args):
|
|||
# It would be nice to do this with the HTTP Accept header to avoid
|
||||
# one round-trip. However, most servers seem to ignore the header
|
||||
# if you ask for a tarball with Accept: text/html.
|
||||
req = urllib2.Request(url)
|
||||
req = Request(url)
|
||||
req.get_method = lambda: "HEAD"
|
||||
resp = urllib2.urlopen(req, timeout=TIMEOUT)
|
||||
resp = urlopen(req, timeout=TIMEOUT)
|
||||
|
||||
if "Content-type" not in resp.headers:
|
||||
tty.debug("ignoring page " + url)
|
||||
|
@ -95,7 +105,7 @@ def _spider(args):
|
|||
|
||||
# Do the real GET request when we know it's just HTML.
|
||||
req.get_method = lambda: "GET"
|
||||
response = urllib2.urlopen(req, timeout=TIMEOUT)
|
||||
response = urlopen(req, timeout=TIMEOUT)
|
||||
response_url = response.geturl()
|
||||
|
||||
# Read the page and and stick it in the map we'll return
|
||||
|
@ -142,7 +152,7 @@ def _spider(args):
|
|||
pool.terminate()
|
||||
pool.join()
|
||||
|
||||
except urllib2.URLError as e:
|
||||
except URLError as e:
|
||||
tty.debug(e)
|
||||
if raise_on_error:
|
||||
raise spack.error.NoNetworkConnectionError(str(e), url)
|
||||
|
|
|
@ -47,6 +47,7 @@
|
|||
import numbers
|
||||
from bisect import bisect_left
|
||||
from functools import wraps
|
||||
from six import string_types
|
||||
|
||||
from functools_backport import total_ordering
|
||||
from spack.util.spack_yaml import syaml_dict
|
||||
|
@ -216,7 +217,7 @@ def a_or_n(seg):
|
|||
segments = [a_or_n(seg) for seg in version]
|
||||
|
||||
wc = segments[0]
|
||||
for i in xrange(1, len(separators)):
|
||||
for i in range(1, len(separators)):
|
||||
wc += '(?:' + separators[i] + segments[i]
|
||||
|
||||
# Add possible alpha or beta indicator at the end of each segemnt
|
||||
|
@ -229,18 +230,24 @@ def __iter__(self):
|
|||
|
||||
def __getitem__(self, idx):
|
||||
cls = type(self)
|
||||
|
||||
if isinstance(idx, numbers.Integral):
|
||||
return self.version[idx]
|
||||
|
||||
elif isinstance(idx, slice):
|
||||
# Currently len(self.separators) == len(self.version) - 1
|
||||
extendend_separators = self.separators + ('',)
|
||||
string_arg = []
|
||||
for token, sep in zip(self.version, extendend_separators)[idx]:
|
||||
|
||||
pairs = zip(self.version[idx], extendend_separators[idx])
|
||||
for token, sep in pairs:
|
||||
string_arg.append(str(token))
|
||||
string_arg.append(str(sep))
|
||||
|
||||
string_arg.pop() # We don't need the last separator
|
||||
string_arg = ''.join(string_arg)
|
||||
return cls(string_arg)
|
||||
|
||||
message = '{cls.__name__} indices must be integers'
|
||||
raise TypeError(message.format(cls=cls))
|
||||
|
||||
|
@ -375,9 +382,9 @@ def intersection(self, other):
|
|||
class VersionRange(object):
|
||||
|
||||
def __init__(self, start, end):
|
||||
if isinstance(start, basestring):
|
||||
if isinstance(start, string_types):
|
||||
start = Version(start)
|
||||
if isinstance(end, basestring):
|
||||
if isinstance(end, string_types):
|
||||
end = Version(end)
|
||||
|
||||
self.start = start
|
||||
|
@ -568,7 +575,7 @@ class VersionList(object):
|
|||
def __init__(self, vlist=None):
|
||||
self.versions = []
|
||||
if vlist is not None:
|
||||
if isinstance(vlist, basestring):
|
||||
if isinstance(vlist, string_types):
|
||||
vlist = _string_to_version(vlist)
|
||||
if type(vlist) == VersionList:
|
||||
self.versions = vlist.versions
|
||||
|
@ -796,7 +803,7 @@ def ver(obj):
|
|||
"""
|
||||
if isinstance(obj, (list, tuple)):
|
||||
return VersionList(obj)
|
||||
elif isinstance(obj, basestring):
|
||||
elif isinstance(obj, string_types):
|
||||
return _string_to_version(obj)
|
||||
elif isinstance(obj, (int, float)):
|
||||
return _string_to_version(str(obj))
|
||||
|
|
Loading…
Reference in a new issue