Stop using six in Spack (#33905)

Since we dropped support for Python 2.7, there's no need
to use `six` anymore. We still need to vendor it until
we update our vendored dependencies.
Massimiliano Culpo 2022-11-15 10:07:54 +01:00 committed by GitHub
parent 5c4137baf1
commit b3124bff7c
83 changed files with 336 additions and 625 deletions
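Most of the 83 files change in the same handful of ways: a `six` helper is replaced by its Python 3 standard-library equivalent. The sketch below collects the recurring substitutions seen throughout the diff; the names (`parse_value`, `render`, `LoadError`, `Derived`) are hypothetical and only illustrate the patterns, they are not taken from the Spack sources.

```python
# Recurring six -> stdlib substitutions in this commit (illustrative sketch only;
# the imports are listed to show the replacements, not all are used below).
import io
import pickle
from itertools import zip_longest      # was: from six.moves import zip_longest
from urllib.parse import urlparse      # was: six.moves.urllib.parse
from urllib.request import urlopen     # was: six.moves.urllib.request


class LoadError(Exception):
    pass


def parse_value(value):
    # six.string_types -> str; six.text_type(x) -> str(x)
    if isinstance(value, str):
        return str(value)
    # six.iteritems(d) -> d.items()
    if isinstance(value, dict):
        return {k: str(v) for k, v in value.items()}
    return value


def render(items):
    # six.StringIO -> io.StringIO
    out = io.StringIO()
    for item in items:
        out.write("%s\n" % parse_value(item))
    return out.getvalue()


def load(stream):
    try:
        # six.moves.cPickle -> pickle
        return pickle.load(stream)
    except Exception as e:
        # six.raise_from(LoadError(...), e) -> raise LoadError(...) from e
        raise LoadError("error parsing stream: %s" % e) from e


class Meta(type):
    pass


# class C(six.with_metaclass(Meta, Base)) -> class C(Base, metaclass=Meta)
class Derived(metaclass=Meta):
    pass
```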

View file

@ -71,13 +71,12 @@
import re
import math
import multiprocessing
import io
import sys
import threading
import time
from contextlib import contextmanager
from six import StringIO
from six import string_types
_error_matches = [
"^FAIL: ",
@ -246,7 +245,7 @@ def __getitem__(self, line_no):
def __str__(self):
"""Returns event lines and context."""
out = StringIO()
out = io.StringIO()
for i in range(self.start, self.end):
if i == self.line_no:
out.write(' >> %-6d%s' % (i, self[i]))
@ -386,7 +385,7 @@ def parse(self, stream, context=6, jobs=None):
(tuple): two lists containing ``BuildError`` and
``BuildWarning`` objects.
"""
if isinstance(stream, string_types):
if isinstance(stream, str):
with open(stream) as f:
return self.parse(f, context, jobs)

View file

@ -7,11 +7,10 @@
import argparse
import errno
import io
import re
import sys
from six import StringIO
class Command(object):
"""Parsed representation of a command from argparse.
@ -181,7 +180,7 @@ def __init__(self, prog, out=None, aliases=False, rst_levels=_rst_levels):
self.rst_levels = rst_levels
def format(self, cmd):
string = StringIO()
string = io.StringIO()
string.write(self.begin_command(cmd.prog))
if cmd.description:

View file

@ -18,8 +18,6 @@
from contextlib import contextmanager
from sys import platform as _platform
import six
from llnl.util import tty
from llnl.util.lang import dedupe, memoized
from llnl.util.symlink import islink, symlink
@ -520,7 +518,7 @@ def chgrp(path, group, follow_symlinks=True):
if is_windows:
raise OSError("Function 'chgrp' is not supported on Windows")
if isinstance(group, six.string_types):
if isinstance(group, str):
gid = grp.getgrnam(group).gr_gid
else:
gid = group
@ -1017,7 +1015,7 @@ def open_if_filename(str_or_file, mode="r"):
If it's a file object, just yields the file object.
"""
if isinstance(str_or_file, six.string_types):
if isinstance(str_or_file, str):
with open(str_or_file, mode) as f:
yield f
else:
@ -1602,7 +1600,7 @@ def find(root, files, recursive=True):
Returns:
list: The files that have been found
"""
if isinstance(files, six.string_types):
if isinstance(files, str):
files = [files]
if recursive:
@ -1666,7 +1664,7 @@ class FileList(collections.abc.Sequence):
"""
def __init__(self, files):
if isinstance(files, six.string_types):
if isinstance(files, str):
files = [files]
self.files = list(dedupe(files))
@ -1762,7 +1760,7 @@ def directories(self):
def directories(self, value):
value = value or []
# Accept a single directory as input
if isinstance(value, six.string_types):
if isinstance(value, str):
value = [value]
self._directories = [path_to_os_path(os.path.normpath(x))[0] for x in value]
@ -1898,7 +1896,7 @@ def find_headers(headers, root, recursive=False):
Returns:
HeaderList: The headers that have been found
"""
if isinstance(headers, six.string_types):
if isinstance(headers, str):
headers = [headers]
elif not isinstance(headers, collections.abc.Sequence):
message = "{0} expects a string or sequence of strings as the "
@ -2064,7 +2062,7 @@ def find_system_libraries(libraries, shared=True):
Returns:
LibraryList: The libraries that have been found
"""
if isinstance(libraries, six.string_types):
if isinstance(libraries, str):
libraries = [libraries]
elif not isinstance(libraries, collections.abc.Sequence):
message = "{0} expects a string or sequence of strings as the "
@ -2121,7 +2119,7 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
Returns:
LibraryList: The libraries that have been found
"""
if isinstance(libraries, six.string_types):
if isinstance(libraries, str):
libraries = [libraries]
elif not isinstance(libraries, collections.abc.Sequence):
message = "{0} expects a string or sequence of strings as the "

View file

@ -17,9 +17,6 @@
from datetime import datetime, timedelta
from typing import Any, Callable, Iterable, List, Tuple
import six
from six import string_types
# Ignore emacs backups when listing modules
ignore_modules = [r"^\.#", "~$"]
@ -200,14 +197,9 @@ def _memoized_function(*args, **kwargs):
return ret
except TypeError as e:
# TypeError is raised when indexing into a dict if the key is unhashable.
raise six.raise_from(
UnhashableArguments(
"args + kwargs '{}' was not hashable for function '{}'".format(
key, func.__name__
),
),
e,
)
raise UnhashableArguments(
"args + kwargs '{}' was not hashable for function '{}'".format(key, func.__name__),
) from e
return _memoized_function
@ -574,7 +566,7 @@ def match_predicate(*args):
def match(string):
for arg in args:
if isinstance(arg, string_types):
if isinstance(arg, str):
if re.search(arg, string):
return True
elif isinstance(arg, list) or isinstance(arg, tuple):

View file

@ -6,6 +6,7 @@
from __future__ import unicode_literals
import contextlib
import io
import os
import struct
import sys
@ -14,10 +15,6 @@
from datetime import datetime
from sys import platform as _platform
import six
from six import StringIO
from six.moves import input
if _platform != "win32":
import fcntl
import termios
@ -183,7 +180,7 @@ def msg(message, *args, **kwargs):
else:
cwrite("@*b{%s==>} %s%s" % (st_text, get_timestamp(), cescape(_output_filter(message))))
for arg in args:
print(indent + _output_filter(six.text_type(arg)))
print(indent + _output_filter(str(arg)))
def info(message, *args, **kwargs):
@ -201,13 +198,13 @@ def info(message, *args, **kwargs):
st_text = process_stacktrace(st_countback)
cprint(
"@%s{%s==>} %s%s"
% (format, st_text, get_timestamp(), cescape(_output_filter(six.text_type(message)))),
% (format, st_text, get_timestamp(), cescape(_output_filter(str(message)))),
stream=stream,
)
for arg in args:
if wrap:
lines = textwrap.wrap(
_output_filter(six.text_type(arg)),
_output_filter(str(arg)),
initial_indent=indent,
subsequent_indent=indent,
break_long_words=break_long_words,
@ -215,7 +212,7 @@ def info(message, *args, **kwargs):
for line in lines:
stream.write(line + "\n")
else:
stream.write(indent + _output_filter(six.text_type(arg)) + "\n")
stream.write(indent + _output_filter(str(arg)) + "\n")
def verbose(message, *args, **kwargs):
@ -238,7 +235,7 @@ def error(message, *args, **kwargs):
kwargs.setdefault("format", "*r")
kwargs.setdefault("stream", sys.stderr)
info("Error: " + six.text_type(message), *args, **kwargs)
info("Error: " + str(message), *args, **kwargs)
def warn(message, *args, **kwargs):
@ -247,7 +244,7 @@ def warn(message, *args, **kwargs):
kwargs.setdefault("format", "*Y")
kwargs.setdefault("stream", sys.stderr)
info("Warning: " + six.text_type(message), *args, **kwargs)
info("Warning: " + str(message), *args, **kwargs)
def die(message, *args, **kwargs):
@ -271,7 +268,7 @@ def get_number(prompt, **kwargs):
while number is None:
msg(prompt, newline=False)
ans = input()
if ans == six.text_type(abort):
if ans == str(abort):
return None
if ans:
@ -336,11 +333,11 @@ def hline(label=None, **kwargs):
cols -= 2
cols = min(max_width, cols)
label = six.text_type(label)
label = str(label)
prefix = char * 2 + " "
suffix = " " + (cols - len(prefix) - clen(label)) * char
out = StringIO()
out = io.StringIO()
out.write(prefix)
out.write(label)
out.write(suffix)

View file

@ -8,11 +8,10 @@
"""
from __future__ import division, unicode_literals
import io
import os
import sys
from six import StringIO, text_type
from llnl.util.tty import terminal_size
from llnl.util.tty.color import cextra, clen
@ -134,7 +133,7 @@ def colify(elts, **options):
)
# elts needs to be an array of strings so we can count the elements
elts = [text_type(elt) for elt in elts]
elts = [str(elt) for elt in elts]
if not elts:
return (0, ())
@ -232,7 +231,7 @@ def transpose():
def colified(elts, **options):
"""Invokes the ``colify()`` function but returns the result as a string
instead of writing it to an output string."""
sio = StringIO()
sio = io.StringIO()
options["output"] = sio
colify(elts, **options)
return sio.getvalue()

View file

@ -65,8 +65,6 @@
import sys
from contextlib import contextmanager
import six
class ColorParseError(Exception):
"""Raised when a color format fails to parse."""
@ -259,7 +257,7 @@ def cescape(string):
Returns:
(str): the string with color codes escaped
"""
string = six.text_type(string)
string = str(string)
string = string.replace("@", "@@")
string = string.replace("}", "}}")
return string

View file

@ -24,8 +24,6 @@
from types import ModuleType # novm
from typing import Optional # novm
from six import StringIO, string_types
import llnl.util.tty as tty
termios = None # type: Optional[ModuleType]
@ -308,7 +306,7 @@ def __init__(self, file_like):
self.file_like = file_like
if isinstance(file_like, string_types):
if isinstance(file_like, str):
self.open = True
elif _file_descriptors_work(file_like):
self.open = False
@ -324,7 +322,7 @@ def unwrap(self):
if self.file_like:
self.file = open(self.file_like, "w", encoding="utf-8")
else:
self.file = StringIO()
self.file = io.StringIO()
return self.file
else:
# We were handed an already-open file object. In this case we also
@ -787,7 +785,7 @@ def __enter__(self):
raise RuntimeError("file argument must be set by __init__ ")
# Open both write and reading on logfile
if type(self.logfile) == StringIO:
if type(self.logfile) == io.StringIO:
self._ioflag = True
# cannot have two streams on tempfile, so we must make our own
sys.stdout = self.logfile
@ -1013,7 +1011,7 @@ def _writer_daemon(
finally:
# send written data back to parent if we used a StringIO
if isinstance(log_file, StringIO):
if isinstance(log_file, io.StringIO):
control_pipe.send(log_file.getvalue())
log_file_wrapper.close()
close_connection_and_file(read_multiprocess_fd, in_pipe)

View file

@ -42,8 +42,7 @@ def _search_duplicate_compilers(error_cls):
import itertools
import pickle
import re
from six.moves.urllib.request import urlopen
from urllib.request import urlopen
import llnl.util.lang

View file

@ -17,9 +17,9 @@
import traceback
import warnings
from contextlib import closing
from urllib.error import HTTPError, URLError
import ruamel.yaml as yaml
from six.moves.urllib.error import HTTPError, URLError
import llnl.util.filesystem as fsys
import llnl.util.lang

View file

@ -17,8 +17,6 @@
import sysconfig
import uuid
import six
import archspec.cpu
import llnl.util.filesystem as fs
@ -78,7 +76,7 @@ def _try_import_from_store(module, query_spec, query_info=None):
command found and the concrete spec providing it
"""
# If it is a string assume it's one of the root specs by this module
if isinstance(query_spec, six.string_types):
if isinstance(query_spec, str):
# We have to run as part of this python interpreter
query_spec += " ^" + spec_for_current_python()
@ -923,7 +921,7 @@ def _missing(name, purpose, system_only=True):
def _required_system_executable(exes, msg):
"""Search for an executable is the system path only."""
if isinstance(exes, six.string_types):
if isinstance(exes, str):
exes = (exes,)
if spack.util.executable.which_string(*exes):
return True, None
@ -941,7 +939,7 @@ def _required_python_module(module, query_spec, msg):
def _required_executable(exes, query_spec, msg):
"""Search for an executable in the system path or in the bootstrap store."""
if isinstance(exes, six.string_types):
if isinstance(exes, str):
exes = (exes,)
if spack.util.executable.which_string(*exes) or _executables_in_store(exes, query_spec):
return True, None

View file

@ -33,6 +33,7 @@
calls you can make from within the install() function.
"""
import inspect
import io
import multiprocessing
import os
import re
@ -41,8 +42,6 @@
import traceback
import types
from six import StringIO
import llnl.util.tty as tty
from llnl.util.filesystem import install, install_tree, mkdirp
from llnl.util.lang import dedupe
@ -1352,7 +1351,7 @@ def __init__(self, msg, module, classname, traceback_string, log_name, log_type,
@property
def long_message(self):
out = StringIO()
out = io.StringIO()
out.write(self._long_message if self._long_message else "")
have_log = self.log_name and os.path.exists(self.log_name)

View file

@ -4,8 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import six
import llnl.util.lang
import spack.builder
@ -26,7 +24,7 @@ def sanity_check_prefix(builder):
pkg = builder.pkg
def check_paths(path_list, filetype, predicate):
if isinstance(path_list, six.string_types):
if isinstance(path_list, str):
path_list = [path_list]
for path in path_list:

View file

@ -10,8 +10,6 @@
import sys
from typing import List, Tuple
import six
import llnl.util.filesystem as fs
import spack.build_environment
@ -302,9 +300,7 @@ def define(cmake_var, value):
value = "ON" if value else "OFF"
else:
kind = "STRING"
if isinstance(value, collections.abc.Sequence) and not isinstance(
value, six.string_types
):
if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
value = ";".join(str(v) for v in value)
else:
value = str(value)

View file

@ -9,8 +9,6 @@
import inspect
from typing import List, Optional, Tuple
import six
import spack.build_environment
#: Builder classes, as registered by the "builder" decorator
@ -167,7 +165,7 @@ def __forward(self):
property(forward_property_to_getattr(attribute_name)),
)
class Adapter(six.with_metaclass(_PackageAdapterMeta, base_cls)):
class Adapter(base_cls, metaclass=_PackageAdapterMeta):
def __init__(self, pkg):
# Deal with custom phases in packages here
if hasattr(pkg, "phases"):
@ -456,7 +454,7 @@ def copy(self):
return copy.deepcopy(self)
class Builder(six.with_metaclass(BuilderMeta, collections.abc.Sequence)):
class Builder(collections.abc.Sequence, metaclass=BuilderMeta):
"""A builder is a class that, given a package object (i.e. associated with
concrete spec), knows how to install it.

View file

@ -16,11 +16,9 @@
import tempfile
import time
import zipfile
from six import iteritems, string_types
from six.moves.urllib.error import HTTPError, URLError
from six.moves.urllib.parse import urlencode
from six.moves.urllib.request import HTTPHandler, Request, build_opener
from urllib.error import HTTPError, URLError
from urllib.parse import urlencode
from urllib.request import HTTPHandler, Request, build_opener
import llnl.util.filesystem as fs
import llnl.util.tty as tty
@ -216,7 +214,7 @@ def stage_spec_jobs(specs, check_index_only=False, mirrors_to_check=None):
def _remove_satisfied_deps(deps, satisfied_list):
new_deps = {}
for key, value in iteritems(deps):
for key, value in deps.items():
new_value = set([v for v in value if v not in satisfied_list])
if new_value:
new_deps[key] = new_value
@ -1970,7 +1968,7 @@ def process_command(name, commands, repro_dir):
"""
tty.debug("spack {0} arguments: {1}".format(name, commands))
if len(commands) == 0 or isinstance(commands[0], string_types):
if len(commands) == 0 or isinstance(commands[0], str):
commands = [commands]
# Create a string [command 1] && [command 2] && ... && [command n] with commands

View file

@ -14,7 +14,6 @@
from typing import List, Tuple
import ruamel.yaml as yaml
import six
from ruamel.yaml.error import MarkedYAMLError
import llnl.util.tty as tty
@ -217,7 +216,7 @@ def parse_specs(args, **kwargs):
tests = kwargs.get("tests", False)
sargs = args
if not isinstance(args, six.string_types):
if not isinstance(args, str):
sargs = " ".join(args)
unquoted_flags = _UnquotedFlags.extract(sargs)

View file

@ -8,8 +8,6 @@
import argparse
import sys
from six import iteritems
import llnl.util.tty as tty
from llnl.util.lang import index_by
from llnl.util.tty.colify import colify
@ -138,13 +136,13 @@ def compiler_info(args):
print("\t\t%s = %s" % (cpath, getattr(c, cpath, None)))
if c.flags:
print("\tflags:")
for flag, flag_value in iteritems(c.flags):
for flag, flag_value in c.flags.items():
print("\t\t%s = %s" % (flag, flag_value))
if len(c.environment) != 0:
if len(c.environment.get("set", {})) != 0:
print("\tenvironment:")
print("\t set:")
for key, value in iteritems(c.environment["set"]):
for key, value in c.environment["set"].items():
print("\t %s = %s" % (key, value))
if c.extra_rpaths:
print("\tExtra rpaths:")

View file

@ -4,13 +4,12 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import io
import os
import shutil
import sys
import tempfile
import six
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
@ -737,7 +736,7 @@ def get_install_deps_target(name):
[get_install_deps_target(h) for h, _, _, _ in make_targets.adjacency_list]
)
buf = six.StringIO()
buf = io.StringIO()
template = spack.tengine.make_environment().get_template(os.path.join("depfile", "Makefile"))

View file

@ -7,8 +7,7 @@
import inspect
import textwrap
from six.moves import zip_longest
from itertools import zip_longest
import llnl.util.tty as tty
import llnl.util.tty.color as color

View file

@ -3,10 +3,9 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import io
import sys
import six
import llnl.util.tty.colify as colify
import spack.cmd
@ -29,7 +28,7 @@ def setup_parser(subparser):
def providers(parser, args):
valid_virtuals = sorted(spack.repo.path.provider_index.providers.keys())
buffer = six.StringIO()
buffer = io.StringIO()
isatty = sys.stdout.isatty()
if isatty:
buffer.write("Virtual packages:\n")

View file

@ -2,11 +2,9 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import io
import sys
import six
import llnl.util.tty as tty
import llnl.util.tty.colify as colify
@ -20,7 +18,7 @@
def report_tags(category, tags):
buffer = six.StringIO()
buffer = io.StringIO()
isatty = sys.stdout.isatty()
if isatty:
@ -88,7 +86,7 @@ def tags(parser, args):
return
# Report packages associated with tags
buffer = six.StringIO()
buffer = io.StringIO()
isatty = sys.stdout.isatty()
tags = args.tag if args.tag else available_tags

View file

@ -7,6 +7,7 @@
import argparse
import collections
import io
import os.path
import re
import sys
@ -16,8 +17,6 @@
except ImportError:
pytest = None # type: ignore
from six import StringIO
import llnl.util.filesystem
import llnl.util.tty.color as color
from llnl.util.tty.colify import colify
@ -126,7 +125,7 @@ def colorize(c, prefix):
old_output = sys.stdout
try:
sys.stdout = output = StringIO()
sys.stdout = output = io.StringIO()
pytest.main(["--collect-only"] + extra_args)
finally:
sys.stdout = old_output

View file

@ -5,10 +5,9 @@
from __future__ import division, print_function
import urllib.parse
from collections import defaultdict
import six.moves.urllib.parse as urllib_parse
import llnl.util.tty.color as color
from llnl.util import tty
@ -323,7 +322,7 @@ def add(self, pkg_name, fetcher):
md5_hashes[pkg_name].append(fetcher.url)
# parse out the URL scheme (https/http/ftp/etc.)
urlinfo = urllib_parse.urlparse(fetcher.url)
urlinfo = urllib.parse.urlparse(fetcher.url)
self.schemes[urlinfo.scheme] += 1
if urlinfo.scheme == "http":

View file

@ -12,8 +12,6 @@
import os
from typing import Dict # novm
import six
import archspec.cpu
import llnl.util.filesystem as fs
@ -427,7 +425,7 @@ def compiler_from_dict(items):
environment,
extra_rpaths,
enable_implicit_rpaths=implicit_rpaths,
**compiler_flags
**compiler_flags,
)
@ -677,18 +675,18 @@ def _default(fn_args):
try:
version = callback(path)
if version and six.text_type(version).strip() and version != "unknown":
if version and str(version).strip() and version != "unknown":
value = fn_args._replace(id=compiler_id._replace(version=version))
return value, None
error = "Couldn't get version for compiler {0}".format(path)
except spack.util.executable.ProcessError as e:
error = "Couldn't get version for compiler {0}\n".format(path) + six.text_type(e)
error = "Couldn't get version for compiler {0}\n".format(path) + str(e)
except Exception as e:
# Catching "Exception" here is fine because it just
# means something went wrong running a candidate executable.
error = "Error while executing candidate compiler {0}" "\n{1}: {2}".format(
path, e.__class__.__name__, six.text_type(e)
path, e.__class__.__name__, str(e)
)
return None, error

View file

@ -39,9 +39,7 @@
from typing import List # novm
import ruamel.yaml as yaml
import six
from ruamel.yaml.error import MarkedYAMLError
from six import iteritems
import llnl.util.lang
import llnl.util.tty as tty
@ -358,7 +356,7 @@ def clear(self):
def _process_dict_keyname_overrides(data):
"""Turn a trailing `:' in a key name into an override attribute."""
result = {}
for sk, sv in iteritems(data):
for sk, sv in data.items():
if sk.endswith(":"):
key = syaml.syaml_str(sk[:-1])
key.override = True
@ -973,7 +971,7 @@ def validate(data, schema, filename=None):
line_number = e.instance.lc.line + 1
else:
line_number = None
raise six.raise_from(ConfigFormatError(e, data, filename, line_number), e)
raise ConfigFormatError(e, data, filename, line_number) from e
# return the validated data so that we can access the raw data
# mostly relevant for environments
return test_data
@ -1140,7 +1138,7 @@ def they_are(t):
# come *before* dest in OrderdDicts
dest_keys = [dk for dk in dest.keys() if dk not in source]
for sk, sv in iteritems(source):
for sk, sv in source.items():
# always remove the dest items. Python dicts do not overwrite
# keys on insert, so this ensures that source keys are copied
# into dest along with mark provenance (i.e., file/line info).

View file

@ -7,7 +7,6 @@
import jsonschema
import jsonschema.exceptions
import six
import llnl.util.tty as tty
@ -97,7 +96,7 @@ def spec_from_entry(entry):
continue
# Value could be a list (of strings), boolean, or string
if isinstance(value, six.string_types):
if isinstance(value, str):
variant_strs.append("{0}={1}".format(name, value))
else:
try:
@ -169,10 +168,7 @@ def read(path, apply_updates):
jsonschema.validate(json_data, manifest_schema)
except (jsonschema.exceptions.ValidationError, decode_exception_type) as e:
raise six.raise_from(
ManifestValidationError("error parsing manifest JSON:", str(e)),
e,
)
raise ManifestValidationError("error parsing manifest JSON:", str(e)) from e
specs = entries_to_specs(json_data["specs"])
tty.debug("{0}: {1} specs read from manifest".format(path, str(len(specs))))

View file

@ -28,8 +28,6 @@
import time
from typing import Dict # novm
import six
try:
import uuid
@ -770,10 +768,7 @@ def _read_from_file(self, filename):
with open(filename, "r") as f:
fdata = sjson.load(f)
except Exception as e:
raise six.raise_from(
CorruptDatabaseError("error parsing database:", str(e)),
e,
)
raise CorruptDatabaseError("error parsing database:", str(e)) from e
if fdata is None:
return

View file

@ -2,11 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Data structures that represent Spack's dependency relationships.
"""
from six import string_types
"""Data structures that represent Spack's dependency relationships."""
import spack.spec
#: The types of dependency relationships that Spack understands.
@ -48,7 +44,7 @@ def canonical_deptype(deptype):
if deptype in ("all", all):
return all_deptypes
elif isinstance(deptype, string_types):
elif isinstance(deptype, str):
if deptype not in all_deptypes:
raise ValueError("Invalid dependency type: %s" % deptype)
return (deptype,)

View file

@ -20,8 +20,6 @@
import re
import sys
import six
import llnl.util.tty
import spack.config
@ -115,7 +113,7 @@ def _convert_to_iterable(single_val_or_multiple):
x = single_val_or_multiple
if x is None:
return []
elif isinstance(x, six.string_types):
elif isinstance(x, str):
return [x]
elif isinstance(x, spack.spec.Spec):
# Specs are iterable, but a single spec should be converted to a list

View file

@ -34,8 +34,6 @@ class OpenMpi(Package):
import re
from typing import List, Set # novm
import six
import llnl.util.lang
import llnl.util.tty.color
@ -234,7 +232,7 @@ class Foo(Package):
"""
global directive_names
if isinstance(dicts, six.string_types):
if isinstance(dicts, str):
dicts = (dicts,)
if not isinstance(dicts, collections.abc.Sequence):
@ -391,7 +389,7 @@ def _depends_on(pkg, spec, when=None, type=default_deptype, patches=None):
patches = [patches]
# auto-call patch() directive on any strings in patch list
patches = [patch(p) if isinstance(p, six.string_types) else p for p in patches]
patches = [patch(p) if isinstance(p, str) else p for p in patches]
assert all(callable(p) for p in patches)
# this is where we actually add the dependency to this package

View file

@ -12,8 +12,6 @@
import sys
from contextlib import contextmanager
import six
import llnl.util.filesystem as fs
import llnl.util.tty as tty
@ -363,12 +361,12 @@ def remove_install_directory(self, spec, deprecated=False):
os.unlink(path)
os.remove(metapath)
except OSError as e:
raise six.raise_from(RemoveFailedError(spec, path, e), e)
raise RemoveFailedError(spec, path, e) from e
elif os.path.exists(path):
try:
shutil.rmtree(path, **kwargs)
except OSError as e:
raise six.raise_from(RemoveFailedError(spec, path, e), e)
raise RemoveFailedError(spec, path, e) from e
path = os.path.dirname(path)
while path != self.root:

View file

@ -13,7 +13,6 @@
import time
import ruamel.yaml as yaml
import six
import llnl.util.filesystem as fs
import llnl.util.tty as tty
@ -679,7 +678,7 @@ def __init__(self, path, init_file=None, with_view=None, keep_relative=False):
self.views = {}
elif with_view is True:
self.views = {default_view_name: ViewDescriptor(self.path, self.view_path_default)}
elif isinstance(with_view, six.string_types):
elif isinstance(with_view, str):
self.views = {default_view_name: ViewDescriptor(self.path, with_view)}
# If with_view is None, then defer to the view settings determined by
# the manifest file
@ -776,7 +775,7 @@ def _read_manifest(self, f, raw_yaml=None):
# enable_view can be boolean, string, or None
if enable_view is True or enable_view is None:
self.views = {default_view_name: ViewDescriptor(self.path, self.view_path_default)}
elif isinstance(enable_view, six.string_types):
elif isinstance(enable_view, str):
self.views = {default_view_name: ViewDescriptor(self.path, enable_view)}
elif enable_view:
path = self.path
@ -2096,16 +2095,14 @@ def _update_and_write_manifest(self, raw_yaml_dict, yaml_dict):
ayl[name][:] = [
s
for s in ayl.setdefault(name, [])
if (not isinstance(s, six.string_types))
or s.startswith("$")
or Spec(s) in speclist.specs
if (not isinstance(s, str)) or s.startswith("$") or Spec(s) in speclist.specs
]
# Put the new specs into the first active list from the yaml
new_specs = [
entry
for entry in speclist.yaml_list
if isinstance(entry, six.string_types)
if isinstance(entry, str)
and not any(entry in ayl[name] for ayl in active_yaml_lists)
]
list_for_new_specs = active_yaml_lists[0].setdefault(name, [])
@ -2181,7 +2178,7 @@ def yaml_equivalent(first, second):
elif isinstance(first, list):
return isinstance(second, list) and _equiv_list(first, second)
else: # it's a string
return isinstance(second, six.string_types) and first == second
return isinstance(second, str) and first == second
def _equiv_list(first, second):

View file

@ -29,11 +29,9 @@
import re
import shutil
import sys
import urllib.parse
from typing import List, Optional # novm
import six
import six.moves.urllib.parse as urllib_parse
import llnl.util
import llnl.util.filesystem as fs
import llnl.util.tty as tty
@ -322,7 +320,7 @@ def candidate_urls(self):
# This must be skipped on Windows due to URL encoding
# of ':' characters on filepaths on Windows
if sys.platform != "win32" and url.startswith("file://"):
path = urllib_parse.quote(url[len("file://") :])
path = urllib.parse.quote(url[len("file://") :])
url = "file://" + path
urls.append(url)
@ -620,7 +618,7 @@ def archive(self, destination, **kwargs):
patterns = kwargs.get("exclude", None)
if patterns is not None:
if isinstance(patterns, six.string_types):
if isinstance(patterns, str):
patterns = [patterns]
for p in patterns:
tar.add_default_arg("--exclude=%s" % p)
@ -1607,7 +1605,7 @@ def from_url_scheme(url, *args, **kwargs):
in the given url."""
url = kwargs.get("url", url)
parsed_url = urllib_parse.urlparse(url, scheme="file")
parsed_url = urllib.parse.urlparse(url, scheme="file")
scheme_mapping = kwargs.get("scheme_mapping") or {
"file": "url",

View file

@ -2,8 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import six.moves.urllib.response as urllib_response
import urllib.response
import spack.util.url as url_util
import spack.util.web as web_util
@ -21,4 +20,4 @@ def gcs_open(req, *args, **kwargs):
stream = gcsblob.get_blob_byte_stream()
headers = gcsblob.get_blob_headers()
return urllib_response.addinfourl(stream, headers, url)
return urllib.response.addinfourl(stream, headers, url)

View file

@ -8,8 +8,6 @@
import re
import shutil
import six
import llnl.util.filesystem as fs
import llnl.util.tty as tty
@ -434,10 +432,7 @@ def from_file(filename):
test_suite._hash = content_hash
return test_suite
except Exception as e:
raise six.raise_from(
sjson.SpackJSONError("error parsing JSON TestSuite:", str(e)),
e,
)
raise sjson.SpackJSONError("error parsing JSON TestSuite:", str(e)) from e
def _add_msg_to_file(filename, msg):

View file

@ -2,7 +2,6 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""
This module encapsulates package installation functionality.
@ -30,6 +29,7 @@
import copy
import glob
import heapq
import io
import itertools
import os
import shutil
@ -37,8 +37,6 @@
import time
from collections import defaultdict
import six
import llnl.util.filesystem as fs
import llnl.util.lock as lk
import llnl.util.tty as tty
@ -594,7 +592,7 @@ def log(pkg):
# Finally, archive files that are specific to each package
with fs.working_dir(pkg.stage.path):
errors = six.StringIO()
errors = io.StringIO()
target_dir = os.path.join(spack.store.layout.metadata_path(pkg.spec), "archived-files")
for glob_expr in pkg.builder.archive_files:

View file

@ -12,6 +12,7 @@
import argparse
import inspect
import io
import operator
import os
import os.path
@ -23,8 +24,6 @@
import traceback
import warnings
from six import StringIO
import archspec.cpu
import llnl.util.lang
@ -700,7 +699,7 @@ def __call__(self, *argv, **kwargs):
prepend + [self.command_name] + list(argv)
)
out = StringIO()
out = io.StringIO()
try:
with log_output(out):
self.returncode = _invoke_command(self.command, self.parser, args, unknown)

View file

@ -19,7 +19,6 @@
import traceback
import ruamel.yaml.error as yaml_error
import six
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
@ -37,7 +36,7 @@
def _is_string(url):
return isinstance(url, six.string_types)
return isinstance(url, str)
def _display_mirror_entry(size, name, url, type_=None):
@ -78,10 +77,7 @@ def from_yaml(stream, name=None):
data = syaml.load(stream)
return Mirror.from_dict(data, name)
except yaml_error.MarkedYAMLError as e:
raise six.raise_from(
syaml.SpackYAMLError("error parsing YAML mirror:", str(e)),
e,
)
raise syaml.SpackYAMLError("error parsing YAML mirror:", str(e)) from e
@staticmethod
def from_json(stream, name=None):
@ -89,10 +85,7 @@ def from_json(stream, name=None):
d = sjson.load(stream)
return Mirror.from_dict(d, name)
except Exception as e:
raise six.raise_from(
sjson.SpackJSONError("error parsing JSON mirror:", str(e)),
e,
)
raise sjson.SpackJSONError("error parsing JSON mirror:", str(e)) from e
def to_dict(self):
if self._push_url is None:
@ -102,7 +95,7 @@ def to_dict(self):
@staticmethod
def from_dict(d, name=None):
if isinstance(d, six.string_types):
if isinstance(d, str):
return Mirror(d, name=name)
else:
return Mirror(d["fetch"], d["push"], name=name)
@ -257,10 +250,7 @@ def from_yaml(stream, name=None):
data = syaml.load(stream)
return MirrorCollection(data)
except yaml_error.MarkedYAMLError as e:
raise six.raise_from(
syaml.SpackYAMLError("error parsing YAML mirror collection:", str(e)),
e,
)
raise syaml.SpackYAMLError("error parsing YAML mirror collection:", str(e)) from e
@staticmethod
def from_json(stream, name=None):
@ -268,10 +258,7 @@ def from_json(stream, name=None):
d = sjson.load(stream)
return MirrorCollection(d)
except Exception as e:
raise six.raise_from(
sjson.SpackJSONError("error parsing JSON mirror collection:", str(e)),
e,
)
raise sjson.SpackJSONError("error parsing JSON mirror collection:", str(e)) from e
def to_dict(self, recursive=False):
return syaml_dict(

View file

@ -2,7 +2,6 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""This is where most of the action happens in Spack.
The spack package class structure is based strongly on Homebrew
@ -18,6 +17,7 @@
import glob
import hashlib
import inspect
import io
import os
import re
import shutil
@ -29,8 +29,6 @@
import warnings
from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Type # novm
import six
import llnl.util.filesystem as fsys
import llnl.util.tty as tty
from llnl.util.lang import classproperty, memoized, nullcontext
@ -130,7 +128,7 @@ def preferred_version(pkg):
return sorted(pkg.versions, key=key_fn).pop()
class WindowsRPathMeta(object):
class WindowsRPath(object):
"""Collection of functionality surrounding Windows RPATH specific features
This is essentially meaningless for all other platforms
@ -256,7 +254,7 @@ def determine_spec_details(cls, prefix, objs_in_prefix):
variants = [variants]
for variant in variants:
if isinstance(variant, six.string_types):
if isinstance(variant, str):
variant = (variant, {})
variant_str, extra_attributes = variant
spec_str = "{0}@{1} {2}".format(cls.name, version_str, variant_str)
@ -443,7 +441,7 @@ def test_log_pathname(test_stage, spec):
return os.path.join(test_stage, "test-{0}-out.txt".format(TestSuite.test_pkg_id(spec)))
class PackageBase(six.with_metaclass(PackageMeta, WindowsRPathMeta, PackageViewMixin, object)):
class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
"""This is the superclass for all spack packages.
***The Package class***
@ -1870,7 +1868,7 @@ def cache_extra_test_sources(self, srcs):
be copied to the corresponding location(s) under the install
testing directory.
"""
paths = [srcs] if isinstance(srcs, six.string_types) else srcs
paths = [srcs] if isinstance(srcs, str) else srcs
for path in paths:
src_path = os.path.join(self.stage.source_path, path)
@ -2000,7 +1998,7 @@ def run_test(
print(line.rstrip("\n"))
if exc_type is spack.util.executable.ProcessError:
out = six.StringIO()
out = io.StringIO()
spack.build_environment.write_log_summary(
out, "test", self.test_log_file, last=1
)
@ -2022,9 +2020,9 @@ def run_test(
return False
def _run_test_helper(self, runner, options, expected, status, installed, purpose):
status = [status] if isinstance(status, six.integer_types) else status
expected = [expected] if isinstance(expected, six.string_types) else expected
options = [options] if isinstance(options, six.string_types) else options
status = [status] if isinstance(status, int) else status
expected = [expected] if isinstance(expected, str) else expected
options = [options] if isinstance(options, str) else options
if purpose:
tty.msg(purpose)
@ -2365,7 +2363,7 @@ def format_doc(cls, **kwargs):
doc = re.sub(r"\s+", " ", cls.__doc__)
lines = textwrap.wrap(doc, 72)
results = six.StringIO()
results = io.StringIO()
for line in lines:
results.write((" " * indent) + line + "\n")
return results.getvalue()

View file

@ -4,8 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import stat
from six import string_types
import spack.error
import spack.repo
from spack.config import ConfigError
@ -144,7 +142,7 @@ def preferred_variants(cls, pkg_name):
break
# allow variants to be list or string
if not isinstance(variants, string_types):
if not isinstance(variants, str):
variants = " ".join(variants)
# Only return variants that are actually supported by the package

View file

@ -8,8 +8,6 @@
import shlex
import sys
from six import string_types
import spack.error
import spack.util.path as sp
@ -147,7 +145,7 @@ def expect(self, id):
sys.exit(1)
def setup(self, text):
if isinstance(text, string_types):
if isinstance(text, str):
# shlex does not handle Windows path
# separators, so we must normalize to posix
text = sp.convert_to_posix_path(text)

View file

@ -5,8 +5,6 @@
"""Classes and functions to manage providers of virtual dependencies"""
import itertools
import six
import spack.error
import spack.util.spack_json as sjson
@ -66,7 +64,7 @@ def providers_for(self, virtual_spec):
"""
result = set()
# Allow string names to be passed as input, as well as specs
if isinstance(virtual_spec, six.string_types):
if isinstance(virtual_spec, str):
virtual_spec = spack.spec.Spec(virtual_spec)
# Add all the providers that satisfy the vpkg spec.
@ -174,7 +172,7 @@ def update(self, spec):
assert not self.repository.is_virtual_safe(spec.name), msg
pkg_provided = self.repository.get_pkg_class(spec.name).provided
for provided_spec, provider_specs in six.iteritems(pkg_provided):
for provided_spec, provider_specs in pkg_provided.items():
for provider_spec_readonly in provider_specs:
# TODO: fix this comment.
# We want satisfaction other than flags
@ -310,7 +308,7 @@ def _transform(providers, transform_fun, out_mapping_type=dict):
def mapiter(mappings):
if isinstance(mappings, dict):
return six.iteritems(mappings)
return mappings.items()
else:
return iter(mappings)

View file

@ -27,7 +27,6 @@
from typing import Dict # novm
import ruamel.yaml as yaml
import six
import llnl.util.filesystem as fs
import llnl.util.lang
@ -450,8 +449,7 @@ def __len__(self):
return len(self._packages_to_stats)
@six.add_metaclass(abc.ABCMeta)
class Indexer(object):
class Indexer(metaclass=abc.ABCMeta):
"""Adaptor for indexes that need to be generated when repos are updated."""
def __init__(self, repository):
@ -678,7 +676,7 @@ def __init__(self, *repos, **kwargs):
# Add each repo to this path.
for repo in repos:
try:
if isinstance(repo, six.string_types):
if isinstance(repo, str):
repo = Repo(repo, cache=cache)
self.put_last(repo)
except RepoError as e:

View file

@ -12,10 +12,8 @@
import socket
import time
import xml.sax.saxutils
from six import iteritems, text_type
from six.moves.urllib.parse import urlencode
from six.moves.urllib.request import HTTPHandler, Request, build_opener
from urllib.parse import urlencode
from urllib.request import HTTPHandler, Request, build_opener
import llnl.util.tty as tty
from llnl.util.filesystem import working_dir
@ -158,7 +156,7 @@ def build_report_for_package(self, directory_name, package, duration):
if cdash_phase not in phases_encountered:
phases_encountered.append(cdash_phase)
report_data[cdash_phase]["loglines"].append(
text_type("{0} output for {1}:".format(cdash_phase, package["name"]))
str("{0} output for {1}:".format(cdash_phase, package["name"]))
)
elif cdash_phase:
report_data[cdash_phase]["loglines"].append(xml.sax.saxutils.escape(line))
@ -289,7 +287,7 @@ def extract_ctest_test_data(self, package, phases, report_data):
# Generate a report for this package.
# The first line just says "Testing package name-hash"
report_data["test"]["loglines"].append(
text_type("{0} output for {1}:".format("test", package["name"]))
str("{0} output for {1}:".format("test", package["name"]))
)
for line in package["stdout"].splitlines()[1:]:
report_data["test"]["loglines"].append(xml.sax.saxutils.escape(line))
@ -502,7 +500,7 @@ def upload(self, filename):
def finalize_report(self):
if self.buildIds:
tty.msg("View your build results here:")
for package_name, buildid in iteritems(self.buildIds):
for package_name, buildid in self.buildIds.items():
# Construct and display a helpful link if CDash responded with
# a buildId.
build_url = self.cdash_upload_url

View file

@ -3,13 +3,11 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import urllib.error
import urllib.request
import urllib.response
from io import BufferedReader, IOBase
import six
import six.moves.urllib.error as urllib_error
import six.moves.urllib.request as urllib_request
import six.moves.urllib.response as urllib_response
import spack.util.s3 as s3_util
import spack.util.url as url_util
@ -63,32 +61,32 @@ def _s3_open(url):
return url, headers, stream
class UrllibS3Handler(urllib_request.HTTPSHandler):
class UrllibS3Handler(urllib.request.HTTPSHandler):
def s3_open(self, req):
orig_url = req.get_full_url()
from botocore.exceptions import ClientError # type: ignore[import]
try:
url, headers, stream = _s3_open(orig_url)
return urllib_response.addinfourl(stream, headers, url)
return urllib.response.addinfourl(stream, headers, url)
except ClientError as err:
# if no such [KEY], but [KEY]/index.html exists,
# return that, instead.
if err.response["Error"]["Code"] == "NoSuchKey":
try:
_, headers, stream = _s3_open(url_util.join(orig_url, "index.html"))
return urllib_response.addinfourl(stream, headers, orig_url)
return urllib.response.addinfourl(stream, headers, orig_url)
except ClientError as err2:
if err.response["Error"]["Code"] == "NoSuchKey":
# raise original error
raise six.raise_from(urllib_error.URLError(err), err)
raise urllib.error.URLError(err) from err
raise six.raise_from(urllib_error.URLError(err2), err2)
raise urllib.error.URLError(err2) from err2
raise six.raise_from(urllib_error.URLError(err), err)
raise urllib.error.URLError(err) from err
S3OpenerDirector = urllib_request.build_opener(UrllibS3Handler())
S3OpenerDirector = urllib.request.build_opener(UrllibS3Handler())
open = S3OpenerDirector.open

View file

@ -3,11 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""This module contains jsonschema files for all of Spack's YAML formats."""
import warnings
import six
import llnl.util.lang
import llnl.util.tty
@ -45,7 +42,7 @@ def _deprecated_properties(validator, deprecated, instance, schema):
# Retrieve the template message
msg_str_or_func = deprecated["message"]
if isinstance(msg_str_or_func, six.string_types):
if isinstance(msg_str_or_func, str):
msg = msg_str_or_func.format(properties=deprecated_properties)
else:
msg = msg_str_or_func(instance, deprecated_properties)

View file

@ -2,13 +2,11 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for config.yaml configuration file.
.. literalinclude:: _spack_root/lib/spack/spack/schema/config.py
:lines: 13-
"""
import six
from llnl.util.lang import union_dicts
@ -124,7 +122,7 @@ def update(data):
changed = False
install_tree = data.get("install_tree", None)
if isinstance(install_tree, six.string_types):
if isinstance(install_tree, str):
# deprecated short-form install tree
# add value as `root` in updated install_tree
data["install_tree"] = {"root": install_tree}
@ -148,7 +146,7 @@ def update(data):
changed = True
shared_linking = data.get("shared_linking", None)
if isinstance(shared_linking, six.string_types):
if isinstance(shared_linking, str):
# deprecated short-form shared_linking: rpath/runpath
# add value as `type` in updated shared_linking
data["shared_linking"] = {"type": shared_linking, "bind": False}

View file

@ -14,8 +14,6 @@
import types
import warnings
from six import string_types
import archspec.cpu
try:
@ -213,7 +211,7 @@ def build_criteria_names(costs, tuples):
def issequence(obj):
if isinstance(obj, string_types):
if isinstance(obj, str):
return False
return isinstance(obj, (collections.abc.Sequence, types.GeneratorType))
@ -225,7 +223,7 @@ def listify(args):
def packagize(pkg):
if isinstance(pkg, string_types):
if isinstance(pkg, str):
return spack.repo.path.get_pkg_class(pkg)
else:
return pkg
@ -949,7 +947,7 @@ def _rules_from_requirements(self, pkg_name, requirements):
"""Manipulate requirements from packages.yaml, and return a list of tuples
with a uniform structure (name, policy, requirements).
"""
if isinstance(requirements, string_types):
if isinstance(requirements, str):
rules = [(pkg_name, "one_of", [requirements])]
else:
rules = []

View file

@ -81,6 +81,7 @@
"""
import collections
import collections.abc
import io
import itertools
import os
import re
@ -88,7 +89,6 @@
import warnings
import ruamel.yaml as yaml
import six
import llnl.util.filesystem as fs
import llnl.util.lang as lang
@ -274,11 +274,11 @@ def _string_or_none(s):
other = spec_or_platform_tuple
platform_tuple = other.platform, other.os, other.target
elif isinstance(spec_or_platform_tuple, (six.string_types, tuple)):
elif isinstance(spec_or_platform_tuple, (str, tuple)):
spec_fields = spec_or_platform_tuple
# Normalize the string to a tuple
if isinstance(spec_or_platform_tuple, six.string_types):
if isinstance(spec_or_platform_tuple, str):
spec_fields = spec_or_platform_tuple.split("-")
if len(spec_fields) != 3:
msg = "cannot construct an ArchSpec from {0!s}"
@ -534,7 +534,6 @@ def copy(self):
@property
def concrete(self):
"""True if the spec is concrete, False otherwise"""
# return all(v for k, v in six.iteritems(self.to_cmp_dict()))
return self.platform and self.os and self.target and self.target_concrete
@property
@ -584,7 +583,7 @@ def __init__(self, *args):
arg = args[0]
# If there is one argument, it's either another CompilerSpec
# to copy or a string to parse
if isinstance(arg, six.string_types):
if isinstance(arg, str):
c = SpecParser().parse_compiler(arg)
self.name = c.name
self.versions = c.versions
@ -1335,7 +1334,7 @@ def __init__(
# Build spec should be the actual build spec unless marked dirty.
self._build_spec = None
if isinstance(spec_like, six.string_types):
if isinstance(spec_like, str):
spec_list = SpecParser(self).parse(spec_like)
if len(spec_list) > 1:
raise ValueError("More than one spec in string: " + spec_like)
@ -1538,7 +1537,7 @@ def _set_architecture(self, **kwargs):
new_vals = tuple(kwargs.get(arg, None) for arg in arch_attrs)
self.architecture = ArchSpec(new_vals)
else:
new_attrvals = [(a, v) for a, v in six.iteritems(kwargs) if a in arch_attrs]
new_attrvals = [(a, v) for a, v in kwargs.items() if a in arch_attrs]
for new_attr, new_value in new_attrvals:
if getattr(self.architecture, new_attr):
raise DuplicateArchitectureError(
@ -1932,9 +1931,7 @@ def to_node_dict(self, hash=ht.dag_hash):
package_hash = self._package_hash
# Full hashes are in bytes
if not isinstance(package_hash, six.text_type) and isinstance(
package_hash, six.binary_type
):
if not isinstance(package_hash, str) and isinstance(package_hash, bytes):
package_hash = package_hash.decode("utf-8")
d["package_hash"] = package_hash
@ -2204,7 +2201,7 @@ def read_yaml_dep_specs(deps, hash_type=ht.dag_hash.name):
else:
elt = dep
dep_name = dep["name"]
if isinstance(elt, six.string_types):
if isinstance(elt, str):
# original format, elt is just the dependency hash.
dep_hash, deptypes = elt, ["build", "link"]
elif isinstance(elt, tuple):
@ -2390,7 +2387,7 @@ def spec_and_dependency_types(s):
# Recurse on dependencies
for s, s_dependencies in dep_like.items():
if isinstance(s, six.string_types):
if isinstance(s, str):
dag_node, dependency_types = name_and_dependency_types(s)
else:
dag_node, dependency_types = spec_and_dependency_types(s)
@ -2469,10 +2466,7 @@ def from_yaml(stream):
data = yaml.load(stream)
return Spec.from_dict(data)
except yaml.error.MarkedYAMLError as e:
raise six.raise_from(
syaml.SpackYAMLError("error parsing YAML spec:", str(e)),
e,
)
raise syaml.SpackYAMLError("error parsing YAML spec:", str(e)) from e
@staticmethod
def from_json(stream):
@ -2485,10 +2479,7 @@ def from_json(stream):
data = sjson.load(stream)
return Spec.from_dict(data)
except Exception as e:
raise six.raise_from(
sjson.SpackJSONError("error parsing JSON spec:", str(e)),
e,
)
raise sjson.SpackJSONError("error parsing JSON spec:", str(e)) from e
@staticmethod
def extract_json_from_clearsig(data):
@ -3112,10 +3103,7 @@ def flat_dependencies(self, **kwargs):
# with inconsistent constraints. Users cannot produce
# inconsistent specs like this on the command line: the
# parser doesn't allow it. Spack must be broken!
raise six.raise_from(
InconsistentSpecError("Invalid Spec DAG: %s" % e.message),
e,
)
raise InconsistentSpecError("Invalid Spec DAG: %s" % e.message) from e
def index(self, deptype="all"):
"""Return a dictionary that points to all the dependencies in this
@ -4214,7 +4202,7 @@ def format(self, format_string=default_format, **kwargs):
color = kwargs.get("color", False)
transform = kwargs.get("transform", {})
out = six.StringIO()
out = io.StringIO()
def write(s, c=None):
f = clr.cescape(s)
@ -4437,7 +4425,7 @@ def old_format(self, format_string="$_$@$%@+$+$=", **kwargs):
token_transforms = dict((k.upper(), v) for k, v in kwargs.get("transform", {}).items())
length = len(format_string)
out = six.StringIO()
out = io.StringIO()
named = escape = compiler = False
named_str = fmt = ""
@ -5153,7 +5141,7 @@ def do_parse(self):
self.unexpected_token()
except spack.parse.ParseError as e:
raise six.raise_from(SpecParseError(e), e)
raise SpecParseError(e) from e
# Generate lookups for git-commit-based versions
for spec in specs:

View file

@ -4,8 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import itertools
from six import string_types
import spack.variant
from spack.error import SpackError
from spack.spec import Spec
@ -21,7 +19,7 @@ def __init__(self, name="specs", yaml_list=None, reference=None):
self._reference = reference # TODO: Do we need defensive copy here?
# Validate yaml_list before assigning
if not all(isinstance(s, string_types) or isinstance(s, (list, dict)) for s in yaml_list):
if not all(isinstance(s, str) or isinstance(s, (list, dict)) for s in yaml_list):
raise ValueError(
"yaml_list can contain only valid YAML types! Found:\n %s"
% [type(s) for s in yaml_list]
@ -91,7 +89,7 @@ def remove(self, spec):
remove = [
s
for s in self.yaml_list
if (isinstance(s, string_types) and not s.startswith("$")) and Spec(s) == Spec(spec)
if (isinstance(s, str) and not s.startswith("$")) and Spec(s) == Spec(spec)
]
if not remove:
msg = "Cannot remove %s from SpecList %s\n" % (spec, self.name)
@ -145,7 +143,7 @@ def _expand_references(self, yaml):
for item in yaml:
# if it's a reference, expand it
if isinstance(item, string_types) and item.startswith("$"):
if isinstance(item, str) and item.startswith("$"):
# replace the reference and apply the sigil if needed
name, sigil = self._parse_reference(item)
referent = [

View file

@ -16,8 +16,6 @@
import tempfile
from typing import Dict # novm
from six import iteritems, string_types
import llnl.util.lang
import llnl.util.tty as tty
from llnl.util.filesystem import (
@ -171,7 +169,7 @@ def get_stage_root():
if _stage_root is None:
candidates = spack.config.get("config:build_stage")
if isinstance(candidates, string_types):
if isinstance(candidates, str):
candidates = [candidates]
resolved_candidates = _resolve_paths(candidates)
@ -288,7 +286,7 @@ def __init__(
"""
# TODO: fetch/stage coupling needs to be reworked -- the logic
# TODO: here is convoluted and not modular enough.
if isinstance(url_or_fetch_strategy, string_types):
if isinstance(url_or_fetch_strategy, str):
self.fetcher = fs.from_url_scheme(url_or_fetch_strategy)
elif isinstance(url_or_fetch_strategy, fs.FetchStrategy):
self.fetcher = url_or_fetch_strategy
@ -709,7 +707,7 @@ def _add_to_root_stage(self):
else:
raise
for key, value in iteritems(placement):
for key, value in placement.items():
destination_path = os.path.join(target_path, value)
source_path = os.path.join(self.source_path, key)
@ -903,7 +901,7 @@ def get_checksums_for_versions(url_dict, name, **kwargs):
"",
*llnl.util.lang.elide_list(
["{0:{1}} {2}".format(str(v), max_len, url_dict[v]) for v in sorted_versions]
)
),
)
print()

View file

@ -21,8 +21,6 @@
import os
import re
import six
import llnl.util.lang
import llnl.util.tty as tty
@ -69,7 +67,7 @@ def parse_install_tree(config_dict):
install_tree = config_dict.get("install_tree", {})
padded_length = False
if isinstance(install_tree, six.string_types):
if isinstance(install_tree, str):
tty.warn("Using deprecated format for configuring install_tree")
unpadded_root = install_tree
unpadded_root = spack.util.path.canonicalize_path(unpadded_root)
@ -309,7 +307,7 @@ def find(constraints, multiple=False, query_fn=None, **kwargs):
List of matching specs
"""
# Normalize input to list of specs
if isinstance(constraints, six.string_types):
if isinstance(constraints, str):
constraints = [spack.spec.Spec(constraints)]
matching_specs, errors = [], []

View file

@ -5,8 +5,6 @@
import functools
import warnings
import six
import archspec.cpu
import llnl.util.tty as tty
@ -24,7 +22,7 @@ def _ensure_other_is_target(method):
@functools.wraps(method)
def _impl(self, other):
if isinstance(other, six.string_types):
if isinstance(other, str):
other = Target(other)
if not isinstance(other, Target):
@ -95,7 +93,7 @@ def __hash__(self):
def from_dict_or_value(dict_or_value):
# A string here represents a generic target (like x86_64 or ppc64) or
# a custom micro-architecture
if isinstance(dict_or_value, six.string_types):
if isinstance(dict_or_value, str):
return Target(dict_or_value)
# TODO: From a dict we actually retrieve much more information than

View file

@ -6,8 +6,6 @@
import textwrap
from typing import List # novm
import six
import llnl.util.lang
import spack.config
@ -57,7 +55,7 @@ def context_property(cls, func):
context_property = ContextMeta.context_property
class Context(six.with_metaclass(ContextMeta, object)):
class Context(metaclass=ContextMeta):
"""Base class for context classes that are used with the template
engine.
"""

View file

@ -2,9 +2,9 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import pickle
import pytest
from six.moves import cPickle
from spack.main import SpackCommand
@ -52,6 +52,6 @@ def test_dump(tmpdir):
def test_pickle(tmpdir):
with tmpdir.as_cwd():
build_env("--pickle", _out_file, "zlib")
environment = cPickle.load(open(_out_file, "rb"))
environment = pickle.load(open(_out_file, "rb"))
assert type(environment) == dict
assert "PATH" in environment

View file

@ -4,13 +4,13 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import filecmp
import glob
import io
import os
import shutil
import sys
from argparse import Namespace
import pytest
from six import StringIO
import llnl.util.filesystem as fs
import llnl.util.link_tree
@ -507,7 +507,7 @@ def test_env_repo():
def test_user_removed_spec():
"""Ensure a user can remove from any position in the spack.yaml file."""
initial_yaml = StringIO(
initial_yaml = io.StringIO(
"""\
env:
specs:
@ -545,7 +545,7 @@ def test_user_removed_spec():
def test_init_from_lockfile(tmpdir):
"""Test that an environment can be instantiated from a lockfile."""
initial_yaml = StringIO(
initial_yaml = io.StringIO(
"""\
env:
specs:
@ -573,7 +573,7 @@ def test_init_from_lockfile(tmpdir):
def test_init_from_yaml(tmpdir):
"""Test that an environment can be instantiated from a lockfile."""
initial_yaml = StringIO(
initial_yaml = io.StringIO(
"""\
env:
specs:
@ -602,7 +602,7 @@ def test_env_view_external_prefix(tmpdir_factory, mutable_database, mock_package
fake_bin = fake_prefix.join("bin")
fake_bin.ensure(dir=True)
initial_yaml = StringIO(
initial_yaml = io.StringIO(
"""\
env:
specs:
@ -611,7 +611,7 @@ def test_env_view_external_prefix(tmpdir_factory, mutable_database, mock_package
"""
)
external_config = StringIO(
external_config = io.StringIO(
"""\
packages:
a:
@ -682,7 +682,7 @@ def test_env_with_config():
mpileaks:
version: [2.2]
"""
_env_create("test", StringIO(test_config))
_env_create("test", io.StringIO(test_config))
e = ev.read("test")
with e:
@ -699,7 +699,7 @@ def test_with_config_bad_include():
- /no/such/directory
- no/such/file.yaml
"""
_env_create(env_name, StringIO(test_config))
_env_create(env_name, io.StringIO(test_config))
e = ev.read(env_name)
with pytest.raises(spack.config.ConfigFileError) as exc:
@ -723,7 +723,7 @@ def test_env_with_include_config_files_same_basename():
[libelf, mpileaks]
"""
_env_create("test", StringIO(test_config))
_env_create("test", io.StringIO(test_config))
e = ev.read("test")
fs.mkdirp(os.path.join(e.path, "path", "to"))
@ -788,7 +788,7 @@ def test_env_with_included_config_file(packages_file):
include_filename = "included-config.yaml"
test_config = mpileaks_env_config(os.path.join(".", include_filename))
_env_create("test", StringIO(test_config))
_env_create("test", io.StringIO(test_config))
e = ev.read("test")
included_path = os.path.join(e.path, include_filename)
@ -842,7 +842,7 @@ def test_env_with_included_config_scope(tmpdir, packages_file):
test_config = mpileaks_env_config(config_scope_path)
# Create the environment
_env_create("test", StringIO(test_config))
_env_create("test", io.StringIO(test_config))
e = ev.read("test")
@ -868,7 +868,7 @@ def test_env_with_included_config_var_path(packages_file):
config_var_path = os.path.join("$tempdir", "included-config.yaml")
test_config = mpileaks_env_config(config_var_path)
_env_create("test", StringIO(test_config))
_env_create("test", io.StringIO(test_config))
e = ev.read("test")
config_real_path = substitute_path_variables(config_var_path)
@ -893,7 +893,7 @@ def test_env_config_precedence():
specs:
- mpileaks
"""
_env_create("test", StringIO(test_config))
_env_create("test", io.StringIO(test_config))
e = ev.read("test")
with open(os.path.join(e.path, "included-config.yaml"), "w") as f:
@ -926,7 +926,7 @@ def test_included_config_precedence():
specs:
- mpileaks
"""
_env_create("test", StringIO(test_config))
_env_create("test", io.StringIO(test_config))
e = ev.read("test")
with open(os.path.join(e.path, "high-config.yaml"), "w") as f:
@ -1263,7 +1263,7 @@ def test_env_config_view_default(tmpdir, mock_stage, mock_fetch, install_mockery
specs:
- mpileaks
"""
_env_create("test", StringIO(test_config))
_env_create("test", io.StringIO(test_config))
with ev.read("test"):
install("--fake")
@ -2672,7 +2672,7 @@ def test_modules_relative_to_views(tmpdir, install_mockery, mock_fetch):
roots:
tcl: modules
"""
_env_create("test", StringIO(spack_yaml))
_env_create("test", io.StringIO(spack_yaml))
with ev.read("test") as e:
install()
@ -2707,7 +2707,7 @@ def test_multiple_modules_post_env_hook(tmpdir, install_mockery, mock_fetch):
roots:
tcl: full_modules
"""
_env_create("test", StringIO(spack_yaml))
_env_create("test", io.StringIO(spack_yaml))
with ev.read("test") as e:
install()
@ -3116,7 +3116,7 @@ def test_environment_depfile_makefile(depfile_flags, expected_installs, tmpdir,
makefile,
"--make-disable-jobserver",
"--make-target-prefix=prefix",
*depfile_flags
*depfile_flags,
)
# Do make dry run.
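Every StringIO(...) call in the hunks above now comes from io.StringIO, the single in-memory text stream on Python 3 (there is no separate cStringIO). A minimal sketch of the pattern these tests rely on, with illustrative data:

    import io

    # Readable: treat an in-memory string as an open file.
    stream = io.StringIO("env:\n  specs:\n  - mpileaks\n")
    assert stream.readline() == "env:\n"

    # Writable: collect output in memory, then retrieve it with getvalue().
    out = io.StringIO()
    out.write("done\n")
    assert out.getvalue() == "done\n"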

View file

@ -2,8 +2,8 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import builtins
import filecmp
import itertools
import os
@ -12,7 +12,6 @@
import time
import pytest
from six.moves import builtins
import llnl.util.filesystem as fs
import llnl.util.tty as tty

View file

@ -9,7 +9,6 @@
from copy import copy
import pytest
from six import iteritems
import llnl.util.filesystem as fs
@ -73,7 +72,7 @@ def test_get_compiler_duplicates(config):
)
assert len(cfg_file_to_duplicates) == 1
cfg_file, duplicates = next(iteritems(cfg_file_to_duplicates))
cfg_file, duplicates = next(iter(cfg_file_to_duplicates.items()))
assert len(duplicates) == 1
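six.iteritems(d) becomes d.items() on Python 3, where the dict view is already lazy; wrapping it in iter() as above lets next() pull out the single expected pair. A standalone sketch with illustrative data:

    cfg_file_to_duplicates = {"compilers.yaml": ["gcc@9.3.0"]}

    # Python 3: dict views are iterable; next(iter(...)) grabs the first item.
    cfg_file, duplicates = next(iter(cfg_file_to_duplicates.items()))

    assert cfg_file == "compilers.yaml"
    assert duplicates == ["gcc@9.3.0"]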

View file

@ -5,13 +5,13 @@
import collections
import getpass
import io
import os
import sys
import tempfile
from datetime import date
import pytest
from six import StringIO
import llnl.util.tty as tty
from llnl.util.filesystem import getuid, join_path, mkdirp, touch, touchp
@ -1012,7 +1012,7 @@ def test_write_empty_single_file_scope(tmpdir):
def check_schema(name, file_contents):
"""Check a Spack YAML schema against some data"""
f = StringIO(file_contents)
f = io.StringIO(file_contents)
data = syaml.load_config(f)
spack.config.validate(data, name)

View file

@ -2,12 +2,11 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Test environment internals without CLI"""
import io
import sys
import pytest
from six import StringIO
import spack.environment as ev
import spack.spec
@ -79,7 +78,7 @@ def test_env_change_spec(tmpdir, mock_packages, config):
def test_env_change_spec_in_definition(tmpdir, mock_packages, config, mutable_mock_env_path):
initial_yaml = StringIO(_test_matrix_yaml)
initial_yaml = io.StringIO(_test_matrix_yaml)
e = ev.create("test", initial_yaml)
e.concretize()
e.write()
@ -96,7 +95,7 @@ def test_env_change_spec_in_definition(tmpdir, mock_packages, config, mutable_mo
def test_env_change_spec_in_matrix_raises_error(
tmpdir, mock_packages, config, mutable_mock_env_path
):
initial_yaml = StringIO(_test_matrix_yaml)
initial_yaml = io.StringIO(_test_matrix_yaml)
e = ev.create("test", initial_yaml)
e.concretize()
e.write()

View file

@ -2,10 +2,10 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import io
import sys
import pytest
import six
import spack.graph
import spack.repo
@ -25,7 +25,7 @@ def test_static_graph_mpileaks(config, mock_packages):
"""Test a static spack graph for a simple package."""
s = spack.spec.Spec("mpileaks").normalized()
stream = six.StringIO()
stream = io.StringIO()
spack.graph.graph_dot([s], static=True, out=stream)
dot = stream.getvalue()
@ -52,7 +52,7 @@ def test_static_graph_mpileaks(config, mock_packages):
def test_dynamic_dot_graph_mpileaks(mock_packages, config):
"""Test dynamically graphing the mpileaks package."""
s = spack.spec.Spec("mpileaks").concretized()
stream = six.StringIO()
stream = io.StringIO()
spack.graph.graph_dot([s], static=False, out=stream)
dot = stream.getvalue()
@ -83,7 +83,7 @@ def test_ascii_graph_mpileaks(config, mock_packages, monkeypatch):
monkeypatch.setattr(spack.graph.AsciiGraph, "_node_label", lambda self, node: node.name)
s = spack.spec.Spec("mpileaks").concretized()
stream = six.StringIO()
stream = io.StringIO()
graph = spack.graph.AsciiGraph()
graph.write(s, out=stream, color=False)
graph_str = stream.getvalue()

View file

@ -8,7 +8,6 @@
import sys
import pytest
import six
from llnl.util.filesystem import (
HeaderList,
@ -320,7 +319,7 @@ def test_searching_order(search_fn, search_list, root, kwargs):
rlist = list(reversed(result))
# At this point make sure the search list is a sequence
if isinstance(search_list, six.string_types):
if isinstance(search_list, str):
search_list = [search_list]
# Discard entries in the order they appear in search list

View file

@ -18,7 +18,7 @@
mpi@:10.0: set([zmpi])},
'stuff': {stuff: set([externalvirtual])}}
"""
from six import StringIO
import io
import spack.repo
from spack.provider_index import ProviderIndex
@ -28,10 +28,10 @@
def test_provider_index_round_trip(mock_packages):
p = ProviderIndex(specs=spack.repo.all_package_names(), repository=spack.repo.path)
ostream = StringIO()
ostream = io.StringIO()
p.to_json(ostream)
istream = StringIO(ostream.getvalue())
istream = io.StringIO(ostream.getvalue())
q = ProviderIndex.from_json(istream, repository=spack.repo.path)
assert p == q

View file

@ -3,9 +3,9 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Tests for tag index cache files."""
import io
import pytest
from six import StringIO
import spack.cmd.install
import spack.tag
@ -40,7 +40,7 @@
def test_tag_copy(mock_packages):
index = spack.tag.TagIndex.from_json(StringIO(tags_json), repository=mock_packages)
index = spack.tag.TagIndex.from_json(io.StringIO(tags_json), repository=mock_packages)
new_index = index.copy()
assert index.tags == new_index.tags
@ -100,25 +100,27 @@ def test_tag_index_round_trip(mock_packages):
mock_index = spack.repo.path.tag_index
assert mock_index.tags
ostream = StringIO()
ostream = io.StringIO()
mock_index.to_json(ostream)
istream = StringIO(ostream.getvalue())
istream = io.StringIO(ostream.getvalue())
new_index = spack.tag.TagIndex.from_json(istream, repository=mock_packages)
assert mock_index == new_index
def test_tag_equal(mock_packages):
first_index = spack.tag.TagIndex.from_json(StringIO(tags_json), repository=mock_packages)
second_index = spack.tag.TagIndex.from_json(StringIO(tags_json), repository=mock_packages)
first_index = spack.tag.TagIndex.from_json(io.StringIO(tags_json), repository=mock_packages)
second_index = spack.tag.TagIndex.from_json(io.StringIO(tags_json), repository=mock_packages)
assert first_index == second_index
def test_tag_merge(mock_packages):
first_index = spack.tag.TagIndex.from_json(StringIO(tags_json), repository=mock_packages)
second_index = spack.tag.TagIndex.from_json(StringIO(more_tags_json), repository=mock_packages)
first_index = spack.tag.TagIndex.from_json(io.StringIO(tags_json), repository=mock_packages)
second_index = spack.tag.TagIndex.from_json(
io.StringIO(more_tags_json), repository=mock_packages
)
assert first_index != second_index
@ -139,14 +141,14 @@ def test_tag_merge(mock_packages):
def test_tag_not_dict(mock_packages):
list_json = "[]"
with pytest.raises(spack.tag.TagIndexError) as e:
spack.tag.TagIndex.from_json(StringIO(list_json), repository=mock_packages)
spack.tag.TagIndex.from_json(io.StringIO(list_json), repository=mock_packages)
assert "not a dict" in str(e)
def test_tag_no_tags(mock_packages):
pkg_json = '{"packages": []}'
with pytest.raises(spack.tag.TagIndexError) as e:
spack.tag.TagIndex.from_json(StringIO(pkg_json), repository=mock_packages)
spack.tag.TagIndex.from_json(io.StringIO(pkg_json), repository=mock_packages)
assert "does not start with" in str(e)

View file

@ -6,14 +6,9 @@
import codecs
import os
import sys
import tokenize
import pytest
import six
if six.PY3:
import tokenize
else:
from lib2to3.pgen2 import tokenize
import spack.util.unparse
@ -25,14 +20,10 @@
def read_pyfile(filename):
"""Read and return the contents of a Python source file (as a
string), taking into account the file encoding."""
if six.PY3:
with open(filename, "rb") as pyfile:
encoding = tokenize.detect_encoding(pyfile.readline)[0]
with codecs.open(filename, "r", encoding=encoding) as pyfile:
source = pyfile.read()
else:
with open(filename, "r") as pyfile:
source = pyfile.read()
with open(filename, "rb") as pyfile:
encoding = tokenize.detect_encoding(pyfile.readline)[0]
with codecs.open(filename, "r", encoding=encoding) as pyfile:
source = pyfile.read()
return source
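With the Python 2 branch gone, read_pyfile always detects the declared source encoding before decoding. A self-contained sketch of the same approach; tokenize.open(filename), noted in the comment, is a shorter stdlib equivalent that the code above does not use:

    import codecs
    import tokenize

    def read_pyfile(filename):
        # Find the PEP 263 coding declaration (or BOM) from the raw bytes.
        with open(filename, "rb") as pyfile:
            encoding = tokenize.detect_encoding(pyfile.readline)[0]
        # Re-open in text mode with the detected encoding.
        # Equivalent shortcut: source = tokenize.open(filename).read()
        with codecs.open(filename, "r", encoding=encoding) as pyfile:
            return pyfile.read()

    print(read_pyfile(__file__)[:40])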
@ -341,16 +332,9 @@ def test_huge_float():
check_ast_roundtrip("-1e1000j")
@pytest.mark.skipif(not six.PY2, reason="Only works for Python 2")
def test_min_int27():
check_ast_roundtrip(str(-sys.maxint - 1))
check_ast_roundtrip("-(%s)" % (sys.maxint + 1))
@pytest.mark.skipif(not six.PY3, reason="Only works for Python 3")
def test_min_int30():
check_ast_roundtrip(str(-(2 ** 31)))
check_ast_roundtrip(str(-(2 ** 63)))
check_ast_roundtrip(str(-(2**31)))
check_ast_roundtrip(str(-(2**63)))
def test_imaginary_literals():
@ -358,9 +342,6 @@ def test_imaginary_literals():
check_ast_roundtrip("-7j")
check_ast_roundtrip("0j")
check_ast_roundtrip("-0j")
if six.PY2:
check_ast_roundtrip("-(7j)")
check_ast_roundtrip("-(0j)")
def test_negative_zero():
@ -391,12 +372,11 @@ def test_function_arguments():
check_ast_roundtrip("def f(a, b = 2): pass")
check_ast_roundtrip("def f(a = 5, b = 2): pass")
check_ast_roundtrip("def f(*args, **kwargs): pass")
if six.PY3:
check_ast_roundtrip("def f(*, a = 1, b = 2): pass")
check_ast_roundtrip("def f(*, a = 1, b): pass")
check_ast_roundtrip("def f(*, a, b = 2): pass")
check_ast_roundtrip("def f(a, b = None, *, c, **kwds): pass")
check_ast_roundtrip("def f(a=2, *args, c=5, d, **kwds): pass")
check_ast_roundtrip("def f(*, a = 1, b = 2): pass")
check_ast_roundtrip("def f(*, a = 1, b): pass")
check_ast_roundtrip("def f(*, a, b = 2): pass")
check_ast_roundtrip("def f(a, b = None, *, c, **kwds): pass")
check_ast_roundtrip("def f(a=2, *args, c=5, d, **kwds): pass")
def test_relative_import():
@ -407,12 +387,10 @@ def test_import_many():
check_ast_roundtrip(import_many)
@pytest.mark.skipif(not six.PY3, reason="Only for Python 3")
def test_nonlocal():
check_ast_roundtrip(nonlocal_ex)
@pytest.mark.skipif(not six.PY3, reason="Only for Python 3")
def test_raise_from():
check_ast_roundtrip(raise_from)
@ -449,17 +427,11 @@ def test_joined_str_361():
check_ast_roundtrip('f"{key:4}={value!a:#06x}"')
@pytest.mark.skipif(not six.PY2, reason="Only for Python 2")
def test_repr():
check_ast_roundtrip(a_repr)
@pytest.mark.skipif(sys.version_info[:2] < (3, 6), reason="Only for Python 3.6 or greater")
def test_complex_f_string():
check_ast_roundtrip(complex_f_string)
@pytest.mark.skipif(not six.PY3, reason="Only for Python 3")
def test_annotations():
check_ast_roundtrip("def f(a : int): pass")
check_ast_roundtrip("def f(a: int = 5): pass")
@ -511,7 +483,6 @@ def test_class_decorators():
check_ast_roundtrip(class_decorator)
@pytest.mark.skipif(not six.PY3, reason="Only for Python 3")
def test_class_definition():
check_ast_roundtrip("class A(metaclass=type, *[], **{}): pass")
@ -525,7 +496,6 @@ def test_try_except_finally():
check_ast_roundtrip(try_except_finally)
@pytest.mark.skipif(not six.PY3, reason="Only for Python 3")
def test_starred_assignment():
check_ast_roundtrip("a, *b, c = seq")
check_ast_roundtrip("a, (*b, c) = seq")

View file

@ -25,11 +25,10 @@
spack doesn't need anyone to tell it where to get the tarball even though
it's never been told about that version before.
"""
import io
import os
import re
from six import StringIO
from six.moves.urllib.parse import urlsplit, urlunsplit
from urllib.parse import urlsplit, urlunsplit
import llnl.util.tty as tty
from llnl.util.tty.color import cescape, colorize
@ -874,7 +873,7 @@ def color_url(path, **kwargs):
vends = [vo + vl - 1 for vo in voffs]
nerr = verr = 0
out = StringIO()
out = io.StringIO()
for i in range(len(path)):
if i == vs:
out.write("@c")

View file

@ -9,15 +9,13 @@
import json
import os
import os.path
import pickle
import platform
import re
import shlex
import socket
import sys
import six
from six.moves import cPickle
from six.moves import shlex_quote as cmd_quote
import llnl.util.tty as tty
from llnl.util.lang import dedupe
@ -131,7 +129,7 @@ def env_var_to_source_line(var, val):
fname=bash_function_finder.sub(r"\1", var), decl=val
)
else:
source_line = "{var}={val}; export {var}".format(var=var, val=cmd_quote(val))
source_line = "{var}={val}; export {var}".format(var=var, val=shlex.quote(val))
return source_line
@ -154,7 +152,7 @@ def dump_environment(path, environment=None):
@system_path_filter(arg_slice=slice(1))
def pickle_environment(path, environment=None):
"""Pickle an environment dictionary to a file."""
cPickle.dump(dict(environment if environment else os.environ), open(path, "wb"), protocol=2)
pickle.dump(dict(environment if environment else os.environ), open(path, "wb"), protocol=2)
def get_host_environment_metadata():
@ -627,7 +625,7 @@ def shell_modifications(self, shell="sh", explicit=False, env=None):
cmds += _shell_unset_strings[shell].format(name)
else:
if sys.platform != "win32":
cmd = _shell_set_strings[shell].format(name, cmd_quote(new_env[name]))
cmd = _shell_set_strings[shell].format(name, shlex.quote(new_env[name]))
else:
cmd = _shell_set_strings[shell].format(name, new_env[name])
cmds += cmd
@ -1024,7 +1022,7 @@ def _source_single_file(file_and_args, environment):
current_environment = kwargs.get("env", dict(os.environ))
for f in files:
# Normalize the input to the helper function
if isinstance(f, six.string_types):
if isinstance(f, str):
f = [f]
current_environment = _source_single_file(f, environment=current_environment)
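six.moves.shlex_quote is simply shlex.quote in the standard library, and cPickle is again plain pickle. A hedged sketch of the quoting pattern behind the generated export lines (the helper name here is illustrative, not Spack's):

    import shlex

    def export_line(var, val):
        # shlex.quote makes the value safe to paste into a POSIX shell.
        return "{var}={val}; export {var}".format(var=var, val=shlex.quote(val))

    print(export_line("MY_FLAGS", "-O2 -g; echo injected"))
    # MY_FLAGS='-O2 -g; echo injected'; export MY_FLAGS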

View file

@ -9,9 +9,6 @@
import subprocess
import sys
from six import string_types, text_type
from six.moves import shlex_quote
import llnl.util.tty as tty
import spack.error
@ -168,7 +165,7 @@ def __call__(self, *args, **kwargs):
raise ValueError("Cannot use `str` as input stream.")
def streamify(arg, mode):
if isinstance(arg, string_types):
if isinstance(arg, str):
return open(arg, mode), True
elif arg in (str, str.split):
return subprocess.PIPE, False
@ -213,17 +210,17 @@ def streamify(arg, mode):
result = ""
if output in (str, str.split):
if sys.platform == "win32":
outstr = text_type(out.decode("ISO-8859-1"))
outstr = str(out.decode("ISO-8859-1"))
else:
outstr = text_type(out.decode("utf-8"))
outstr = str(out.decode("utf-8"))
result += outstr
if output is str.split:
sys.stdout.write(outstr)
if error in (str, str.split):
if sys.platform == "win32":
errstr = text_type(err.decode("ISO-8859-1"))
errstr = str(err.decode("ISO-8859-1"))
else:
errstr = text_type(err.decode("utf-8"))
errstr = str(err.decode("utf-8"))
result += errstr
if error is str.split:
sys.stderr.write(errstr)
@ -283,7 +280,7 @@ def which_string(*args, **kwargs):
path = kwargs.get("path", os.environ.get("PATH", ""))
required = kwargs.get("required", False)
if isinstance(path, string_types):
if isinstance(path, str):
path = path.split(os.pathsep)
for name in args:
@ -334,7 +331,7 @@ def which(*args, **kwargs):
Executable: The first executable that is found in the path
"""
exe = which_string(*args, **kwargs)
return Executable(shlex_quote(exe)) if exe else None
return Executable(shlex.quote(exe)) if exe else None
class ProcessError(spack.error.SpackError):
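six.text_type is just str on Python 3, so converting subprocess output reduces to bytes.decode. A small sketch of the decode step; the ISO-8859-1 branch mirrors the Windows handling above:

    import sys

    def to_text(raw: bytes) -> str:
        # was: six.text_type(raw.decode(...)); decode() already returns str on Python 3
        if sys.platform == "win32":
            return raw.decode("ISO-8859-1")
        return raw.decode("utf-8")

    assert to_text(b"hello\n") == "hello\n"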

View file

@ -5,10 +5,10 @@
from __future__ import print_function
import io
import sys
from ctest_log_parser import BuildError, BuildWarning, CTestLogParser
from six import StringIO
import llnl.util.tty as tty
from llnl.util.tty.color import cescape, colorize
@ -86,7 +86,7 @@ def make_log_context(log_events, width=None):
width = sys.maxsize
wrap_width = width - num_width - 6
out = StringIO()
out = io.StringIO()
next_line = 1
for event in log_events:
start = event.start

View file

@ -6,12 +6,11 @@
# Need this because of spack.util.string
from __future__ import absolute_import
import io
import itertools
import re
import string
from six import StringIO
import spack.error
__all__ = [
@ -261,6 +260,6 @@ def _str_helper(self, stream, level=0):
stream.write(self._subspaces[name]._str_helper(stream, level + 1))
def __str__(self):
stream = StringIO()
stream = io.StringIO()
self._str_helper(stream)
return stream.getvalue()

View file

@ -15,8 +15,7 @@
import sys
import tempfile
from datetime import date
from six.moves.urllib.parse import urlparse
from urllib.parse import urlparse
import llnl.util.tty as tty
from llnl.util.lang import memoized

View file

@ -1,16 +0,0 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import base64
from six import PY3, binary_type, text_type
def b32encode(digest):
# type: (binary_type) -> text_type
b32 = base64.b32encode(digest)
if PY3:
return b32.decode()
return b32
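The deleted spack.util.py2.b32encode helper existed only to bridge the bytes/str split; on Python 3, base64.b32encode always returns bytes, so callers decode the result themselves (as verify.py does further down in this diff). A minimal sketch:

    import base64
    import hashlib

    digest = hashlib.sha1(b"some file contents").digest()
    b32 = base64.b32encode(digest).decode()  # bytes -> str

    print(b32)  # 32 base32 characters for a 20-byte SHA-1 digest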

View file

@ -2,10 +2,8 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import six.moves.urllib.parse as urllib_parse
import urllib.parse
import spack
import spack.util.url as url_util
@ -30,7 +28,7 @@ def get_mirror_connection(url, url_type="push"):
def _parse_s3_endpoint_url(endpoint_url):
if not urllib_parse.urlparse(endpoint_url, scheme="").scheme:
if not urllib.parse.urlparse(endpoint_url, scheme="").scheme:
endpoint_url = "://".join(("https", endpoint_url))
return endpoint_url
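six.moves.urllib.parse maps one-to-one onto urllib.parse. The check above relies on urlparse leaving the scheme empty when the URL has none, so a bare host gets https:// prepended. A standalone sketch of that behaviour (the function name is illustrative):

    import urllib.parse

    def normalize_endpoint(endpoint_url):
        # With no scheme present, urlparse(..., scheme="") reports scheme == "".
        if not urllib.parse.urlparse(endpoint_url, scheme="").scheme:
            endpoint_url = "://".join(("https", endpoint_url))
        return endpoint_url

    assert normalize_endpoint("s3.example.com") == "https://s3.example.com"
    assert normalize_endpoint("http://s3.example.com") == "http://s3.example.com"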

View file

@ -4,12 +4,9 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Simple wrapper around JSON to guarantee consistent use of load/dump. """
import collections
import json
from typing import Any, Dict, Optional # novm
from six import PY3, iteritems, string_types
import spack.error
__all__ = ["load", "dump", "SpackJSONError", "encode_json_dict", "decode_json_dict"]
@ -20,7 +17,7 @@
def load(stream):
# type: (Any) -> Dict
"""Spack JSON needs to be ordered to support specs."""
if isinstance(stream, string_types):
if isinstance(stream, str):
load = json.loads # type: ignore[assignment]
else:
load = json.load # type: ignore[assignment]
@ -56,26 +53,6 @@ def _strify(data, ignore_dicts=False):
Converts python 2 unicodes to str in JSON data, or the other way around."""
# this is a no-op in python 3
if PY3:
return data
# if this is a unicode string in python 2, return its string representation
if isinstance(data, string_types):
return data.encode("utf-8")
# if this is a list of values, return list of byteified values
if isinstance(data, list):
return [_strify(item, ignore_dicts=True) for item in data]
# if this is a dictionary, return dictionary of byteified keys and values
# but only if we haven't already byteified it
if isinstance(data, dict) and not ignore_dicts:
return collections.OrderedDict(
(_strify(key, ignore_dicts=True), _strify(value, ignore_dicts=True))
for key, value in iteritems(data)
)
# if it's anything else, return it in its original form
return data
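With the Python 2 byteification shim (_strify) removed, load only needs to choose between json.loads for a string and json.load for a file-like stream. A simplified sketch of that dispatch; the real function also threads through Spack's ordering behaviour, which is omitted here:

    import io
    import json

    def load(stream):
        # Accept either a JSON string or an open file-like object.
        if isinstance(stream, str):
            return json.loads(stream)
        return json.load(stream)

    assert load('{"spec": "zlib@1.2.13"}') == {"spec": "zlib@1.2.13"}
    assert load(io.StringIO('{"spec": "zlib@1.2.13"}')) == {"spec": "zlib@1.2.13"}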

View file

@ -15,12 +15,12 @@
import collections
import collections.abc
import ctypes
import io
import re
from typing import List # novm
import ruamel.yaml as yaml
from ruamel.yaml import RoundTripDumper, RoundTripLoader
from six import StringIO, string_types
from llnl.util.tty.color import cextra, clen, colorize
@ -52,7 +52,7 @@ class syaml_int(int):
#: mapping from syaml type -> primitive type
syaml_types = {
syaml_str: string_types,
syaml_str: str,
syaml_int: int,
syaml_dict: dict,
syaml_list: list,
@ -263,7 +263,7 @@ def represent_data(self, data):
result = super(LineAnnotationDumper, self).represent_data(data)
if data is None:
result.value = syaml_str("null")
elif isinstance(result.value, string_types):
elif isinstance(result.value, str):
result.value = syaml_str(data)
if markable(result.value):
mark(result.value, data)
@ -318,7 +318,7 @@ def dump_config(*args, **kwargs):
def dump_annotated(data, stream=None, *args, **kwargs):
kwargs["Dumper"] = LineAnnotationDumper
sio = StringIO()
sio = io.StringIO()
yaml.dump(data, sio, *args, **kwargs)
# write_line_break() is not called by YAML for empty lines, so we
@ -327,7 +327,7 @@ def dump_annotated(data, stream=None, *args, **kwargs):
getvalue = None
if stream is None:
stream = StringIO()
stream = io.StringIO()
getvalue = stream.getvalue
# write out annotations and lines, accounting for color

View file

@ -2,10 +2,9 @@
#
# SPDX-License-Identifier: Python-2.0
# coding: utf-8
from __future__ import absolute_import
from six.moves import cStringIO
import io
from .unparser import Unparser
@ -13,7 +12,6 @@
def unparse(tree, py_ver_consistent=False):
v = cStringIO()
unparser = Unparser(py_ver_consistent=py_ver_consistent)
unparser.visit(tree, v)
v = io.StringIO()
Unparser(py_ver_consistent=py_ver_consistent).visit(tree, v)
return v.getvalue().strip() + "\n"
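The rewritten unparse builds its output in an io.StringIO and returns the stripped value plus a trailing newline. Assuming a Spack checkout on sys.path, usage would look roughly like this (the exact formatting of the output is up to the unparser):

    import ast

    from spack.util.unparse import unparse  # requires Spack's lib/spack on sys.path

    tree = ast.parse("x = [i**2 for i in range(3)]")
    source = unparse(tree, py_ver_consistent=True)

    print(source)                  # round-trippable source text
    assert source.endswith("\n")   # single trailing newline, per the code above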

View file

@ -1,16 +1,13 @@
# Copyright (c) 2014-2021, Simon Percivall and Spack Project Developers.
#
# SPDX-License-Identifier: Python-2.0
"Usage: unparse.py <path to source file>"
from __future__ import print_function, unicode_literals
import ast
import sys
from contextlib import contextmanager
import six
from six import StringIO
from io import StringIO
# TODO: if we require Python 3.7, use its `nullcontext()`
@ -76,11 +73,7 @@ def is_simple_tuple(slice_value):
return (
isinstance(slice_value, ast.Tuple)
and slice_value.elts
and (
# Python 2 doesn't allow starred elements in tuples like Python 3
six.PY2
or not any(isinstance(elt, ast.Starred) for elt in slice_value.elts)
)
and not any(isinstance(elt, ast.Starred) for elt in slice_value.elts)
)
@ -145,7 +138,7 @@ def fill(self, text=""):
def write(self, text):
"Append a piece of text to the current line."
self.f.write(six.text_type(text))
self.f.write(str(text))
class _Block:
"""A context manager for preparing the source for blocks. It adds
@ -395,25 +388,14 @@ def visit_YieldFrom(self, node):
def visit_Raise(self, node):
self.fill("raise")
if six.PY3:
if not node.exc:
assert not node.cause
return
self.write(" ")
self.dispatch(node.exc)
if node.cause:
self.write(" from ")
self.dispatch(node.cause)
else:
self.write(" ")
if node.type:
self.dispatch(node.type)
if node.inst:
self.write(", ")
self.dispatch(node.inst)
if node.tback:
self.write(", ")
self.dispatch(node.tback)
if not node.exc:
assert not node.cause
return
self.write(" ")
self.dispatch(node.exc)
if node.cause:
self.write(" from ")
self.dispatch(node.cause)
def visit_Try(self, node):
self.fill("try")
@ -462,10 +444,7 @@ def visit_ExceptHandler(self, node):
self.dispatch(node.type)
if node.name:
self.write(" as ")
if six.PY3:
self.write(node.name)
else:
self.dispatch(node.name)
self.write(node.name)
with self.block():
self.dispatch(node.body)
@ -475,42 +454,35 @@ def visit_ClassDef(self, node):
self.fill("@")
self.dispatch(deco)
self.fill("class " + node.name)
if six.PY3:
with self.delimit_if("(", ")", condition=node.bases or node.keywords):
comma = False
for e in node.bases:
if comma:
self.write(", ")
else:
comma = True
self.dispatch(e)
for e in node.keywords:
if comma:
self.write(", ")
else:
comma = True
self.dispatch(e)
if sys.version_info[:2] < (3, 5):
if node.starargs:
if comma:
self.write(", ")
else:
comma = True
self.write("*")
self.dispatch(node.starargs)
if node.kwargs:
if comma:
self.write(", ")
else:
comma = True
self.write("**")
self.dispatch(node.kwargs)
elif node.bases:
with self.delimit("(", ")"):
for a in node.bases[:-1]:
self.dispatch(a)
with self.delimit_if("(", ")", condition=node.bases or node.keywords):
comma = False
for e in node.bases:
if comma:
self.write(", ")
self.dispatch(node.bases[-1])
else:
comma = True
self.dispatch(e)
for e in node.keywords:
if comma:
self.write(", ")
else:
comma = True
self.dispatch(e)
if sys.version_info[:2] < (3, 5):
if node.starargs:
if comma:
self.write(", ")
else:
comma = True
self.write("*")
self.dispatch(node.starargs)
if node.kwargs:
if comma:
self.write(", ")
else:
comma = True
self.write("**")
self.dispatch(node.kwargs)
with self.block():
self.dispatch(node.body)
@ -654,26 +626,11 @@ def visit_Bytes(self, node):
self.write(repr(node.s))
def visit_Str(self, tree):
if six.PY3:
# Python 3.5, 3.6, and 3.7 can't tell if something was written as a
# unicode constant. Try to make that consistent with 'u' for '\u- literals
if self._py_ver_consistent and repr(tree.s).startswith("'\\u"):
self.write("u")
self._write_constant(tree.s)
elif self._py_ver_consistent:
self.write(repr(tree.s)) # just do a python 2 repr for consistency
else:
# if from __future__ import unicode_literals is in effect,
# then we want to output string literals using a 'b' prefix
# and unicode literals with no prefix.
if "unicode_literals" not in self.future_imports:
self.write(repr(tree.s))
elif isinstance(tree.s, str):
self.write("b" + repr(tree.s))
elif isinstance(tree.s, unicode): # noqa: F821
self.write(repr(tree.s).lstrip("u"))
else:
assert False, "shouldn't get here"
# Python 3.5, 3.6, and 3.7 can't tell if something was written as a
# unicode constant. Try to make that consistent with 'u' for '\u- literals
if self._py_ver_consistent and repr(tree.s).startswith("'\\u"):
self.write("u")
self._write_constant(tree.s)
def visit_JoinedStr(self, node):
# JoinedStr(expr* values)
@ -805,15 +762,7 @@ def visit_Constant(self, node):
def visit_Num(self, node):
repr_n = repr(node.n)
if six.PY3:
self.write(repr_n.replace("inf", INFSTR))
else:
# Parenthesize negative numbers, to avoid turning (-1)**2 into -1**2.
with self.require_parens(pnext(_Precedence.FACTOR), node):
if "inf" in repr_n and repr_n.endswith("*j"):
repr_n = repr_n.replace("*j", "j")
# Substitute overflowing decimal literal for AST infinities.
self.write(repr_n.replace("inf", INFSTR))
self.write(repr_n.replace("inf", INFSTR))
def visit_List(self, node):
with self.delimit("[", "]"):
@ -917,17 +866,7 @@ def visit_UnaryOp(self, node):
if operator_precedence != _Precedence.FACTOR:
self.write(" ")
self.set_precedence(operator_precedence, node.operand)
if six.PY2 and isinstance(node.op, ast.USub) and isinstance(node.operand, ast.Num):
# If we're applying unary minus to a number, parenthesize the number.
# This is necessary: -2147483648 is different from -(2147483648) on
# a 32-bit machine (the first is an int, the second a long), and
# -7j is different from -(7j). (The first has real part 0.0, the second
# has real part -0.0.)
with self.delimit("(", ")"):
self.dispatch(node.operand)
else:
self.dispatch(node.operand)
self.dispatch(node.operand)
binop = {
"Add": "+",

View file

@ -11,9 +11,7 @@
import posixpath
import re
import sys
import six.moves.urllib.parse
from six import string_types
import urllib.parse
from spack.util.path import (
canonicalize_path,
@ -50,7 +48,7 @@ def local_file_path(url):
If url is a file:// URL, return the absolute path to the local
file or directory referenced by it. Otherwise, return None.
"""
if isinstance(url, string_types):
if isinstance(url, str):
url = parse(url)
if url.scheme == "file":
@ -75,23 +73,23 @@ def parse(url, scheme="file"):
url (str): URL to be parsed
scheme (str): associated URL scheme
Returns:
(six.moves.urllib.parse.ParseResult): For file scheme URLs, the
(urllib.parse.ParseResult): For file scheme URLs, the
netloc and path components are concatenated and passed through
spack.util.path.canoncalize_path(). Otherwise, the returned value
is the same as urllib's urlparse() with allow_fragments=False.
"""
# guarantee a value passed in is of proper url format. Guarantee
# allows for easier string manipulation accross platforms
if isinstance(url, string_types):
if isinstance(url, str):
require_url_format(url)
url = escape_file_url(url)
url_obj = (
six.moves.urllib.parse.urlparse(
urllib.parse.urlparse(
url,
scheme=scheme,
allow_fragments=False,
)
if isinstance(url, string_types)
if isinstance(url, str)
else url
)
@ -119,7 +117,7 @@ def parse(url, scheme="file"):
if sys.platform == "win32":
path = convert_to_posix_path(path)
return six.moves.urllib.parse.ParseResult(
return urllib.parse.ParseResult(
scheme=scheme,
netloc=netloc,
path=path,
@ -134,7 +132,7 @@ def format(parsed_url):
Returns a canonicalized format of the given URL as a string.
"""
if isinstance(parsed_url, string_types):
if isinstance(parsed_url, str):
parsed_url = parse(parsed_url)
return parsed_url.geturl()
@ -195,8 +193,7 @@ def join(base_url, path, *extra, **kwargs):
'file:///opt/spack'
"""
paths = [
(x) if isinstance(x, string_types) else x.geturl()
for x in itertools.chain((base_url, path), extra)
(x) if isinstance(x, str) else x.geturl() for x in itertools.chain((base_url, path), extra)
]
paths = [convert_to_posix_path(x) for x in paths]
@ -204,7 +201,7 @@ def join(base_url, path, *extra, **kwargs):
last_abs_component = None
scheme = ""
for i in range(n - 1, -1, -1):
obj = six.moves.urllib.parse.urlparse(
obj = urllib.parse.urlparse(
paths[i],
scheme="",
allow_fragments=False,
@ -218,7 +215,7 @@ def join(base_url, path, *extra, **kwargs):
# Without a scheme, we have to go back looking for the
# next-last component that specifies a scheme.
for j in range(i - 1, -1, -1):
obj = six.moves.urllib.parse.urlparse(
obj = urllib.parse.urlparse(
paths[j],
scheme="",
allow_fragments=False,
@ -238,7 +235,7 @@ def join(base_url, path, *extra, **kwargs):
if last_abs_component is not None:
paths = paths[last_abs_component:]
if len(paths) == 1:
result = six.moves.urllib.parse.urlparse(
result = urllib.parse.urlparse(
paths[0],
scheme="file",
allow_fragments=False,
@ -248,7 +245,7 @@ def join(base_url, path, *extra, **kwargs):
# file:// URL component with a relative path, the relative path
# needs to be resolved.
if result.scheme == "file" and result.netloc:
result = six.moves.urllib.parse.ParseResult(
result = urllib.parse.ParseResult(
scheme=result.scheme,
netloc="",
path=posixpath.abspath(result.netloc + result.path),
@ -306,7 +303,7 @@ def _join(base_url, path, *extra, **kwargs):
base_path = convert_to_posix_path(base_path)
return format(
six.moves.urllib.parse.ParseResult(
urllib.parse.ParseResult(
scheme=scheme,
netloc=netloc,
path=base_path,

View file

@ -16,10 +16,8 @@
import sys
import traceback
from html.parser import HTMLParser
import six
from six.moves.urllib.error import URLError
from six.moves.urllib.request import Request, urlopen
from urllib.error import URLError
from urllib.request import Request, urlopen
import llnl.util.lang
import llnl.util.tty as tty
@ -683,7 +681,7 @@ def _spider(url, collect_nested):
return pages, links, subcalls
if isinstance(root_urls, six.string_types):
if isinstance(root_urls, str):
root_urls = [root_urls]
# Clear the local cache of visited pages before starting the search
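six.moves.urllib splits back into urllib.request and urllib.error on Python 3. A minimal hedged sketch of a fetch using the imports above; the URL and header value are placeholders, not what Spack actually sends:

    from urllib.error import URLError
    from urllib.request import Request, urlopen

    url = "https://mirror.spack.io"  # placeholder
    try:
        request = Request(url, headers={"User-Agent": "spack-example"})
        with urlopen(request, timeout=10) as response:
            body = response.read().decode("utf-8", errors="replace")
        print(body[:80])
    except URLError as err:
        print("fetch failed:", err)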

View file

@ -9,12 +9,10 @@
import collections.abc
import functools
import inspect
import io
import itertools
import re
import six
from six import StringIO
import llnl.util.lang as lang
import llnl.util.tty.color
@ -665,7 +663,7 @@ def __str__(self):
bool_keys.append(key) if isinstance(self[key].value, bool) else kv_keys.append(key)
# add spaces before and after key/value variants.
string = StringIO()
string = io.StringIO()
for key in bool_keys:
string.write(str(self[key]))
@ -895,12 +893,12 @@ def __hash__(self):
return hash(self.value)
def __eq__(self, other):
if isinstance(other, (six.string_types, bool)):
if isinstance(other, (str, bool)):
return self.value == other
return self.value == other.value
def __lt__(self, other):
if isinstance(other, six.string_types):
if isinstance(other, str):
return self.value < other
return self.value < other.value

View file

@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import base64
import hashlib
import os
@ -10,14 +11,14 @@
import spack.filesystem_view
import spack.store
import spack.util.file_permissions as fp
import spack.util.py2 as compat
import spack.util.spack_json as sjson
def compute_hash(path):
with open(path, "rb") as f:
sha1 = hashlib.sha1(f.read()).digest()
return compat.b32encode(sha1)
b32 = base64.b32encode(sha1)
return b32.decode()
def create_manifest_entry(path):

View file

@ -30,8 +30,6 @@
from bisect import bisect_left
from functools import wraps
from six import string_types
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp, working_dir
@ -721,9 +719,9 @@ def generate_git_lookup(self, pkg_name):
class VersionRange(object):
def __init__(self, start, end):
if isinstance(start, string_types):
if isinstance(start, str):
start = Version(start)
if isinstance(end, string_types):
if isinstance(end, str):
end = Version(end)
self.start = start
@ -939,7 +937,7 @@ class VersionList(object):
def __init__(self, vlist=None):
self.versions = []
if vlist is not None:
if isinstance(vlist, string_types):
if isinstance(vlist, str):
vlist = _string_to_version(vlist)
if type(vlist) == VersionList:
self.versions = vlist.versions
@ -1193,7 +1191,7 @@ def ver(obj):
"""
if isinstance(obj, (list, tuple)):
return VersionList(obj)
elif isinstance(obj, string_types):
elif isinstance(obj, str):
return _string_to_version(obj)
elif isinstance(obj, (int, float)):
return _string_to_version(str(obj))