Compare commits


No commits in common. "594a376c521cc746978571b1181a47bbcff30a21" and "ac7b18483a9e09293d4026d477be599387b0fe6a" have entirely different histories.

20 changed files with 107 additions and 83 deletions

View file

@@ -1,29 +1,3 @@
-# v0.22.2 (2024-09-21)
-
-## Bugfixes
-- Forward compatibility with Spack 0.23 packages with language dependencies (#45205, #45191)
-- Forward compatibility with `urllib` from Python 3.12.6+ (#46453, #46483)
-- Bump vendored `archspec` for better aarch64 support (#45721, #46445)
-- Support macOS Sequoia (#45018, #45127)
-- Fix regression in `{variants.X}` and `{variants.X.value}` format strings (#46206)
-- Ensure shell escaping of environment variable values in load and activate commands (#42780)
-- Fix an issue where `spec[pkg]` considers specs outside the current DAG (#45090)
-- Do not halt concretization on unknown variants in externals (#45326)
-- Improve validation of `develop` config section (#46485)
-- Explicitly disable `ccache` if turned off in config, to avoid cache pollution (#45275)
-- Improve backwards compatibility in `include_concrete` (#45766)
-- Fix issue where package tags were sometimes repeated (#45160)
-- Make `setup-env.sh` "sourced only" by dropping execution bits (#45641)
-- Make certain source/binary fetch errors recoverable instead of a hard error (#45683)
-- Remove debug statements in package hash computation (#45235)
-- Remove redundant clingo warnings (#45269)
-- Remove hard-coded layout version (#45645)
-- Do not initialize previous store state in `use_store` (#45268)
-- Docs improvements (#46475)
-
-## Package updates
-- `chapel` major update (#42197, #44931, #45304)
-
 # v0.22.1 (2024-07-04)
 
 ## Bugfixes

View file

@@ -4,7 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
-__version__ = "0.22.2"
+__version__ = "0.22.2.dev0"
 spack_version = __version__

View file

@@ -23,6 +23,7 @@
 import warnings
 from contextlib import closing
 from typing import Dict, Iterable, List, NamedTuple, Optional, Set, Tuple
+from urllib.error import HTTPError, URLError
 
 import llnl.util.filesystem as fsys
 import llnl.util.lang
@@ -898,8 +899,9 @@ def url_read_method(url):
         try:
             _, _, spec_file = web_util.read_from_url(url)
             contents = codecs.getreader("utf-8")(spec_file).read()
-        except web_util.SpackWebError as e:
-            tty.error(f"Error reading specfile: {url}: {e}")
+        except (URLError, web_util.SpackWebError) as url_err:
+            tty.error("Error reading specfile: {0}".format(url))
+            tty.error(url_err)
         return contents
 
     try:
@@ -2039,17 +2041,21 @@ def try_direct_fetch(spec, mirrors=None):
         try:
             _, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
             specfile_is_signed = True
-        except web_util.SpackWebError as e1:
+        except (URLError, web_util.SpackWebError, HTTPError) as url_err:
             try:
                 _, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
-            except web_util.SpackWebError as e2:
+            except (URLError, web_util.SpackWebError, HTTPError) as url_err_x:
                 tty.debug(
-                    f"Did not find {specfile_name} on {buildcache_fetch_url_signed_json}",
-                    e1,
+                    "Did not find {0} on {1}".format(
+                        specfile_name, buildcache_fetch_url_signed_json
+                    ),
+                    url_err,
                     level=2,
                 )
                 tty.debug(
-                    f"Did not find {specfile_name} on {buildcache_fetch_url_json}", e2, level=2
+                    "Did not find {0} on {1}".format(specfile_name, buildcache_fetch_url_json),
+                    url_err_x,
+                    level=2,
                 )
                 continue
         specfile_contents = codecs.getreader("utf-8")(fs).read()
@@ -2147,12 +2153,19 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
         try:
             _, _, json_file = web_util.read_from_url(keys_index)
             json_index = sjson.load(codecs.getreader("utf-8")(json_file))
-        except web_util.SpackWebError as url_err:
+        except (URLError, web_util.SpackWebError) as url_err:
             if web_util.url_exists(keys_index):
+                err_msg = [
+                    "Unable to find public keys in {0},",
+                    " caught exception attempting to read from {1}.",
+                ]
                 tty.error(
-                    f"Unable to find public keys in {url_util.format(fetch_url)},"
-                    f" caught exception attempting to read from {url_util.format(keys_index)}."
+                    "".join(err_msg).format(
+                        url_util.format(fetch_url), url_util.format(keys_index)
+                    )
                 )
+
                 tty.debug(url_err)
 
             continue
@@ -2432,7 +2445,7 @@ def get_remote_hash(self):
         url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json.hash")
         try:
             response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers))
-        except (TimeoutError, urllib.error.URLError):
+        except urllib.error.URLError:
             return None
 
         # Validate the hash
@@ -2454,7 +2467,7 @@ def conditional_fetch(self) -> FetchIndexResult:
         try:
             response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))
-        except (TimeoutError, urllib.error.URLError) as e:
+        except urllib.error.URLError as e:
             raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e
 
         try:
@@ -2495,7 +2508,10 @@ def __init__(self, url, etag, urlopen=web_util.urlopen):
     def conditional_fetch(self) -> FetchIndexResult:
         # Just do a conditional fetch immediately
         url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
-        headers = {"User-Agent": web_util.SPACK_USER_AGENT, "If-None-Match": f'"{self.etag}"'}
+        headers = {
+            "User-Agent": web_util.SPACK_USER_AGENT,
+            "If-None-Match": '"{}"'.format(self.etag),
+        }
 
         try:
             response = self.urlopen(urllib.request.Request(url, headers=headers))
@@ -2503,14 +2519,14 @@ def conditional_fetch(self) -> FetchIndexResult:
             if e.getcode() == 304:
                 # Not modified; that means fresh.
                 return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
-            raise FetchIndexError(f"Could not fetch index {url}", e) from e
-        except (TimeoutError, urllib.error.URLError) as e:
-            raise FetchIndexError(f"Could not fetch index {url}", e) from e
+            raise FetchIndexError("Could not fetch index {}".format(url), e) from e
+        except urllib.error.URLError as e:
+            raise FetchIndexError("Could not fetch index {}".format(url), e) from e
 
         try:
             result = codecs.getreader("utf-8")(response).read()
         except ValueError as e:
-            raise FetchIndexError(f"Remote index {url} is invalid", e) from e
+            raise FetchIndexError("Remote index {} is invalid".format(url), e) from e
 
         headers = response.headers
         etag_header_value = headers.get("Etag", None) or headers.get("etag", None)
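
Both sides of this hunk implement the same HTTP caching idea: send the stored ETag in an If-None-Match header and treat a 304 response as "the cached index is still fresh." A minimal standalone sketch of that pattern, using only the standard library (the function, URL, and variable names here are illustrative, not Spack's):

import urllib.error
import urllib.request

def fetch_if_changed(url, etag=None):
    # Ask the server to return 304 Not Modified if our cached copy is still current.
    headers = {"If-None-Match": '"{}"'.format(etag)} if etag else {}
    request = urllib.request.Request(url, headers=headers)
    try:
        response = urllib.request.urlopen(request, timeout=10)
    except urllib.error.HTTPError as e:
        if e.getcode() == 304:
            return None, etag  # not modified; keep using the cached data
        raise
    data = response.read()
    # Remember the new validator for the next conditional request.
    new_etag = response.headers.get("Etag") or response.headers.get("etag")
    return data, new_etag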
@@ -2541,19 +2557,21 @@ def conditional_fetch(self) -> FetchIndexResult:
                     headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
                 )
             )
-        except (TimeoutError, urllib.error.URLError) as e:
-            raise FetchIndexError(f"Could not fetch manifest from {url_manifest}", e) from e
+        except urllib.error.URLError as e:
+            raise FetchIndexError(
+                "Could not fetch manifest from {}".format(url_manifest), e
+            ) from e
 
         try:
             manifest = json.loads(response.read())
         except Exception as e:
-            raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e
+            raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e
 
         # Get first blob hash, which should be the index.json
         try:
             index_digest = spack.oci.image.Digest.from_string(manifest["layers"][0]["digest"])
         except Exception as e:
-            raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e
+            raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e
 
         # Fresh?
         if index_digest.digest == self.local_hash:

View file

@@ -480,12 +480,9 @@ def set_wrapper_variables(pkg, env):
     env.set(SPACK_DEBUG_LOG_ID, pkg.spec.format("{name}-{hash:7}"))
     env.set(SPACK_DEBUG_LOG_DIR, spack.main.spack_working_dir)
 
+    # Find ccache binary and hand it to build environment
     if spack.config.get("config:ccache"):
-        # Enable ccache in the compiler wrapper
         env.set(SPACK_CCACHE_BINARY, spack.util.executable.which_string("ccache", required=True))
-    else:
-        # Avoid cache pollution if a build system forces `ccache <compiler wrapper invocation>`.
-        env.set("CCACHE_DISABLE", "1")
 
     # Gather information about various types of dependencies
     link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
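
For reference, CCACHE_DISABLE is ccache's documented switch for bypassing the cache entirely, which is why the release side of this hunk sets it whenever ccache is turned off in Spack's config: even a build system that hard-codes a `ccache cc ...` invocation will then run without populating the cache. A small self-contained sketch of that logic (the function name and plain dict stand in for Spack's env-modification machinery):

import shutil

def configure_ccache(env, enabled):
    """Fill a build-environment dict based on a ccache on/off setting."""
    if enabled:
        ccache = shutil.which("ccache")
        if ccache is None:
            raise RuntimeError("ccache requested but not found in PATH")
        env["SPACK_CCACHE_BINARY"] = ccache
    else:
        # ccache honors CCACHE_DISABLE: the real compiler runs, nothing is cached,
        # so a build system that forces ccache cannot pollute the cache.
        env["CCACHE_DISABLE"] = "1"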

View file

@@ -1111,7 +1111,7 @@ def main_script_replacements(cmd):
     if cdash_handler and cdash_handler.auth_token:
         try:
             cdash_handler.populate_buildgroup(all_job_names)
-        except (SpackError, HTTPError, URLError, TimeoutError) as err:
+        except (SpackError, HTTPError, URLError) as err:
             tty.warn(f"Problem populating buildgroup: {err}")
     else:
         tty.warn("Unable to populate buildgroup without CDash credentials")

@@ -2095,7 +2095,7 @@ def read_broken_spec(broken_spec_url):
     """
     try:
         _, _, fs = web_util.read_from_url(broken_spec_url)
-    except web_util.SpackWebError:
+    except (URLError, web_util.SpackWebError, HTTPError):
         tty.warn(f"Unable to read broken spec from {broken_spec_url}")
         return None

View file

@@ -813,7 +813,7 @@ def _push_oci(
     def extra_config(spec: Spec):
         spec_dict = spec.to_dict(hash=ht.dag_hash)
-        spec_dict["buildcache_layout_version"] = bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
+        spec_dict["buildcache_layout_version"] = 1
         spec_dict["binary_cache_checksum"] = {
             "hash_algorithm": "sha256",
             "hash": checksums[spec.dag_hash()].compressed_digest.digest,

View file

@@ -11,6 +11,7 @@
 from argparse import ArgumentParser, Namespace
 from typing import IO, Any, Callable, Dict, Iterable, List, Optional, Sequence, Set, Tuple, Union
 
+import llnl.util.filesystem as fs
 import llnl.util.tty as tty
 from llnl.util.argparsewriter import ArgparseRstWriter, ArgparseWriter, Command
 from llnl.util.tty.colify import colify

@@ -866,6 +867,9 @@ def _commands(parser: ArgumentParser, args: Namespace) -> None:
             prepend_header(args, f)
             formatter(args, f)
 
+            if args.update_completion:
+                fs.set_executable(args.update)
+
     else:
         prepend_header(args, sys.stdout)
         formatter(args, sys.stdout)

View file

@@ -554,7 +554,7 @@ def fetch(self):
         try:
             response = self._urlopen(self.url)
-        except (TimeoutError, urllib.error.URLError) as e:
+        except urllib.error.URLError as e:
             # clean up archive on failure.
             if self.archive_file:
                 os.remove(self.archive_file)

View file

@@ -199,10 +199,10 @@ def __init__(cls, name, bases, attr_dict):
         # assumed to be detectable
         if hasattr(cls, "executables") or hasattr(cls, "libraries"):
             # Append a tag to each detectable package, so that finding them is faster
-            if not hasattr(cls, "tags"):
+            if hasattr(cls, "tags"):
+                getattr(cls, "tags").append(DetectablePackageMeta.TAG)
+            else:
                 setattr(cls, "tags", [DetectablePackageMeta.TAG])
-            elif DetectablePackageMeta.TAG not in cls.tags:
-                cls.tags.append(DetectablePackageMeta.TAG)
 
     @classmethod
     def platform_executables(cls):
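
The guard on the base side is what prevents the duplicated tags reported in #45160: the tag is only appended when it is not already in the list. A self-contained sketch of that guarded pattern (class and tag names are illustrative, not Spack's):

TAG = "detectable"

class DetectableMeta(type):
    def __init__(cls, name, bases, attr_dict):
        super().__init__(name, bases, attr_dict)
        if hasattr(cls, "executables") or hasattr(cls, "libraries"):
            if not hasattr(cls, "tags"):
                cls.tags = [TAG]
            elif TAG not in cls.tags:
                # Only append when missing, so processing a subclass that
                # inherits the same list does not duplicate the tag.
                cls.tags.append(TAG)

class Gcc(metaclass=DetectableMeta):
    executables = ["gcc"]

assert Gcc.tags == ["detectable"]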

View file

@@ -160,13 +160,22 @@ def test_reverse_environment_modifications(working_env):
     assert os.environ == start_env
 
 
-def test_shell_modifications_are_properly_escaped():
-    """Test that variable values are properly escaped so that they can safely be eval'd."""
-    changes = envutil.EnvironmentModifications()
-    changes.set("VAR", "$PATH")
-    changes.append_path("VAR", "$ANOTHER_PATH")
-    changes.set("RM_RF", "$(rm -rf /)")
-
-    script = changes.shell_modifications(shell="sh")
-    assert f"export VAR='$PATH{os.pathsep}$ANOTHER_PATH'" in script
-    assert "export RM_RF='$(rm -rf /)'" in script
+def test_escape_double_quotes_in_shell_modifications():
+    to_validate = envutil.EnvironmentModifications()
+
+    to_validate.set("VAR", "$PATH")
+    to_validate.append_path("VAR", "$ANOTHER_PATH")
+
+    to_validate.set("QUOTED_VAR", '"MY_VAL"')
+
+    if sys.platform == "win32":
+        cmds = to_validate.shell_modifications(shell="bat")
+        assert r'set "VAR=$PATH;$ANOTHER_PATH"' in cmds
+        assert r'set "QUOTED_VAR="MY_VAL"' in cmds
+        cmds = to_validate.shell_modifications(shell="pwsh")
+        assert "$Env:VAR='$PATH;$ANOTHER_PATH'" in cmds
+        assert "$Env:QUOTED_VAR='\"MY_VAL\"'" in cmds
+    else:
+        cmds = to_validate.shell_modifications()
+        assert 'export VAR="$PATH:$ANOTHER_PATH"' in cmds
+        assert r'export QUOTED_VAR="\"MY_VAL\""' in cmds

View file

@@ -11,7 +11,6 @@
 import os.path
 import pickle
 import re
-import shlex
 import sys
 from functools import wraps
 from typing import Any, Callable, Dict, List, MutableMapping, Optional, Tuple, Union
@@ -64,6 +63,26 @@
 ModificationList = List[Union["NameModifier", "NameValueModifier"]]
 
+_find_unsafe = re.compile(r"[^\w@%+=:,./-]", re.ASCII).search
+
+
+def double_quote_escape(s):
+    """Return a shell-escaped version of the string *s*.
+
+    This is similar to how shlex.quote works, but it escapes with double quotes
+    instead of single quotes, to allow environment variable expansion within
+    quoted strings.
+    """
+    if not s:
+        return '""'
+    if _find_unsafe(s) is None:
+        return s
+
+    # use double quotes, and escape double quotes in the string
+    # the string $"b is then quoted as "$\"b"
+    return '"' + s.replace('"', r"\"") + '"'
+
+
 def system_env_normalize(func):
     """Decorator wrapping calls to system env modifications,
     converting all env variable names to all upper case on Windows, no-op
@@ -163,7 +182,7 @@ def _nix_env_var_to_source_line(var: str, val: str) -> str:
             fname=BASH_FUNCTION_FINDER.sub(r"\1", var), decl=val
         )
     else:
-        source_line = f"{var}={shlex.quote(val)}; export {var}"
+        source_line = f"{var}={double_quote_escape(val)}; export {var}"
     return source_line
@@ -672,10 +691,11 @@ def shell_modifications(
             if new is None:
                 cmds += _SHELL_UNSET_STRINGS[shell].format(name)
             else:
-                value = new_env[name]
-                if shell not in ("bat", "pwsh"):
-                    value = shlex.quote(value)
-                cmd = _SHELL_SET_STRINGS[shell].format(name, value)
+                if sys.platform != "win32":
+                    new_env_name = double_quote_escape(new_env[name])
+                else:
+                    new_env_name = new_env[name]
+                cmd = _SHELL_SET_STRINGS[shell].format(name, new_env_name)
                 cmds += cmd
         return cmds
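
The practical difference between the two sides is what the generated export line does when it is later eval'd: the head's double-quote escaping leaves $-expansion and command substitution active, while the base's shlex.quote single-quotes the value so it is taken literally, which is exactly what the rewritten test earlier in this diff asserts. A quick comparison using a trimmed-down copy of the helper shown above:

import shlex

def double_quote_escape(s):
    # head-side behavior, trimmed down: wrap in double quotes, escape embedded quotes
    return '"' + s.replace('"', r"\"") + '"'

value = "$(rm -rf /)"
print("VAR=" + double_quote_escape(value))  # VAR="$(rm -rf /)"  -> a shell eval'ing this runs the command
print("VAR=" + shlex.quote(value))          # VAR='$(rm -rf /)'  -> the value is taken literally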

View file

@@ -554,7 +554,9 @@ def visit_FormattedValue(self, node):
     def _fstring_JoinedStr(self, node, write):
         for value in node.values:
+            print(" ", value)
             meth = getattr(self, "_fstring_" + type(value).__name__)
+            print(meth)
             meth(value, write)
 
     def _fstring_Str(self, node, write):

View file

@@ -197,8 +197,8 @@ def read_from_url(url, accept_content_type=None):
     try:
         response = urlopen(request)
-    except (TimeoutError, URLError) as e:
-        raise SpackWebError(f"Download of {url.geturl()} failed: {e}")
+    except URLError as err:
+        raise SpackWebError("Download failed: {}".format(str(err)))
 
     if accept_content_type:
         try:

@@ -458,8 +458,8 @@ def url_exists(url, curl=None):
             timeout=spack.config.get("config:connect_timeout", 10),
         )
         return True
-    except (TimeoutError, URLError) as e:
-        tty.debug(f"Failure reading {url}: {e}")
+    except URLError as e:
+        tty.debug("Failure reading URL: " + str(e))
         return False

@@ -740,10 +740,10 @@ def _spider(url: urllib.parse.ParseResult, collect_nested: bool, _visited: Set[s
                     subcalls.append(abs_link)
                     _visited.add(abs_link)
 
-    except (TimeoutError, URLError) as e:
+    except URLError as e:
         tty.debug(f"[SPIDER] Unable to read: {url}")
         tty.debug(str(e), level=2)
-        if isinstance(e, URLError) and isinstance(e.reason, ssl.SSLError):
+        if hasattr(e, "reason") and isinstance(e.reason, ssl.SSLError):
             tty.warn(
                 "Spack was unable to fetch url list due to a "
                 "certificate verification problem. You can try "

View file

@@ -52,7 +52,7 @@ if [[ "$SPACK_TEST_SOLVER" == "original" ]]; then
 fi
 
 # Check if xdist is available
-if [[ "$UNIT_TEST_COVERAGE" != "true" ]] && python -m pytest -VV 2>&1 | grep xdist; then
+if python -m pytest --trace-config 2>&1 | grep xdist; then
     export PYTEST_ADDOPTS="$PYTEST_ADDOPTS --dist loadfile --tx '${SPACK_TEST_PARALLEL:=3}*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python'"
 fi

@@ -66,9 +66,9 @@ fi
 # where it seems that otherwise the configuration file might not be located by subprocesses
 # in some, not better specified, cases.
 if [[ "$UNIT_TEST_COVERAGE" == "true" ]]; then
-    "$(which spack)" unit-test -x --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml
+    $(which spack) unit-test -x --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml
 else
-    "$(which spack)" unit-test -x --verbose
+    $(which spack) unit-test -x --verbose
 fi

share/spack/setup-env.csh          Normal file → Executable file (mode change only, 0 lines changed)
share/spack/setup-env.fish         Normal file → Executable file (mode change only, 0 lines changed)
share/spack/setup-env.sh           Normal file → Executable file (mode change only, 0 lines changed)
share/spack/setup-tutorial-env.sh  Normal file → Executable file (mode change only, 0 lines changed)
share/spack/spack-completion.bash  Normal file → Executable file (mode change only, 0 lines changed)
share/spack/spack-completion.fish  Normal file → Executable file (mode change only, 0 lines changed)