deal with TimeoutError from ssl.py (#45683)

Author:    Harmen Stoppels
Date:      2024-08-12 13:06:13 +02:00
Committer: Harmen Stoppels
parent ce7218acae
commit 2c6df0d491
4 changed files with 30 additions and 48 deletions
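
Background for this change: on Python 3.10 and newer, socket.timeout is an alias of TimeoutError, and a timeout raised inside ssl.py during a TLS read can propagate without being wrapped in urllib.error.URLError, so handlers that catch only URLError miss it. The sketch below illustrates the failure mode and the fix applied throughout this commit; the fetch helper and URL handling are illustrative, not code from this commit.

    import urllib.error
    import urllib.request

    def fetch(url: str, timeout: float = 10.0) -> bytes:
        try:
            with urllib.request.urlopen(url, timeout=timeout) as response:
                return response.read()
        except (TimeoutError, urllib.error.URLError) as e:
            # TimeoutError covers timeouts surfaced directly by ssl.py/socket;
            # URLError covers everything urllib wrapped itself.
            raise RuntimeError(f"could not fetch {url}") from e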

lib/spack/spack/binary_distribution.py

@@ -23,7 +23,6 @@
 import warnings
 from contextlib import closing
 from typing import Dict, Iterable, List, NamedTuple, Optional, Set, Tuple
-from urllib.error import HTTPError, URLError
 
 import llnl.util.filesystem as fsys
 import llnl.util.lang
@@ -899,9 +898,8 @@ def url_read_method(url):
         try:
             _, _, spec_file = web_util.read_from_url(url)
             contents = codecs.getreader("utf-8")(spec_file).read()
-        except (URLError, web_util.SpackWebError) as url_err:
-            tty.error("Error reading specfile: {0}".format(url))
-            tty.error(url_err)
+        except web_util.SpackWebError as e:
+            tty.error(f"Error reading specfile: {url}: {e}")
         return contents
 
     try:
@@ -2041,21 +2039,17 @@ def try_direct_fetch(spec, mirrors=None):
         try:
             _, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
             specfile_is_signed = True
-        except (URLError, web_util.SpackWebError, HTTPError) as url_err:
+        except web_util.SpackWebError as e1:
             try:
                 _, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
-            except (URLError, web_util.SpackWebError, HTTPError) as url_err_x:
+            except web_util.SpackWebError as e2:
                 tty.debug(
-                    "Did not find {0} on {1}".format(
-                        specfile_name, buildcache_fetch_url_signed_json
-                    ),
-                    url_err,
+                    f"Did not find {specfile_name} on {buildcache_fetch_url_signed_json}",
+                    e1,
                     level=2,
                 )
                 tty.debug(
-                    "Did not find {0} on {1}".format(specfile_name, buildcache_fetch_url_json),
-                    url_err_x,
-                    level=2,
+                    f"Did not find {specfile_name} on {buildcache_fetch_url_json}", e2, level=2
                 )
                 continue
         specfile_contents = codecs.getreader("utf-8")(fs).read()
@@ -2153,19 +2147,12 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
         try:
             _, _, json_file = web_util.read_from_url(keys_index)
             json_index = sjson.load(codecs.getreader("utf-8")(json_file))
-        except (URLError, web_util.SpackWebError) as url_err:
+        except web_util.SpackWebError as url_err:
             if web_util.url_exists(keys_index):
-                err_msg = [
-                    "Unable to find public keys in {0},",
-                    " caught exception attempting to read from {1}.",
-                ]
                 tty.error(
-                    "".join(err_msg).format(
-                        url_util.format(fetch_url), url_util.format(keys_index)
-                    )
+                    f"Unable to find public keys in {url_util.format(fetch_url)},"
+                    f" caught exception attempting to read from {url_util.format(keys_index)}."
                 )
                 tty.debug(url_err)
             continue
@@ -2445,7 +2432,7 @@ def get_remote_hash(self):
         url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json.hash")
         try:
             response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers))
-        except urllib.error.URLError:
+        except (TimeoutError, urllib.error.URLError):
             return None
 
         # Validate the hash
@@ -2467,7 +2454,7 @@ def conditional_fetch(self) -> FetchIndexResult:
         try:
             response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))
-        except urllib.error.URLError as e:
+        except (TimeoutError, urllib.error.URLError) as e:
             raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e
 
         try:
@@ -2508,10 +2495,7 @@ def __init__(self, url, etag, urlopen=web_util.urlopen):
     def conditional_fetch(self) -> FetchIndexResult:
         # Just do a conditional fetch immediately
         url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
-        headers = {
-            "User-Agent": web_util.SPACK_USER_AGENT,
-            "If-None-Match": '"{}"'.format(self.etag),
-        }
+        headers = {"User-Agent": web_util.SPACK_USER_AGENT, "If-None-Match": f'"{self.etag}"'}
 
         try:
             response = self.urlopen(urllib.request.Request(url, headers=headers))
@@ -2519,14 +2503,14 @@ def conditional_fetch(self) -> FetchIndexResult:
             if e.getcode() == 304:
                 # Not modified; that means fresh.
                 return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
-            raise FetchIndexError("Could not fetch index {}".format(url), e) from e
-        except urllib.error.URLError as e:
-            raise FetchIndexError("Could not fetch index {}".format(url), e) from e
+            raise FetchIndexError(f"Could not fetch index {url}", e) from e
+        except (TimeoutError, urllib.error.URLError) as e:
+            raise FetchIndexError(f"Could not fetch index {url}", e) from e
 
         try:
             result = codecs.getreader("utf-8")(response).read()
         except ValueError as e:
-            raise FetchIndexError("Remote index {} is invalid".format(url), e) from e
+            raise FetchIndexError(f"Remote index {url} is invalid", e) from e
 
         headers = response.headers
         etag_header_value = headers.get("Etag", None) or headers.get("etag", None)
@@ -2557,21 +2541,19 @@ def conditional_fetch(self) -> FetchIndexResult:
                     headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
                 )
             )
-        except urllib.error.URLError as e:
-            raise FetchIndexError(
-                "Could not fetch manifest from {}".format(url_manifest), e
-            ) from e
+        except (TimeoutError, urllib.error.URLError) as e:
+            raise FetchIndexError(f"Could not fetch manifest from {url_manifest}", e) from e
 
         try:
             manifest = json.loads(response.read())
         except Exception as e:
-            raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e
+            raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e
 
         # Get first blob hash, which should be the index.json
         try:
             index_digest = spack.oci.image.Digest.from_string(manifest["layers"][0]["digest"])
         except Exception as e:
-            raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e
+            raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e
 
         # Fresh?
         if index_digest.digest == self.local_hash:
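
A note on why the except clauses above narrow from (URLError, web_util.SpackWebError, HTTPError) to web_util.SpackWebError alone: read_from_url in util/web.py (see the last file in this commit) now converts TimeoutError and URLError into SpackWebError at the transport layer, and HTTPError is a subclass of URLError in any case. A minimal sketch of that wrap-and-narrow pattern, with hypothetical names:

    import urllib.error
    import urllib.request
    from typing import Optional

    class WebError(Exception):
        """Single domain exception for all transport-level failures."""

    def read_from_url(url: str) -> bytes:
        try:
            with urllib.request.urlopen(url, timeout=10) as response:
                return response.read()
        except (TimeoutError, urllib.error.URLError) as e:
            # Wrap every low-level failure in the one domain exception.
            raise WebError(f"download of {url} failed: {e}") from e

    def caller(url: str) -> Optional[bytes]:
        try:
            return read_from_url(url)
        except WebError:  # no need to also catch URLError/HTTPError here
            return None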

lib/spack/spack/ci.py

@@ -1111,7 +1111,7 @@ def main_script_replacements(cmd):
    if cdash_handler and cdash_handler.auth_token:
        try:
            cdash_handler.populate_buildgroup(all_job_names)
-        except (SpackError, HTTPError, URLError) as err:
+        except (SpackError, HTTPError, URLError, TimeoutError) as err:
            tty.warn(f"Problem populating buildgroup: {err}")
    else:
        tty.warn("Unable to populate buildgroup without CDash credentials")
@@ -2095,7 +2095,7 @@ def read_broken_spec(broken_spec_url):
     """
     try:
         _, _, fs = web_util.read_from_url(broken_spec_url)
-    except (URLError, web_util.SpackWebError, HTTPError):
+    except web_util.SpackWebError:
         tty.warn(f"Unable to read broken spec from {broken_spec_url}")
         return None

lib/spack/spack/fetch_strategy.py

@@ -554,7 +554,7 @@ def fetch(self):
         try:
             response = self._urlopen(self.url)
-        except urllib.error.URLError as e:
+        except (TimeoutError, urllib.error.URLError) as e:
             # clean up archive on failure.
             if self.archive_file:
                 os.remove(self.archive_file)
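
The hunk above widens the same except clause around a download that has to clean up after itself. A short sketch of the cleanup-on-failure idiom, with illustrative names; only the widened except tuple comes from this commit:

    import os
    import urllib.error
    import urllib.request

    def download(url: str, dest: str) -> str:
        try:
            with urllib.request.urlopen(url, timeout=10) as r, open(dest, "wb") as f:
                f.write(r.read())
        except (TimeoutError, urllib.error.URLError):
            if os.path.exists(dest):
                os.remove(dest)  # never leave a truncated archive behind
            raise
        return dest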

lib/spack/spack/util/web.py

@@ -197,8 +197,8 @@ def read_from_url(url, accept_content_type=None):
     try:
         response = urlopen(request)
-    except URLError as err:
-        raise SpackWebError("Download failed: {}".format(str(err)))
+    except (TimeoutError, URLError) as e:
+        raise SpackWebError(f"Download of {url.geturl()} failed: {e}")
 
     if accept_content_type:
         try:
@@ -458,8 +458,8 @@ def url_exists(url, curl=None):
            timeout=spack.config.get("config:connect_timeout", 10),
        )
        return True
-    except URLError as e:
-        tty.debug("Failure reading URL: " + str(e))
+    except (TimeoutError, URLError) as e:
+        tty.debug(f"Failure reading {url}: {e}")
        return False
@@ -740,10 +740,10 @@ def _spider(url: urllib.parse.ParseResult, collect_nested: bool, _visited: Set[str]):
                    subcalls.append(abs_link)
                    _visited.add(abs_link)
 
-    except URLError as e:
+    except (TimeoutError, URLError) as e:
        tty.debug(f"[SPIDER] Unable to read: {url}")
        tty.debug(str(e), level=2)
-        if hasattr(e, "reason") and isinstance(e.reason, ssl.SSLError):
+        if isinstance(e, URLError) and isinstance(e.reason, ssl.SSLError):
            tty.warn(
                "Spack was unable to fetch url list due to a "
                "certificate verification problem. You can try "