From 2c6df0d491be47730368dc48f0530ba706aa0907 Mon Sep 17 00:00:00 2001
From: Harmen Stoppels
Date: Mon, 12 Aug 2024 13:06:13 +0200
Subject: [PATCH] deal with TimeoutError from ssl.py (#45683)
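
Python's ssl module raises the builtin TimeoutError when a read or write
times out (socket.timeout has been an alias of TimeoutError since Python
3.10). urllib wraps errors raised while sending a request in URLError, but
a timeout that fires while the response is being read escapes urlopen()
unwrapped, so an `except urllib.error.URLError` clause alone misses it.
A minimal sketch of the pattern this patch applies at the low-level
urlopen() call sites (fetch() and RuntimeError are illustrative, not part
of the patch):

    import urllib.request
    from urllib.error import URLError

    def fetch(url: str, timeout: float = 10.0) -> bytes:
        try:
            with urllib.request.urlopen(url, timeout=timeout) as response:
                return response.read()
        except (TimeoutError, URLError) as e:
            # TimeoutError subclasses OSError, not URLError, so both must
            # be listed for timeouts raised in ssl.py to land here.
            raise RuntimeError(f"could not fetch {url}") from e

Call sites that go through spack.util.web.read_from_url() instead catch
web_util.SpackWebError, which read_from_url() now raises for both cases.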
---
 lib/spack/spack/binary_distribution.py | 60 +++++++++-----------------
 lib/spack/spack/ci.py                  |  4 +-
 lib/spack/spack/fetch_strategy.py      |  2 +-
 lib/spack/spack/util/web.py            | 12 +++---
 4 files changed, 30 insertions(+), 48 deletions(-)

diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py
index eec9d95879..c72a0750f9 100644
--- a/lib/spack/spack/binary_distribution.py
+++ b/lib/spack/spack/binary_distribution.py
@@ -23,7 +23,6 @@
 import warnings
 from contextlib import closing
 from typing import Dict, Iterable, List, NamedTuple, Optional, Set, Tuple
-from urllib.error import HTTPError, URLError
 
 import llnl.util.filesystem as fsys
 import llnl.util.lang
@@ -899,9 +898,8 @@ def url_read_method(url):
        try:
            _, _, spec_file = web_util.read_from_url(url)
            contents = codecs.getreader("utf-8")(spec_file).read()
-        except (URLError, web_util.SpackWebError) as url_err:
-            tty.error("Error reading specfile: {0}".format(url))
-            tty.error(url_err)
+        except web_util.SpackWebError as e:
+            tty.error(f"Error reading specfile: {url}: {e}")
        return contents
 
    try:
@@ -2041,21 +2039,17 @@ def try_direct_fetch(spec, mirrors=None):
        try:
            _, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
            specfile_is_signed = True
-        except (URLError, web_util.SpackWebError, HTTPError) as url_err:
+        except web_util.SpackWebError as e1:
            try:
                _, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
-            except (URLError, web_util.SpackWebError, HTTPError) as url_err_x:
+            except web_util.SpackWebError as e2:
                tty.debug(
-                    "Did not find {0} on {1}".format(
-                        specfile_name, buildcache_fetch_url_signed_json
-                    ),
-                    url_err,
+                    f"Did not find {specfile_name} on {buildcache_fetch_url_signed_json}",
+                    e1,
                    level=2,
                )
                tty.debug(
-                    "Did not find {0} on {1}".format(specfile_name, buildcache_fetch_url_json),
-                    url_err_x,
-                    level=2,
+                    f"Did not find {specfile_name} on {buildcache_fetch_url_json}", e2, level=2
                )
                continue
        specfile_contents = codecs.getreader("utf-8")(fs).read()
@@ -2153,19 +2147,12 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
        try:
            _, _, json_file = web_util.read_from_url(keys_index)
            json_index = sjson.load(codecs.getreader("utf-8")(json_file))
-        except (URLError, web_util.SpackWebError) as url_err:
+        except web_util.SpackWebError as url_err:
            if web_util.url_exists(keys_index):
-                err_msg = [
-                    "Unable to find public keys in {0},",
-                    " caught exception attempting to read from {1}.",
-                ]
-
                tty.error(
-                    "".join(err_msg).format(
-                        url_util.format(fetch_url), url_util.format(keys_index)
-                    )
+                    f"Unable to find public keys in {url_util.format(fetch_url)},"
+                    f" caught exception attempting to read from {url_util.format(keys_index)}."
                )
-
                tty.debug(url_err)
 
            continue
@@ -2445,7 +2432,7 @@ def get_remote_hash(self):
        url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json.hash")
        try:
            response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers))
-        except urllib.error.URLError:
+        except (TimeoutError, urllib.error.URLError):
            return None
 
        # Validate the hash
@@ -2467,7 +2454,7 @@ def conditional_fetch(self) -> FetchIndexResult:
 
        try:
            response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))
-        except urllib.error.URLError as e:
+        except (TimeoutError, urllib.error.URLError) as e:
            raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e
 
        try:
@@ -2508,10 +2495,7 @@ def __init__(self, url, etag, urlopen=web_util.urlopen):
    def conditional_fetch(self) -> FetchIndexResult:
        # Just do a conditional fetch immediately
        url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
-        headers = {
-            "User-Agent": web_util.SPACK_USER_AGENT,
-            "If-None-Match": '"{}"'.format(self.etag),
-        }
+        headers = {"User-Agent": web_util.SPACK_USER_AGENT, "If-None-Match": f'"{self.etag}"'}
 
        try:
            response = self.urlopen(urllib.request.Request(url, headers=headers))
@@ -2519,14 +2503,14 @@ def conditional_fetch(self) -> FetchIndexResult:
            if e.getcode() == 304:
                # Not modified; that means fresh.
                return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
-            raise FetchIndexError("Could not fetch index {}".format(url), e) from e
-        except urllib.error.URLError as e:
-            raise FetchIndexError("Could not fetch index {}".format(url), e) from e
+            raise FetchIndexError(f"Could not fetch index {url}", e) from e
+        except (TimeoutError, urllib.error.URLError) as e:
+            raise FetchIndexError(f"Could not fetch index {url}", e) from e
 
        try:
            result = codecs.getreader("utf-8")(response).read()
        except ValueError as e:
-            raise FetchIndexError("Remote index {} is invalid".format(url), e) from e
+            raise FetchIndexError(f"Remote index {url} is invalid", e) from e
 
        headers = response.headers
        etag_header_value = headers.get("Etag", None) or headers.get("etag", None)
@@ -2557,21 +2541,19 @@ def conditional_fetch(self) -> FetchIndexResult:
                    headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
                )
            )
-        except urllib.error.URLError as e:
-            raise FetchIndexError(
-                "Could not fetch manifest from {}".format(url_manifest), e
-            ) from e
+        except (TimeoutError, urllib.error.URLError) as e:
+            raise FetchIndexError(f"Could not fetch manifest from {url_manifest}", e) from e
 
        try:
            manifest = json.loads(response.read())
        except Exception as e:
-            raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e
+            raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e
 
        # Get first blob hash, which should be the index.json
        try:
            index_digest = spack.oci.image.Digest.from_string(manifest["layers"][0]["digest"])
        except Exception as e:
-            raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e
+            raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e
 
        # Fresh?
        if index_digest.digest == self.local_hash:
diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py
index bd664c664d..21def68779 100644
--- a/lib/spack/spack/ci.py
+++ b/lib/spack/spack/ci.py
@@ -1111,7 +1111,7 @@ def main_script_replacements(cmd):
        if cdash_handler and cdash_handler.auth_token:
            try:
                cdash_handler.populate_buildgroup(all_job_names)
-            except (SpackError, HTTPError, URLError) as err:
+            except (SpackError, HTTPError, URLError, TimeoutError) as err:
                tty.warn(f"Problem populating buildgroup: {err}")
        else:
            tty.warn("Unable to populate buildgroup without CDash credentials")
@@ -2095,7 +2095,7 @@ def read_broken_spec(broken_spec_url):
    """
    try:
        _, _, fs = web_util.read_from_url(broken_spec_url)
-    except (URLError, web_util.SpackWebError, HTTPError):
+    except web_util.SpackWebError:
        tty.warn(f"Unable to read broken spec from {broken_spec_url}")
        return None
 
diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py
index c803b304c2..4aa7f339de 100644
--- a/lib/spack/spack/fetch_strategy.py
+++ b/lib/spack/spack/fetch_strategy.py
@@ -554,7 +554,7 @@ def fetch(self):
 
        try:
            response = self._urlopen(self.url)
-        except urllib.error.URLError as e:
+        except (TimeoutError, urllib.error.URLError) as e:
            # clean up archive on failure.
            if self.archive_file:
                os.remove(self.archive_file)
diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py
index 8c843c5346..b681bb4950 100644
--- a/lib/spack/spack/util/web.py
+++ b/lib/spack/spack/util/web.py
@@ -197,8 +197,8 @@ def read_from_url(url, accept_content_type=None):
 
    try:
        response = urlopen(request)
-    except URLError as err:
-        raise SpackWebError("Download failed: {}".format(str(err)))
+    except (TimeoutError, URLError) as e:
+        raise SpackWebError(f"Download of {url.geturl()} failed: {e}")
 
    if accept_content_type:
        try:
@@ -458,8 +458,8 @@ def url_exists(url, curl=None):
            timeout=spack.config.get("config:connect_timeout", 10),
        )
        return True
-    except URLError as e:
-        tty.debug("Failure reading URL: " + str(e))
+    except (TimeoutError, URLError) as e:
+        tty.debug(f"Failure reading {url}: {e}")
        return False
 
 
@@ -740,10 +740,10 @@ def _spider(url: urllib.parse.ParseResult, collect_nested: bool, _visited: Set[s
                        subcalls.append(abs_link)
                        _visited.add(abs_link)
 
-    except URLError as e:
+    except (TimeoutError, URLError) as e:
        tty.debug(f"[SPIDER] Unable to read: {url}")
        tty.debug(str(e), level=2)
-        if hasattr(e, "reason") and isinstance(e.reason, ssl.SSLError):
+        if isinstance(e, URLError) and isinstance(e.reason, ssl.SSLError):
            tty.warn(
                "Spack was unable to fetch url list due to a "
                "certificate verification problem. You can try "
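
Note on the narrowed except clauses in the hunks above: read_from_url()
now converts both TimeoutError and URLError into SpackWebError, so its
callers (url_read_method, try_direct_fetch, get_keys, read_broken_spec)
catch one exception type instead of re-listing urllib internals. A rough
sketch of that contract, simplified from spack.util.web rather than copied
from it (the mirror URL is a placeholder):

    import urllib.request
    from urllib.error import URLError

    class SpackWebError(Exception):
        """Single error type for web failures, as in spack.util.web."""

    def read_from_url(url: str, timeout: float = 10.0):
        try:
            return urllib.request.urlopen(url, timeout=timeout)
        except (TimeoutError, URLError) as e:
            # Wrap low-level errors so callers need only one except clause.
            raise SpackWebError(f"Download of {url} failed: {e}") from e

    try:
        response = read_from_url("https://mirror.example.com/index.json")
    except SpackWebError as e:
        print(f"Error reading index: {e}")

Dropping HTTPError from those caller-side clauses is safe for the same
reason: HTTPError subclasses URLError, so the wrapping except block above
already covers it.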