bugfix: fetch prefers to fetch local mirrors over remote resources (#13545)
- [x] insert at beginning of list so fetch grabs local mirrors before remote resources
- [x] update the S3FetchStrategy so that it throws a SpackError if the fetch fails. Before, it was throwing URLError, which was not being caught in stage.py.
- [x] move error handling out of S3FetchStrategy and into web_util.read_from_url()
- [x] pass string instead of URLError to SpackWebError
parent: bcda14f825
commit: 0784ec1a6d
3 changed files with 11 additions and 8 deletions
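For context on why list position matters here: Stage.fetch() walks its fetcher list in order and stops at the first fetcher that succeeds, so index 0 is the highest priority. Below is a minimal sketch of that first-success pattern, with hypothetical names rather than Spack's actual code:

```python
# First-success fetcher loop (hypothetical names, not Spack's actual
# implementation). Position in the list is priority: index 0 runs first.
class FetchError(Exception):
    """Soft failure: the loop tolerates this and tries the next fetcher."""

def run_fetchers(fetchers):
    for fetcher in fetchers:
        try:
            return fetcher()  # first fetcher that succeeds wins
        except FetchError:
            continue          # tolerated failure; try the next one
    raise FetchError("all fetchers failed")
```

Seen through that loop, the three checklist items are one fix: put mirrors at index 0, and make every failure surface as an error type the caller actually catches.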
lib/spack/spack/fetch_strategy.py

```diff
@@ -1129,7 +1129,7 @@ def fetch(self):
 
         parsed_url = url_util.parse(self.url)
         if parsed_url.scheme != 's3':
-            raise ValueError(
+            raise FetchError(
                 'S3FetchStrategy can only fetch from s3:// urls.')
 
         tty.msg("Fetching %s" % self.url)
```
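The one-word change above matters because of how the caller handles failures: per the checklist, stage.py catches Spack's fetch errors but not arbitrary exceptions, so a ValueError aborts the whole fetch while a FetchError lets the next strategy run. A standalone sketch under that assumption:

```python
# Standalone sketch (hypothetical fetchers). A FetchError is a tolerated
# failure that triggers fallback; a ValueError would propagate and abort.
class FetchError(Exception):
    pass

def s3_fetch():
    # Mirrors the patched check: a non-s3 URL now raises FetchError.
    raise FetchError("S3FetchStrategy can only fetch from s3:// urls.")

def http_fetch():
    return "archive bytes"

try:
    data = s3_fetch()
except FetchError:
    data = http_fetch()  # fall through to the next strategy
print(data)              # -> archive bytes
```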
lib/spack/spack/fetch_strategy.py

```diff
@@ -1395,7 +1395,7 @@ class NoCacheError(FetchError):
 
 
 class FailedDownloadError(FetchError):
-    """Raised wen a download fails."""
+    """Raised when a download fails."""
     def __init__(self, url, msg=""):
        super(FailedDownloadError, self).__init__(
             "Failed to fetch file from URL: %s" % url, msg)
```
lib/spack/spack/stage.py

```diff
@@ -433,11 +433,9 @@ def fetch(self, mirror_only=False):
 
         # Add URL strategies for all the mirrors with the digest
         for url in urls:
-            fetchers.append(fs.from_url_scheme(
-                url, digest, expand=expand, extension=extension))
-            # fetchers.insert(
-            #     0, fs.URLFetchStrategy(
-            #         url, digest, expand=expand, extension=extension))
+            fetchers.insert(
+                0, fs.from_url_scheme(
+                    url, digest, expand=expand, extension=extension))
 
         if self.default_fetcher.cachable:
             for rel_path in reversed(list(self.mirror_paths)):
```
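The append-to-insert switch above is what actually flips the priority: each mirror fetcher now lands at the front of the list, ahead of the default remote fetcher, instead of behind it. A toy illustration of the resulting order, using placeholder strings instead of fetcher objects:

```python
# Placeholder strings stand in for fetcher objects.
fetchers = ["default_remote_fetcher"]
mirror_urls = ["mirror_a", "mirror_b"]

# Old behavior: append puts mirrors after the remote fetcher.
before = fetchers + mirror_urls
print(before)  # ['default_remote_fetcher', 'mirror_a', 'mirror_b']

# New behavior: insert(0, ...) puts each mirror at the front. Note that
# repeated insert(0, ...) also reverses the mirrors' relative order.
after = list(fetchers)
for url in mirror_urls:
    after.insert(0, url)
print(after)   # ['mirror_b', 'mirror_a', 'default_remote_fetcher']
```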
lib/spack/spack/util/web.py

```diff
@@ -177,7 +177,12 @@ def read_from_url(url, accept_content_type=None):
 
     # Do the real GET request when we know it's just HTML.
     req.get_method = lambda: "GET"
-    response = _urlopen(req, timeout=_timeout, context=context)
+
+    try:
+        response = _urlopen(req, timeout=_timeout, context=context)
+    except URLError as err:
+        raise SpackWebError('Download failed: {ERROR}'.format(
+            ERROR=str(err)))
 
     if accept_content_type and not is_web_url:
         content_type = response.headers.get('Content-type')
```
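With the handling centralized in read_from_url(), callers now see one Spack-level exception instead of urllib's URLError, and the underlying error travels as a plain string (the last checklist item). A self-contained sketch of the same pattern, using plain urllib and a stand-in SpackWebError rather than Spack's wrapped _urlopen:

```python
# Self-contained sketch: translate urllib's URLError into a
# project-level error that carries the message as a plain string.
from urllib.request import urlopen
from urllib.error import URLError

class SpackWebError(Exception):
    """Stand-in for Spack's web error type (sketch only)."""

def read_from_url(url, timeout=10):
    try:
        response = urlopen(url, timeout=timeout)
    except URLError as err:
        # Pass a string, not the URLError object, so the message
        # survives cleanly without urllib types leaking upward.
        raise SpackWebError('Download failed: {ERROR}'.format(
            ERROR=str(err)))
    return response.read()
```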