Add config option to use urllib to fetch if curl missing (#21398)

* Use Python module urllib to fetch in the case that curl is missing
This commit is contained in:
loulawrence 2021-06-22 16:38:37 -04:00 committed by GitHub
parent 477c8ce820
commit 4da0561496
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
6 changed files with 226 additions and 116 deletions

View file

@@ -316,10 +316,10 @@ def fetch(self):
try:
partial_file, save_file = self._fetch_from_url(url)
if save_file:
if save_file and (partial_file is not None):
os.rename(partial_file, save_file)
break
except FetchError as e:
except FailedDownloadError as e:
errors.append(str(e))
for msg in errors:
@@ -330,6 +330,8 @@ def fetch(self):
def _existing_url(self, url):
tty.debug('Checking existence of {0}'.format(url))
if spack.config.get('config:use_curl'):
curl = self.curl
# Telling curl to fetch the first byte (-r 0-0) is supposed to be
# portable.
@@ -338,8 +340,58 @@ def _existing_url(self, url):
curl_args.append('-k')
_ = curl(*curl_args, fail_on_error=False, output=os.devnull)
return curl.returncode == 0
else:
# Telling urllib to check if url is accessible
try:
url, headers, response = web_util.read_from_url(url)
except web_util.SpackWebError:
msg = "Urllib fetch failed to verify url {0}".format(url)
raise FailedDownloadError(url, msg)
return (response.getcode() is None or response.getcode() == 200)
def _fetch_from_url(self, url):
if spack.config.get('config:use_curl'):
return self._fetch_curl(url)
else:
return self._fetch_urllib(url)
def _check_headers(self, headers):
# Check if we somehow got an HTML file rather than the archive we
# asked for. We only look at the last content type, to handle
# redirects properly.
content_types = re.findall(r'Content-Type:[^\r\n]+', headers,
flags=re.IGNORECASE)
if content_types and 'text/html' in content_types[-1]:
warn_content_type_mismatch(self.archive_file or "the archive")
@_needs_stage
def _fetch_urllib(self, url):
save_file = None
if self.stage.save_filename:
save_file = self.stage.save_filename
tty.msg('Fetching {0}'.format(url))
# Run urllib but grab the mime type from the http headers
try:
url, headers, response = web_util.read_from_url(url)
except web_util.SpackWebError as e:
# clean up archive on failure.
if self.archive_file:
os.remove(self.archive_file)
if save_file and os.path.exists(save_file):
os.remove(save_file)
msg = 'urllib failed to fetch with error {0}'.format(e)
raise FailedDownloadError(url, msg)
_data = response.read()
with open(save_file, 'wb') as _open_file:
_open_file.write(_data)
headers = _data.decode('utf-8', 'ignore')
self._check_headers(headers)
return None, save_file
@_needs_stage
def _fetch_curl(self, url):
save_file = None
partial_file = None
if self.stage.save_filename:
@@ -423,13 +475,7 @@ def _fetch_from_url(self, url):
url,
"Curl failed with error %d" % curl.returncode)
# Check if we somehow got an HTML file rather than the archive we
# asked for. We only look at the last content type, to handle
# redirects properly.
content_types = re.findall(r'Content-Type:[^\r\n]+', headers,
flags=re.IGNORECASE)
if content_types and 'text/html' in content_types[-1]:
warn_content_type_mismatch(self.archive_file or "the archive")
self._check_headers(headers)
return partial_file, save_file
@property # type: ignore # decorated properties unsupported in mypy

View file

@@ -8,27 +8,31 @@
from llnl.util.filesystem import mkdirp, touch
import spack.config
from spack.stage import Stage
from spack.fetch_strategy import CacheURLFetchStrategy, NoCacheError
def test_fetch_missing_cache(tmpdir):
@pytest.mark.parametrize('use_curl', [True, False])
def test_fetch_missing_cache(tmpdir, use_curl):
"""Ensure raise a missing cache file."""
testpath = str(tmpdir)
with spack.config.override('config:use_curl', use_curl):
fetcher = CacheURLFetchStrategy(url='file:///not-a-real-cache-file')
with Stage(fetcher, path=testpath):
with pytest.raises(NoCacheError, match=r'No cache'):
fetcher.fetch()
def test_fetch(tmpdir):
@pytest.mark.parametrize('use_curl', [True, False])
def test_fetch(tmpdir, use_curl):
"""Ensure a fetch after expanding is effectively a no-op."""
testpath = str(tmpdir)
cache = os.path.join(testpath, 'cache.tar.gz')
touch(cache)
url = 'file:///{0}'.format(cache)
with spack.config.override('config:use_curl', use_curl):
fetcher = CacheURLFetchStrategy(url=url)
with Stage(fetcher, path=testpath) as stage:
source_path = stage.source_path

View file

@@ -6,20 +6,25 @@
import os
import pytest
import spack.config as spack_config
import spack.fetch_strategy as spack_fs
import spack.stage as spack_stage
def test_s3fetchstrategy_sans_url():
@pytest.mark.parametrize('use_curl', [True, False])
def test_s3fetchstrategy_sans_url(use_curl):
"""Ensure constructor with no URL fails."""
with spack_config.override('config:use_curl', use_curl):
with pytest.raises(ValueError):
spack_fs.S3FetchStrategy(None)
def test_s3fetchstrategy_bad_url(tmpdir):
@pytest.mark.parametrize('use_curl', [True, False])
def test_s3fetchstrategy_bad_url(tmpdir, use_curl):
"""Ensure fetch with bad URL fails as expected."""
testpath = str(tmpdir)
with spack_config.override('config:use_curl', use_curl):
fetcher = spack_fs.S3FetchStrategy(url='file:///does-not-exist')
assert fetcher is not None
@@ -30,11 +35,13 @@ def test_s3fetchstrategy_bad_url(tmpdir):
fetcher.fetch()
def test_s3fetchstrategy_downloaded(tmpdir):
@pytest.mark.parametrize('use_curl', [True, False])
def test_s3fetchstrategy_downloaded(tmpdir, use_curl):
"""Ensure fetch with archive file already downloaded is a noop."""
testpath = str(tmpdir)
archive = os.path.join(testpath, 's3.tar.gz')
with spack_config.override('config:use_curl', use_curl):
class Archived_S3FS(spack_fs.S3FetchStrategy):
@property
def archive_file(self):

View file

@@ -19,6 +19,7 @@
from spack.version import ver
import spack.util.crypto as crypto
import spack.util.executable
from spack.util.executable import which
@pytest.fixture(params=list(crypto.hashes.keys()))
@@ -47,17 +48,20 @@ def fn(v):
return factory
def test_urlfetchstrategy_sans_url():
@pytest.mark.parametrize('use_curl', [True, False])
def test_urlfetchstrategy_sans_url(use_curl):
"""Ensure constructor with no URL fails."""
with spack.config.override('config:use_curl', use_curl):
with pytest.raises(ValueError):
with fs.URLFetchStrategy(None):
pass
def test_urlfetchstrategy_bad_url(tmpdir):
@pytest.mark.parametrize('use_curl', [True, False])
def test_urlfetchstrategy_bad_url(tmpdir, use_curl):
"""Ensure fetch with bad URL fails as expected."""
testpath = str(tmpdir)
with spack.config.override('config:use_curl', use_curl):
with pytest.raises(fs.FailedDownloadError):
fetcher = fs.URLFetchStrategy(url='file:///does-not-exist')
assert fetcher is not None
@@ -68,7 +72,46 @@ def test_urlfetchstrategy_bad_url(tmpdir):
fetcher.fetch()
def test_fetch_options(tmpdir, mock_archive):
testpath = str(tmpdir)
with spack.config.override('config:use_curl', True):
fetcher = fs.URLFetchStrategy(url=mock_archive.url,
fetch_options={'cookie': 'True',
'timeout': 10})
assert fetcher is not None
with Stage(fetcher, path=testpath) as stage:
assert stage is not None
assert fetcher.archive_file is None
fetcher.fetch()
@pytest.mark.parametrize('use_curl', [True, False])
def test_archive_file_errors(tmpdir, mock_archive, use_curl):
"""Ensure FetchStrategy commands may only be used as intended"""
testpath = str(tmpdir)
with spack.config.override('config:use_curl', use_curl):
fetcher = fs.URLFetchStrategy(url=mock_archive.url)
assert fetcher is not None
with pytest.raises(fs.FailedDownloadError):
with Stage(fetcher, path=testpath) as stage:
assert stage is not None
assert fetcher.archive_file is None
with pytest.raises(fs.NoArchiveFileError):
fetcher.archive(testpath)
with pytest.raises(fs.NoArchiveFileError):
fetcher.expand()
with pytest.raises(fs.NoArchiveFileError):
fetcher.reset()
stage.fetch()
with pytest.raises(fs.NoDigestError):
fetcher.check()
assert fetcher.archive_file is not None
fetcher._fetch_from_url('file:///does-not-exist')
@pytest.mark.parametrize('secure', [True, False])
@pytest.mark.parametrize('use_curl', [True, False])
@pytest.mark.parametrize('mock_archive',
[('.tar.gz', 'z'), ('.tgz', 'z'),
('.tar.bz2', 'j'), ('.tbz2', 'j'),
@@ -77,6 +120,7 @@ def test_urlfetchstrategy_bad_url(tmpdir):
def test_fetch(
mock_archive,
secure,
use_curl,
checksum_type,
config,
mutable_mock_repo
@@ -102,8 +146,8 @@ def test_fetch(
# Enter the stage directory and check some properties
with pkg.stage:
with spack.config.override('config:verify_ssl', secure):
with spack.config.override('config:use_curl', use_curl):
pkg.do_stage()
with working_dir(pkg.stage.source_path):
assert os.path.exists('configure')
assert is_exe('configure')
@@ -123,11 +167,13 @@ def test_fetch(
('url-list-test @3.0a1', 'foo-3.0a1.tar.gz', 'abc30a1'),
('url-list-test @4.5-rc5', 'foo-4.5-rc5.tar.gz', 'abc45rc5'),
])
def test_from_list_url(mock_packages, config, spec, url, digest):
@pytest.mark.parametrize('use_curl', [True, False])
def test_from_list_url(mock_packages, config, spec, url, digest, use_curl):
"""
Test URLs in the url-list-test package, which means they should
have checksums in the package.
"""
with spack.config.override('config:use_curl', use_curl):
specification = Spec(spec).concretized()
pkg = spack.repo.get(specification)
fetch_strategy = fs.from_list_url(pkg)
@@ -140,8 +186,10 @@ def test_from_list_url(mock_packages, config, spec, url, digest):
assert fetch_strategy.extra_options == {'timeout': 60}
def test_from_list_url_unspecified(mock_packages, config):
@pytest.mark.parametrize('use_curl', [True, False])
def test_from_list_url_unspecified(mock_packages, config, use_curl):
"""Test non-specific URLs from the url-list-test package."""
with spack.config.override('config:use_curl', use_curl):
pkg = spack.repo.get('url-list-test')
spec = Spec('url-list-test @2.0.0').concretized()
@@ -176,6 +224,8 @@ def test_unknown_hash(checksum_type):
crypto.Checker('a')
@pytest.mark.skipif(which('curl') is None,
reason='Urllib does not have built-in status bar')
def test_url_with_status_bar(tmpdir, mock_archive, monkeypatch, capfd):
"""Ensure fetch with status bar option succeeds."""
def is_true():
@@ -185,7 +235,7 @@ def is_true():
monkeypatch.setattr(sys.stdout, 'isatty', is_true)
monkeypatch.setattr(tty, 'msg_enabled', is_true)
with spack.config.override('config:use_curl', True):
fetcher = fs.URLFetchStrategy(mock_archive.url)
with Stage(fetcher, path=testpath) as stage:
assert fetcher.archive_file is None
@@ -195,10 +245,11 @@ def is_true():
assert '##### 100' in status
def test_url_extra_fetch(tmpdir, mock_archive):
@pytest.mark.parametrize('use_curl', [True, False])
def test_url_extra_fetch(tmpdir, mock_archive, use_curl):
"""Ensure a fetch after downloading is effectively a no-op."""
with spack.config.override('config:use_curl', use_curl):
testpath = str(tmpdir)
fetcher = fs.URLFetchStrategy(mock_archive.url)
with Stage(fetcher, path=testpath) as stage:
assert fetcher.archive_file is None
@@ -215,10 +266,12 @@ def test_url_extra_fetch(tmpdir, mock_archive):
['https://ftpmirror.gnu.org/autoconf/autoconf-2.62.tar.gz',
'https://ftp.gnu.org/gnu/autoconf/autoconf-2.62.tar.gz'])
])
def test_candidate_urls(pkg_factory, url, urls, version, expected):
@pytest.mark.parametrize('use_curl', [True, False])
def test_candidate_urls(pkg_factory, url, urls, version, expected, use_curl):
"""Tests that candidate urls include mirrors and that they go through
pattern matching and substitution for versions.
"""
with spack.config.override('config:use_curl', use_curl):
pkg = pkg_factory(url, urls)
f = fs._from_merged_attrs(fs.URLFetchStrategy, pkg, version)
assert f.candidate_urls == expected
@@ -244,11 +297,10 @@ def _which(*args, **kwargs):
testpath = str(tmpdir)
url = 'http://github.com/spack/spack'
with spack.config.override('config:use_curl', True):
fetcher = fs.URLFetchStrategy(url=url)
assert fetcher is not None
with pytest.raises(TypeError, match='object is not callable'):
with Stage(fetcher, path=testpath) as stage:
out = stage.fetch()
assert err_fmt.format('curl') in out

View file

@@ -105,6 +105,7 @@ def read_from_url(url, accept_content_type=None):
else:
# User has explicitly indicated that they do not want SSL
# verification.
if not __UNABLE_TO_VERIFY_SSL:
context = ssl._create_unverified_context()
req = Request(url_util.format(url))