fix buffered download (#24623)

* Use shutil to do a buffered copy from http response to file (see the sketch just after these bullets)

* Fix flake8...

* Somehow flake8 still complains about unrelated files
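
For context, the gist of the change: rather than reading the entire HTTP response into memory and writing it out afterwards, the response object is streamed to disk in buffered chunks. A minimal standalone sketch with plain urllib, not Spack's actual fetch code; the URL and file name are placeholders:

import shutil
import urllib.request

# Placeholder URL and destination path, for illustration only.
url = 'https://example.com/archive.tar.gz'
save_file = 'archive.tar.gz'

with urllib.request.urlopen(url) as response:
    with open(save_file, 'wb') as handle:
        # shutil.copyfileobj copies in fixed-size chunks, so peak memory
        # use stays small even for very large archives.
        shutil.copyfileobj(response, handle)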
Harmen Stoppels 2021-07-06 14:12:35 +02:00 committed by GitHub
parent 9d36f7f518
commit 545f971bec

@@ -29,11 +29,12 @@
 import re
 import shutil
 import sys
-from typing import Optional, List  # novm
+from typing import List, Optional  # novm
+import llnl.util.tty as tty
 import six
 import six.moves.urllib.parse as urllib_parse
-import llnl.util.tty as tty
 import spack.config
 import spack.error
 import spack.util.crypto as crypto
@@ -41,9 +42,9 @@
 import spack.util.url as url_util
 import spack.util.web as web_util
 from llnl.util.filesystem import (
-    working_dir, mkdirp, temp_rename, temp_cwd, get_single_file)
+    get_single_file, mkdirp, temp_cwd, temp_rename, working_dir)
 from spack.util.compression import decompressor_for, extension
-from spack.util.executable import which, CommandNotFoundError
+from spack.util.executable import CommandNotFoundError, which
 from spack.util.string import comma_and, quote
 from spack.version import Version, ver
@@ -382,12 +383,11 @@ def _fetch_urllib(self, url):
                 os.remove(save_file)
             msg = 'urllib failed to fetch with error {0}'.format(e)
             raise FailedDownloadError(url, msg)
-        _data = response.read()
-        with open(save_file, 'wb') as _open_file:
-            _open_file.write(_data)
-        headers = _data.decode('utf-8', 'ignore')
-        self._check_headers(headers)
+        with open(save_file, 'wb') as _open_file:
+            shutil.copyfileobj(response, _open_file)
+
+        self._check_headers(str(headers))
         return None, save_file

     @_needs_stage
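
A note on the header check in the hunk above: the old code decoded the downloaded payload itself and scanned that text, whereas the new code stringifies the headers object that urllib provides alongside the response (presumably bound earlier in _fetch_urllib). A small sketch, outside of Spack, of why str() is enough here:

import urllib.request

# A urllib response exposes its headers as an http.client.HTTPMessage;
# str() on that object yields the raw header text, so plain substring
# checks (e.g. for a Content-Type) keep working.
with urllib.request.urlopen('https://example.com') as response:
    header_text = str(response.headers)
    print('Content-Type' in header_text)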