oci: improve default_retry (#44132)

Apparently urllib can raise a range of different exceptions:

1. HTTPError
2. URLError with e.reason set to the actual exception
3. TimeoutError from getresponse, which is not wrapped in URLError (see the sketch below for how each shape is detected)
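
For context, a minimal sketch of a predicate that recognizes all three shapes. The helper name is_retryable is an illustration, not part of this commit; note that HTTPError subclasses URLError, so it has to be checked first:

import urllib.error


def is_retryable(e: Exception) -> bool:
    # 1. HTTPError carries a status code directly: retry on 5xx and 429.
    if isinstance(e, urllib.error.HTTPError):
        return 500 <= e.code < 600 or e.code == 429
    # 2. URLError stores the underlying exception on e.reason.
    if isinstance(e, urllib.error.URLError):
        return isinstance(e.reason, TimeoutError)
    # 3. A bare TimeoutError from getresponse, not wrapped in URLError.
    return isinstance(e, TimeoutError)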
Harmen Stoppels, 2024-05-11 15:43:32 +02:00
commit 55f37dffe5 (parent 252a5bd71b)

@@ -418,18 +418,27 @@ def ensure_status(request: urllib.request.Request, response: HTTPResponse, statu
     )


-def default_retry(f, retries: int = 3, sleep=None):
+def default_retry(f, retries: int = 5, sleep=None):
     sleep = sleep or time.sleep

     def wrapper(*args, **kwargs):
         for i in range(retries):
             try:
                 return f(*args, **kwargs)
-            except urllib.error.HTTPError as e:
+            except (urllib.error.URLError, TimeoutError) as e:
                 # Retry on internal server errors, and rate limit errors
                 # Potentially this could take into account the Retry-After header
                 # if registries support it
-                if i + 1 != retries and (500 <= e.code < 600 or e.code == 429):
+                if i + 1 != retries and (
+                    (
+                        isinstance(e, urllib.error.HTTPError)
+                        and (500 <= e.code < 600 or e.code == 429)
+                    )
+                    or (
+                        isinstance(e, urllib.error.URLError) and isinstance(e.reason, TimeoutError)
+                    )
+                    or isinstance(e, TimeoutError)
+                ):
                     # Exponential backoff
                     sleep(2**i)
                     continue
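
With retries raised from 3 to 5, a failing call is attempted up to five times, sleeping 1, 2, 4 and 8 seconds between attempts before the last failure propagates. A usage sketch, assuming the default_retry above is in scope; fetch_manifest and the URL are made up for illustration:

import urllib.request


def fetch_manifest(url: str) -> bytes:
    # Hypothetical flaky network call against an OCI registry.
    with urllib.request.urlopen(url, timeout=10) as response:
        return response.read()


# The wrapper retries on HTTPError 5xx/429, URLError-wrapped timeouts,
# and bare TimeoutError, with exponential backoff (sleep(2**i)).
fetch = default_retry(fetch_manifest)
data = fetch("https://registry.example.com/v2/repo/manifests/latest")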