Retry network call to recover from external failure

This change integrates the tenacity library to prevent crashes
caused by external network failures.

Change-Id: Ifa702c87d61ee83c9c86295ba2af1d3180df686f
This commit is contained in:
Tristan Cacqueray 2021-11-26 18:52:09 +00:00
parent bc02a95488
commit cd93d0fbfb
2 changed files with 29 additions and 7 deletions

View File

@ -34,6 +34,8 @@ import urllib
import yaml
from distutils.version import StrictVersion as s_version
import tenacity
GEARMAN_SERVER = None
GEARMAN_PORT = None
@ -131,6 +133,27 @@ files:
""" # noqa
# Shared retry policy for outbound HTTP requests: retry up to 10 times
# with exponential back-off, then surface the original error.
retry_request = tenacity.retry(
    # Re-raise the underlying exception instead of tenacity.RetryError
    reraise=True,
    # Exponential back-off between attempts, waiting at least 1s and
    # at most 10s before each retry
    wait=tenacity.wait_exponential(multiplier=1, min=1, max=10),
    # Give up after the tenth attempt
    stop=tenacity.stop_after_attempt(10),
)
@retry_request
def requests_get(url, verify):
    """Perform a GET request on *url*, retrying transient network failures.

    *verify* is forwarded to requests and controls TLS certificate
    verification.
    """
    response = requests.get(url, verify=verify)
    return response
def requests_get_json(url, verify):
    """GET *url* (with retries) and return the decoded JSON body.

    Raises an HTTP error via raise_for_status() when the server
    responds with an error status code.
    """
    response = requests_get(url, verify)
    response.raise_for_status()
    return response.json()
###############################################################################
# CLI #
###############################################################################
@ -311,9 +334,8 @@ def _zuul_complete_available(zuul_url, insecure):
parameter.
"""
url = zuul_url + "/status"
zuul_status = requests.get(url, verify=insecure)
zuul_status.raise_for_status()
zuul_version = parse_version(zuul_status.json().get("zuul_version"))
zuul_status = requests_get_json(url, verify=insecure)
zuul_version = parse_version(zuul_status.get("zuul_version"))
if zuul_version and zuul_version >= s_version("4.7.0"):
return "&complete=true"
@ -331,10 +353,9 @@ def get_builds(zuul_url, insecure):
while True:
url = base_url + "&skip=" + str(pos)
logging.info("Getting job results %s", url)
jobs_result = requests.get(url, verify=insecure)
jobs_result.raise_for_status()
jobs_result = requests_get_json(url, verify=insecure)
for job in jobs_result.json():
for job in jobs_result:
# It is important here to check we didn't yield builds twice,
# as this can happen when using skip if new build get reported
# between the two requests.
@ -365,7 +386,7 @@ def check_specified_files(job_result):
for f in file_to_check:
if not job_result["log_url"]:
continue
response = requests.get("%s%s" % (job_result["log_url"], f))
response = requests_get("%s%s" % (job_result["log_url"], f))
if response.status_code == 200:
available_files.append(f)
return available_files

View File

@ -2,3 +2,4 @@ pbr>=1.6 # Apache-2.0
gear<0.17
requests<2.27 # Apache-2.0
PyYAML<6.1 # MIT
tenacity # Apache-2.0