Merge "Ensure that important files are downloaded"

This commit is contained in:
Zuul
2023-01-31 14:55:52 +00:00
committed by Gerrit Code Review
2 changed files with 55 additions and 6 deletions

View File

@@ -506,17 +506,39 @@ def get_files_to_check(config):
return files
def _is_file_available(response):
logging.debug("File from url: %s got response status: %s" % (
response.url, response.status_code))
return response.ok
def write_response_in_file(response, directory, filename):
    """Stream the response body into ``directory/filename``.

    Nothing is written when the response does not indicate success.
    """
    if not _is_file_available(response):
        return
    target = "%s/%s" % (directory, filename)
    with open(target, 'wb') as out:
        for chunk in response.iter_content(1024):
            out.write(chunk)
def ensure_file_downloaded(url, directory, insecure=False):
    """Download ``url`` into ``directory`` unless the file already exists.

    :param url: full URL of the file; the last path segment is the filename
    :param directory: destination directory; when it already holds the
        file, the function returns without making an HTTP request
    :param insecure: passed through as ``verify=`` to ``requests.get``,
        mirroring ``download_file`` (previously ignored — ``verify=True``
        was hard-coded, so the parameter had no effect)
    """
    # NOTE: some build result directories do not contain an
    # inventory.yaml file, so fetch it on demand when it is missing.
    filename = url.split("/")[-1]
    if directory and os.path.isfile("%s/%s" % (directory, filename)):
        return
    # Keep TLS-verification semantics consistent with download_file().
    response = requests.get(url, verify=insecure)
    write_response_in_file(response, directory, filename)
def download_file(url, directory, insecure=False):
    """Fetch ``url`` and, when a directory is given, store the body on disk.

    :param url: full URL of the file; the last path segment is the filename
    :param directory: destination directory; falsy means "do not write"
    :param insecure: passed through as ``verify=`` to ``requests.get``
        # NOTE(review): semantics look inverted (insecure=True would
        # enable TLS verification) -- confirm intent with callers.
    :returns: the derived filename, or ``None`` when the content could
        not be decoded
    """
    # The diff in SOURCE flattened old and new hunk lines together
    # (duplicate write loop and duplicate return); this is the
    # post-change version: writing is delegated to write_response_in_file.
    logging.debug("Started fetching %s", url)
    filename = url.split("/")[-1]
    try:
        response = requests.get(url, verify=insecure, stream=True)
        if directory:
            write_response_in_file(response, directory, filename)
        return filename
    except requests.exceptions.ContentDecodingError:
        logging.critical("Can not decode content from %s", url)
@@ -585,6 +607,9 @@ def check_specified_files(job_result, insecure, directory=None):
build_log_urls = [
urljoin(job_result["log_url"], s) for s in filtered_files
]
inventory_urls = [
urljoin(job_result["log_url"], "zuul-info/inventory.yaml")
]
results = []
pool = ThreadPoolExecutor(max_workers=args.workers)
@@ -594,6 +619,9 @@ def check_specified_files(job_result, insecure, directory=None):
if page:
results.append(page)
pool.map(ensure_file_downloaded, inventory_urls,
itertools.repeat(directory), itertools.repeat(insecure))
return results

View File

@@ -734,6 +734,27 @@ class TestLogMatcher(base.TestCase):
json.loads(mock_gear_job.call_args.args[1].decode('utf-8'))
)
@mock.patch('builtins.open', new_callable=mock.mock_open)
@mock.patch('os.path.isfile')
@mock.patch('requests.get')
def test_ensure_file_downloaded(self, mock_requests, mock_is_file,
                                mock_open):
    """A file missing on disk triggers an HTTP fetch."""
    # Fix: new_callable must be the callable itself (mock.mock_open),
    # not an already-built instance (mock.mock_open()); mock.patch calls
    # new_callable() to construct the replacement for builtins.open.
    url = 'http://someurl.com'
    directory = '/tmp/logscraper'
    mock_is_file.return_value = False
    logscraper.ensure_file_downloaded(url, directory)
    assert mock_requests.called
@mock.patch('os.path.isfile')
@mock.patch('requests.get')
def test_ensure_file_downloaded_file_exists(self, mock_requests,
                                            mock_is_file):
    """An already-downloaded file short-circuits: no HTTP request."""
    target_url = 'http://someurl.com'
    target_dir = '/tmp/logscraper'
    # Pretend the file is already on disk.
    mock_is_file.return_value = True
    logscraper.ensure_file_downloaded(target_url, target_dir)
    assert not mock_requests.called
class TestBuildCache(base.TestCase):