Merge "Ensure that important files are downloaded"
This commit is contained in:
@@ -506,16 +506,38 @@ def get_files_to_check(config):
|
|||||||
return files
|
return files
|
||||||
|
|
||||||
|
|
||||||
|
def _is_file_available(response):
|
||||||
|
logging.debug("File from url: %s got response status: %s" % (
|
||||||
|
response.url, response.status_code))
|
||||||
|
return response.ok
|
||||||
|
|
||||||
|
|
||||||
|
def write_response_in_file(response, directory, filename):
    """Stream the body of *response* into ``directory/filename``.

    Nothing is written when the response was not successful
    (as judged by ``_is_file_available``).

    :param response: a ``requests.Response`` with a streamable body.
    :param directory: destination directory path.
    :param filename: name of the file to create inside *directory*.
    """
    if _is_file_available(response):
        # Stream in 1 KiB chunks so large log files are never held
        # fully in memory.
        with open("%s/%s" % (directory, filename), 'wb') as f:
            for txt in response.iter_content(1024):
                f.write(txt)
def ensure_file_downloaded(url, directory, insecure=False):
    """Download *url* into *directory* unless the file is already there.

    NOTE: A few build directories were missing their inventory.yaml
    file; this helper re-fetches such files on later runs instead of
    assuming the first download succeeded.

    :param url: full URL of the file; the last path segment becomes the
        local filename.
    :param directory: destination directory; when falsy no existence
        check is done and nothing is written by the helper chain.
    :param insecure: TLS-verification flag, forwarded the same way as
        in ``download_file``.
    """
    filename = url.split("/")[-1]
    if directory:
        # Already fetched on a previous run -- nothing to do.
        if os.path.isfile("%s/%s" % (directory, filename)):
            return
    # Forward ``insecure`` instead of a hard-coded verify=True so this
    # helper matches ``download_file``.
    # NOTE(review): ``verify=insecure`` polarity looks inverted
    # (insecure=True would ENABLE cert verification) -- confirm intent.
    response = requests_get(url, verify=insecure)
    write_response_in_file(response, directory, filename)
def download_file(url, directory, insecure=False):
    """Fetch *url* and, when *directory* is set, stream it to disk.

    :param url: full URL of the file; the last path segment becomes the
        local filename.
    :param directory: destination directory; when falsy the body is not
        written anywhere.
    :param insecure: passed to ``requests.get`` as ``verify``.
        NOTE(review): polarity looks inverted (insecure=True would
        ENABLE cert verification) -- confirm intent.
    :returns: the derived filename on success, or None when the content
        could not be decoded.
    """
    logging.debug("Started fetching %s" % url)
    filename = url.split("/")[-1]
    try:
        response = requests.get(url, verify=insecure, stream=True)
        if directory:
            # Success check and chunked write are delegated to the
            # shared helper instead of being duplicated inline.
            write_response_in_file(response, directory, filename)
        return filename
    except requests.exceptions.ContentDecodingError:
        logging.critical("Can not decode content from %s" % url)
@@ -585,6 +607,9 @@ def check_specified_files(job_result, insecure, directory=None):
|
|||||||
build_log_urls = [
|
build_log_urls = [
|
||||||
urljoin(job_result["log_url"], s) for s in filtered_files
|
urljoin(job_result["log_url"], s) for s in filtered_files
|
||||||
]
|
]
|
||||||
|
inventory_urls = [
|
||||||
|
urljoin(job_result["log_url"], "zuul-info/inventory.yaml")
|
||||||
|
]
|
||||||
|
|
||||||
results = []
|
results = []
|
||||||
pool = ThreadPoolExecutor(max_workers=args.workers)
|
pool = ThreadPoolExecutor(max_workers=args.workers)
|
||||||
@@ -594,6 +619,9 @@ def check_specified_files(job_result, insecure, directory=None):
|
|||||||
if page:
|
if page:
|
||||||
results.append(page)
|
results.append(page)
|
||||||
|
|
||||||
|
pool.map(ensure_file_downloaded, inventory_urls,
|
||||||
|
itertools.repeat(directory), itertools.repeat(insecure))
|
||||||
|
|
||||||
return results
|
return results
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -734,6 +734,27 @@ class TestLogMatcher(base.TestCase):
|
|||||||
json.loads(mock_gear_job.call_args.args[1].decode('utf-8'))
|
json.loads(mock_gear_job.call_args.args[1].decode('utf-8'))
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Method of TestLogMatcher (class header outside this view).
@mock.patch('builtins.open', new_callable=mock.mock_open())
@mock.patch('os.path.isfile')
@mock.patch('requests.get')
def test_ensure_file_downloaded(self, mock_requests, mock_is_file,
                                mock_open):
    """File absent locally -> the download must be attempted."""
    url = 'http://someurl.com'
    directory = '/tmp/logscraper'
    # Pretend the target file does not exist yet.
    mock_is_file.return_value = False
    logscraper.ensure_file_downloaded(url, directory)
    assert mock_requests.called
|
# Method of TestLogMatcher (class header outside this view).
@mock.patch('os.path.isfile')
@mock.patch('requests.get')
def test_ensure_file_downloaded_file_exists(self, mock_requests,
                                            mock_is_file):
    """File already on disk -> no HTTP request should be made."""
    url = 'http://someurl.com'
    directory = '/tmp/logscraper'
    # Pretend the target file is already present.
    mock_is_file.return_value = True
    logscraper.ensure_file_downloaded(url, directory)
    assert not mock_requests.called
class TestBuildCache(base.TestCase):
|
class TestBuildCache(base.TestCase):
|
||||||
|
|
||||||
|
|||||||
Reference in New Issue
Block a user