Return upload_results in test-upload-logs-swift role
This changes only the test role, so it can be used for validation first. We are seeing cases where the log upload to Swift fails, yet the role still reports success. To get more information about these upload failures, the upload() method now returns them to the Ansible module, which exposes them in its JSON result.

Change-Id: Ic54b877cd5ea58031e21e514eb3a7c50ea735795
parent 1d4ef3e866
commit 2f8439148a
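The essence of the change, for readers skimming the diff below: the upload worker threads now record per-file errors in a shared list instead of only logging them, and that list is returned up through run() into the module's JSON result. The following is a minimal, self-contained sketch of that pattern, for illustration only; it is not the role's actual code, and the names upload_all and post_file are hypothetical.

    import queue
    import threading


    def upload_all(files, post_file, num_threads=4):
        """Drain a queue of files with worker threads and collect failures."""
        work = queue.Queue()
        for name in files:
            work.put(name)

        failures = []  # appended to by the workers, returned to the caller

        def worker():
            while True:
                try:
                    name = work.get_nowait()
                except queue.Empty:
                    return  # no more work to do
                try:
                    post_file(name)
                except Exception as e:
                    # Keep uploading the remaining files, but remember what
                    # went wrong so it can be reported in the result.
                    failures.append({"file": name, "error": str(e)})

        threads = [threading.Thread(target=worker) for _ in range(num_threads)]
        for t in threads:
            t.start()
        for t in threads:
            t.join()
        return failures


    if __name__ == "__main__":
        def post_file(name):
            if name == "job-output.json":
                raise RuntimeError("Failed for a reason")

        # Prints: [{'file': 'job-output.json', 'error': 'Failed for a reason'}]
        print(upload_all(["job-output.json", "inventory.yaml"], post_file))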
@@ -24,9 +24,14 @@ import testtools
 import time
 import stat
 import fixtures
+try:
+    from unittest import mock
+except ImportError:
+    import mock

+import requests
 from bs4 import BeautifulSoup
-from .zuul_swift_upload import FileList, Indexer, FileDetail
+from .zuul_swift_upload import FileList, Indexer, FileDetail, Uploader


 FIXTURE_DIR = os.path.join(os.path.dirname(__file__),
@@ -411,3 +416,57 @@ class TestFileDetail(testtools.TestCase):

         self.assertEqual(time.gmtime(0), file_detail.last_modified)
         self.assertEqual(0, file_detail.size)
+
+
+class MockUploader(Uploader):
+    """An uploader that uses a mocked cloud object"""
+    def __init__(self, container):
+        self.container = container
+        self.cloud = mock.Mock()
+        self.dry_run = False
+        self.public = True
+        self.delete_after = None
+        self.prefix = ""
+        self.url = 'http://dry-run-url.com/a/path/'
+
+
+class TestUpload(testtools.TestCase):
+
+    def test_upload_result(self):
+        uploader = MockUploader(container="container")
+
+        # Let the upload method fail for the job-output.json, but not for the
+        # inventory.yaml file
+        def side_effect(container, name, **ignored):
+            if name == "job-output.json":
+                raise requests.exceptions.RequestException(
+                    "Failed for a reason"
+                )
+        uploader.cloud.create_object = mock.Mock(
+            side_effect=side_effect
+        )
+
+        # Get some test files to upload
+        files = [
+            FileDetail(
+                os.path.join(FIXTURE_DIR, "logs/job-output.json"),
+                "job-output.json",
+            ),
+            FileDetail(
+                os.path.join(FIXTURE_DIR, "logs/zuul-info/inventory.yaml"),
+                "inventory.yaml",
+            ),
+        ]
+
+        expected_failures = [
+            {
+                "file": "job-output.json",
+                "error": (
+                    "Error posting file after multiple attempts: "
+                    "Failed for a reason"
+                ),
+            },
+        ]
+
+        failures = uploader.upload(files)
+        self.assertEqual(expected_failures, failures)
@@ -682,20 +682,27 @@ class Uploader():

         num_threads = min(len(file_list), MAX_UPLOAD_THREADS)
         threads = []
+        # Keep track on upload failures
+        failures = []
+
         queue = queuelib.Queue()
         # add items to queue
         for f in file_list:
             queue.put(f)

         for x in range(num_threads):
-            t = threading.Thread(target=self.post_thread, args=(queue,))
+            t = threading.Thread(
+                target=self.post_thread, args=(queue, failures)
+            )
             threads.append(t)
             t.start()

         for t in threads:
             t.join()

-    def post_thread(self, queue):
+        return failures
+
+    def post_thread(self, queue, failures):
         while True:
             try:
                 file_detail = queue.get_nowait()
@@ -703,13 +710,23 @@ class Uploader():
                               threading.current_thread(),
                               file_detail)
                 retry_function(lambda: self._post_file(file_detail))
-            except requests.exceptions.RequestException:
+            except requests.exceptions.RequestException as e:
+                msg = "Error posting file after multiple attempts"
                 # Do our best to attempt to upload all the files
-                logging.exception("Error posting file after multiple attempts")
+                logging.exception(msg)
+                failures.append({
+                    "file": file_detail.filename,
+                    "error": "{}: {}".format(msg, e),
+                })
                 continue
-            except IOError:
+            except IOError as e:
+                msg = "Error opening file"
                 # Do our best to attempt to upload all the files
-                logging.exception("Error opening file")
+                logging.exception(msg)
+                failures.append({
+                    "file": file_detail.filename,
+                    "error": "{}: {}".format(msg, e),
+                })
                 continue
             except queuelib.Empty:
                 # No more work to do
@@ -801,8 +818,8 @@ def run(cloud, container, files,
     # Upload.
     uploader = Uploader(cloud, container, prefix, delete_after,
                         public, dry_run)
-    uploader.upload(file_list)
-    return uploader.url
+    upload_failures = uploader.upload(file_list)
+    return uploader.url, upload_failures


 def ansible_main():
@@ -825,15 +842,17 @@ def ansible_main():
     p = module.params
     cloud = get_cloud(p.get('cloud'))
     try:
-        url = run(cloud, p.get('container'), p.get('files'),
-                  indexes=p.get('indexes'),
-                  parent_links=p.get('parent_links'),
-                  topdir_parent_link=p.get('topdir_parent_link'),
-                  partition=p.get('partition'),
-                  footer=p.get('footer'),
-                  delete_after=p.get('delete_after', 15552000),
-                  prefix=p.get('prefix'),
-                  public=p.get('public'))
+        url, upload_failures = run(
+            cloud, p.get('container'), p.get('files'),
+            indexes=p.get('indexes'),
+            parent_links=p.get('parent_links'),
+            topdir_parent_link=p.get('topdir_parent_link'),
+            partition=p.get('partition'),
+            footer=p.get('footer'),
+            delete_after=p.get('delete_after', 15552000),
+            prefix=p.get('prefix'),
+            public=p.get('public')
+        )
     except (keystoneauth1.exceptions.http.HttpError,
             requests.exceptions.RequestException):
         s = "Error uploading to %s.%s" % (cloud.name, cloud.config.region_name)
@@ -844,8 +863,11 @@ def ansible_main():
             msg=s,
             cloud=cloud.name,
             region_name=cloud.config.region_name)
-    module.exit_json(changed=True,
-                     url=url)
+    module.exit_json(
+        changed=True,
+        url=url,
+        upload_failures=upload_failures,
+    )


 def cli_main():
@@ -903,16 +925,18 @@ def cli_main():
     if append_footer.lower() == 'none':
         append_footer = None

-    url = run(get_cloud(args.cloud), args.container, args.files,
-              indexes=not args.no_indexes,
-              parent_links=not args.no_parent_links,
-              topdir_parent_link=args.create_topdir_parent_link,
-              partition=args.partition,
-              footer=append_footer,
-              delete_after=args.delete_after,
-              prefix=args.prefix,
-              public=not args.no_public,
-              dry_run=args.dry_run)
+    url, _ = run(
+        get_cloud(args.cloud), args.container, args.files,
+        indexes=not args.no_indexes,
+        parent_links=not args.no_parent_links,
+        topdir_parent_link=args.create_topdir_parent_link,
+        partition=args.partition,
+        footer=append_footer,
+        delete_after=args.delete_after,
+        prefix=args.prefix,
+        public=not args.no_public,
+        dry_run=args.dry_run
+    )
     print(url)

@@ -30,10 +30,14 @@
     delete_after: "{{ zuul_log_delete_after | default(omit) }}"
   register: upload_results

-- name: Return log URL to Zuul
-  delegate_to: localhost
-  zuul_return:
-    data:
-      zuul:
-        log_url: "{{ upload_results.url }}/"
+- block:
+    - name: Return log URL to Zuul
+      delegate_to: localhost
+      zuul_return:
+        data:
+          zuul:
+            log_url: "{{ upload_results.url }}/"
+    - name: Print upload failures
+      debug:
+        var: upload_results.upload_failures
   when: upload_results is defined
@@ -19,3 +19,6 @@ requestsexceptions
 bs4
 # For upload-logs-google
 google-cloud-storage
+
+# unittest.mock compatibility package for Python < 3.3
+mock;python_version<'3.3'