Merge "Return upload_results in upload-logs-swift role"
commit 62fd612dc5
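In short: Uploader.upload() now collects per-file failures and returns them, run() returns a (url, upload_failures) tuple, and the Ansible module reports upload_failures alongside the log URL. Before the diff itself, here is a minimal sketch of the failure structure a caller can expect; the values are illustrative (the file name and URL are taken from the test fixtures below, not produced by running the module):

    # Sketch of the new return contract, with assumed example values.
    # Uploader.upload() returns a list of dicts; run() passes it through as
    # the second element of its (url, upload_failures) result, and
    # ansible_main() exposes it via module.exit_json(upload_failures=...).
    url = "http://dry-run-url.com/a/path/"  # example URL used in the tests
    upload_failures = [
        {
            "file": "job-output.json",
            "error": "Error posting file after multiple attempts: "
                     "Failed for a reason",
        },
    ]

    # A caller (or a playbook reading the registered result) can report
    # each failed file together with its error message.
    for failure in upload_failures:
        print("failed to upload {file}: {error}".format(**failure))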
@@ -24,9 +24,14 @@ import testtools
 import time
 import stat
 import fixtures
+try:
+    from unittest import mock
+except ImportError:
+    import mock
+
+import requests
 from bs4 import BeautifulSoup
-from .zuul_swift_upload import FileList, Indexer, FileDetail
+from .zuul_swift_upload import FileList, Indexer, FileDetail, Uploader


 FIXTURE_DIR = os.path.join(os.path.dirname(__file__),
@@ -411,3 +416,57 @@ class TestFileDetail(testtools.TestCase):

         self.assertEqual(time.gmtime(0), file_detail.last_modified)
         self.assertEqual(0, file_detail.size)
+
+
+class MockUploader(Uploader):
+    """An uploader that uses a mocked cloud object"""
+    def __init__(self, container):
+        self.container = container
+        self.cloud = mock.Mock()
+        self.dry_run = False
+        self.public = True
+        self.delete_after = None
+        self.prefix = ""
+        self.url = 'http://dry-run-url.com/a/path/'
+
+
+class TestUpload(testtools.TestCase):
+
+    def test_upload_result(self):
+        uploader = MockUploader(container="container")
+
+        # Let the upload method fail for the job-output.json, but not for the
+        # inventory.yaml file
+        def side_effect(container, name, **ignored):
+            if name == "job-output.json":
+                raise requests.exceptions.RequestException(
+                    "Failed for a reason"
+                )
+        uploader.cloud.create_object = mock.Mock(
+            side_effect=side_effect
+        )
+
+        # Get some test files to upload
+        files = [
+            FileDetail(
+                os.path.join(FIXTURE_DIR, "logs/job-output.json"),
+                "job-output.json",
+            ),
+            FileDetail(
+                os.path.join(FIXTURE_DIR, "logs/zuul-info/inventory.yaml"),
+                "inventory.yaml",
+            ),
+        ]
+
+        expected_failures = [
+            {
+                "file": "job-output.json",
+                "error": (
+                    "Error posting file after multiple attempts: "
+                    "Failed for a reason"
+                ),
+            },
+        ]
+
+        failures = uploader.upload(files)
+        self.assertEqual(expected_failures, failures)
@@ -682,20 +682,27 @@ class Uploader():

         num_threads = min(len(file_list), MAX_UPLOAD_THREADS)
         threads = []
+        # Keep track on upload failures
+        failures = []
+
         queue = queuelib.Queue()
         # add items to queue
         for f in file_list:
             queue.put(f)

         for x in range(num_threads):
-            t = threading.Thread(target=self.post_thread, args=(queue,))
+            t = threading.Thread(
+                target=self.post_thread, args=(queue, failures)
+            )
             threads.append(t)
             t.start()

         for t in threads:
             t.join()

-    def post_thread(self, queue):
+        return failures
+
+    def post_thread(self, queue, failures):
         while True:
             try:
                 file_detail = queue.get_nowait()
@@ -703,13 +710,23 @@ class Uploader():
                              threading.current_thread(),
                              file_detail)
                 retry_function(lambda: self._post_file(file_detail))
-            except requests.exceptions.RequestException:
+            except requests.exceptions.RequestException as e:
+                msg = "Error posting file after multiple attempts"
                 # Do our best to attempt to upload all the files
-                logging.exception("Error posting file after multiple attempts")
+                logging.exception(msg)
+                failures.append({
+                    "file": file_detail.filename,
+                    "error": "{}: {}".format(msg, e)
+                })
                 continue
-            except IOError:
+            except IOError as e:
+                msg = "Error opening file"
                 # Do our best to attempt to upload all the files
-                logging.exception("Error opening file")
+                logging.exception(msg)
+                failures.append({
+                    "file": file_detail.filename,
+                    "error": "{}: {}".format(msg, e)
+                })
                 continue
             except queuelib.Empty:
                 # No more work to do
@@ -801,8 +818,8 @@ def run(cloud, container, files,
     # Upload.
     uploader = Uploader(cloud, container, prefix, delete_after,
                         public, dry_run)
-    uploader.upload(file_list)
-    return uploader.url
+    upload_failures = uploader.upload(file_list)
+    return uploader.url, upload_failures


 def ansible_main():
@@ -825,15 +842,17 @@ def ansible_main():
     p = module.params
     cloud = get_cloud(p.get('cloud'))
     try:
-        url = run(cloud, p.get('container'), p.get('files'),
-                  indexes=p.get('indexes'),
-                  parent_links=p.get('parent_links'),
-                  topdir_parent_link=p.get('topdir_parent_link'),
-                  partition=p.get('partition'),
-                  footer=p.get('footer'),
-                  delete_after=p.get('delete_after', 15552000),
-                  prefix=p.get('prefix'),
-                  public=p.get('public'))
+        url, upload_failures = run(
+            cloud, p.get('container'), p.get('files'),
+            indexes=p.get('indexes'),
+            parent_links=p.get('parent_links'),
+            topdir_parent_link=p.get('topdir_parent_link'),
+            partition=p.get('partition'),
+            footer=p.get('footer'),
+            delete_after=p.get('delete_after', 15552000),
+            prefix=p.get('prefix'),
+            public=p.get('public')
+        )
     except (keystoneauth1.exceptions.http.HttpError,
             requests.exceptions.RequestException):
         s = "Error uploading to %s.%s" % (cloud.name, cloud.config.region_name)
@@ -844,8 +863,11 @@ def ansible_main():
             msg=s,
             cloud=cloud.name,
             region_name=cloud.config.region_name)
-    module.exit_json(changed=True,
-                     url=url)
+    module.exit_json(
+        changed=True,
+        url=url,
+        upload_failures=upload_failures,
+    )


 def cli_main():
@@ -903,16 +925,18 @@ def cli_main():
     if append_footer.lower() == 'none':
         append_footer = None

-    url = run(get_cloud(args.cloud), args.container, args.files,
-              indexes=not args.no_indexes,
-              parent_links=not args.no_parent_links,
-              topdir_parent_link=args.create_topdir_parent_link,
-              partition=args.partition,
-              footer=append_footer,
-              delete_after=args.delete_after,
-              prefix=args.prefix,
-              public=not args.no_public,
-              dry_run=args.dry_run)
+    url, _ = run(
+        get_cloud(args.cloud), args.container, args.files,
+        indexes=not args.no_indexes,
+        parent_links=not args.no_parent_links,
+        topdir_parent_link=args.create_topdir_parent_link,
+        partition=args.partition,
+        footer=append_footer,
+        delete_after=args.delete_after,
+        prefix=args.prefix,
+        public=not args.no_public,
+        dry_run=args.dry_run
+    )
     print(url)

@@ -30,10 +30,14 @@
     delete_after: "{{ zuul_log_delete_after | default(omit) }}"
   register: upload_results

-- name: Return log URL to Zuul
-  delegate_to: localhost
-  zuul_return:
-    data:
-      zuul:
-        log_url: "{{ upload_results.url }}/"
+- block:
+    - name: Return log URL to Zuul
+      delegate_to: localhost
+      zuul_return:
+        data:
+          zuul:
+            log_url: "{{ upload_results.url }}/"
+    - name: Print upload failures
+      debug:
+        var: upload_results.upload_failures
   when: upload_results is defined