Add upload-logs-swift role

This adds a role to upload log files to swift.  It generates index
files, sets expiration times, and creates partitioned containers as
needed.

This is largely based on the zuul_swift_upload.py script written for
Zuul v2 by Joshua Hesketh.

Change-Id: If520c849fe6a20833804609583ea758e8dd2a6f6
Co-Authored-By: Joshua Hesketh <josh@nitrotech.org>
James E. Blair 2018-07-20 16:40:07 -07:00
parent 1548d82ffd
commit 2bf16a421f
16 changed files with 862 additions and 0 deletions

@@ -0,0 +1,48 @@
Upload logs to a swift container

This uploads logs to an OpenStack Object Store (Swift) container.

.. warning::

   Do not use this role; the API is not complete.

**Role Variables**

.. zuul:rolevar:: zuul_site_upload_logs
   :default: true

   Controls when logs are uploaded.  true, the default, means always
   upload logs.  false means never upload logs.  'failure' means to
   only upload logs when the job has failed.

   .. note:: Intended to be set by admins via site-variables.

.. zuul:rolevar:: zuul_log_cloud_config

   Complex argument which contains the cloud configuration in
   os-cloud-config (clouds.yaml) format.  It is expected that this
   argument comes from a `Secret`.

.. zuul:rolevar:: zuul_log_partition
   :default: false

   If set to true, the first component of the log path is removed
   from the object name and added to the container name, so that logs
   for different changes are distributed across a large number of
   containers.

.. zuul:rolevar:: zuul_log_container
   :default: logs

   This role will create containers which do not already exist.  If
   partitioning is not enabled, this is the name of the container
   which will be used.  If partitioning is enabled, this is used as
   the prefix for the container name, separated from the partition
   name by an underscore.  For example, "logs_42" would be the
   container name for partition 42.

.. zuul:rolevar:: zuul_log_container_public
   :default: true

   Whether a newly created container is given a global read ACL.  If
   the container already exists, it will not be modified.
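To make the container naming above concrete, the following small sketch mirrors the prefix-splitting logic in run() in zuul_swift_upload.py (shown later in this change); the helper name and the example log path are illustrative only, not part of the role.

# Illustrative only: mirrors the partition handling in run() below.
# The helper name and the example log path are made up.
def split_partition(container, prefix, partition=True):
    prefix = prefix.lstrip('/')
    if partition and prefix:
        parts = prefix.split('/')
        if len(parts) > 1:
            # The first path component moves into the container name.
            container += '_' + parts[0]
            prefix = '/'.join(parts[1:])
    return container, prefix

print(split_partition('logs', '42/ab12cd3/check/unit-tests/f0a1b2c'))
# ('logs_42', 'ab12cd3/check/unit-tests/f0a1b2c')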

@@ -0,0 +1,3 @@
zuul_log_partition: false
zuul_log_container: logs
zuul_log_container_public: true
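The README expects zuul_log_cloud_config to carry clouds.yaml (os-cloud-config) style data, normally supplied from a Zuul secret. A minimal sketch of how such a dict is consumed, mirroring Uploader.__init__ in zuul_swift_upload.py below; every credential value here is a placeholder.

# Sketch only: placeholder credentials in clouds.yaml style, consumed the
# same way Uploader.__init__ below consumes a dict-valued cloud argument.
import openstack

cloud_config = {
    'auth': {
        'auth_url': 'https://keystone.example.com/v3',
        'username': 'zuul-log-uploader',
        'password': 'not-a-real-password',
        'project_name': 'zuul-logs',
        'user_domain_name': 'Default',
        'project_domain_name': 'Default',
    },
    'region_name': 'RegionOne',
}

config = openstack.config.loader.OpenStackConfig().get_one(**cloud_config)
conn = openstack.connection.Connection(config=config)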

@@ -0,0 +1,194 @@
# Copyright (C) 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
from .zuul_swift_upload import FileList, Indexer
import os
from bs4 import BeautifulSoup


FIXTURE_DIR = os.path.join(os.path.dirname(__file__),
                           'test-fixtures')


class TestFileList(unittest.TestCase):

    def assert_files(self, result, files):
        self.assertEqual(len(result), len(files))
        for expected, received in zip(files, result):
            self.assertEqual(expected[0], received.relative_path)
            if expected[0] and expected[0][-1] == '/':
                efilename = os.path.split(
                    os.path.dirname(expected[0]))[1] + '/'
            else:
                efilename = os.path.split(expected[0])[1]
            self.assertEqual(efilename, received.filename)
            if received.folder:
                if received.full_path is not None and expected[0] != '':
                    self.assertTrue(os.path.isdir(received.full_path))
            else:
                self.assertTrue(os.path.isfile(received.full_path))
            self.assertEqual(expected[1], received.mimetype)
            self.assertEqual(expected[2], received.encoding)

    def find_file(self, file_list, path):
        for f in file_list:
            if f.relative_path == path:
                return f

    def test_single_dir_trailing_slash(self):
        '''Test a single directory with a trailing slash'''
        fl = FileList()
        fl.add(os.path.join(FIXTURE_DIR, 'logs/'))
        self.assert_files(fl, [
            ('', 'application/directory', None),
            ('controller', 'application/directory', None),
            ('zuul-info', 'application/directory', None),
            ('controller/subdir', 'application/directory', None),
            ('controller/compressed.gz', 'text/plain', 'gzip'),
            ('controller/journal.xz', 'text/plain', 'xz'),
            ('controller/service_log.txt', 'text/plain', None),
            ('controller/syslog', 'text/plain', None),
            ('controller/subdir/subdir.txt', 'text/plain', None),
            ('zuul-info/inventory.yaml', 'text/plain', None),
            ('zuul-info/zuul-info.controller.txt', 'text/plain', None),
        ])

    def test_single_dir(self):
        '''Test a single directory without a trailing slash'''
        fl = FileList()
        fl.add(os.path.join(FIXTURE_DIR, 'logs'))
        self.assert_files(fl, [
            ('', 'application/directory', None),
            ('logs', 'application/directory', None),
            ('logs/controller', 'application/directory', None),
            ('logs/zuul-info', 'application/directory', None),
            ('logs/controller/subdir', 'application/directory', None),
            ('logs/controller/compressed.gz', 'text/plain', 'gzip'),
            ('logs/controller/journal.xz', 'text/plain', 'xz'),
            ('logs/controller/service_log.txt', 'text/plain', None),
            ('logs/controller/syslog', 'text/plain', None),
            ('logs/controller/subdir/subdir.txt', 'text/plain', None),
            ('logs/zuul-info/inventory.yaml', 'text/plain', None),
            ('logs/zuul-info/zuul-info.controller.txt', 'text/plain', None),
        ])

    def test_single_file(self):
        '''Test a single file'''
        fl = FileList()
        fl.add(os.path.join(FIXTURE_DIR,
                            'logs/zuul-info/inventory.yaml'))
        self.assert_files(fl, [
            ('', 'application/directory', None),
            ('inventory.yaml', 'text/plain', None),
        ])

    def test_index_files(self):
        '''Test index generation'''
        fl = FileList()
        fl.add(os.path.join(FIXTURE_DIR, 'logs'))
        ix = Indexer()
        fl = ix.make_indexes(fl)
        self.assert_files(fl, [
            ('', 'application/directory', None),
            ('index.html', 'text/html', None),
            ('logs', 'application/directory', None),
            ('logs/index.html', 'text/html', None),
            ('logs/controller', 'application/directory', None),
            ('logs/zuul-info', 'application/directory', None),
            ('logs/controller/subdir', 'application/directory', None),
            ('logs/controller/compressed.gz', 'text/plain', 'gzip'),
            ('logs/controller/journal.xz', 'text/plain', 'xz'),
            ('logs/controller/service_log.txt', 'text/plain', None),
            ('logs/controller/syslog', 'text/plain', None),
            ('logs/controller/index.html', 'text/html', None),
            ('logs/controller/subdir/subdir.txt', 'text/plain', None),
            ('logs/controller/subdir/index.html', 'text/html', None),
            ('logs/zuul-info/inventory.yaml', 'text/plain', None),
            ('logs/zuul-info/zuul-info.controller.txt', 'text/plain', None),
            ('logs/zuul-info/index.html', 'text/html', None),
        ])

        top_index = self.find_file(fl, 'index.html')
        page = open(top_index.full_path).read()
        page = BeautifulSoup(page, 'html.parser')
        rows = page.find_all('tr')[1:]
        self.assertEqual(len(rows), 2)
        self.assertEqual(rows[0].find('a').get('href'), '../')
        self.assertEqual(rows[0].find('a').text, '../')
        self.assertEqual(rows[1].find('a').get('href'), 'logs/')
        self.assertEqual(rows[1].find('a').text, 'logs/')

        subdir_index = self.find_file(fl, 'logs/controller/subdir/index.html')
        page = open(subdir_index.full_path).read()
        page = BeautifulSoup(page, 'html.parser')
        rows = page.find_all('tr')[1:]
        self.assertEqual(rows[0].find('a').get('href'), '../')
        self.assertEqual(rows[0].find('a').text, '../')
        self.assertEqual(rows[1].find('a').get('href'), 'subdir.txt')
        self.assertEqual(rows[1].find('a').text, 'subdir.txt')

    def test_index_files_trailing_slash(self):
        '''Test index generation with a trailing slash'''
        fl = FileList()
        fl.add(os.path.join(FIXTURE_DIR, 'logs/'))
        ix = Indexer()
        fl = ix.make_indexes(fl)
        self.assert_files(fl, [
            ('', 'application/directory', None),
            ('index.html', 'text/html', None),
            ('controller', 'application/directory', None),
            ('zuul-info', 'application/directory', None),
            ('controller/subdir', 'application/directory', None),
            ('controller/compressed.gz', 'text/plain', 'gzip'),
            ('controller/journal.xz', 'text/plain', 'xz'),
            ('controller/service_log.txt', 'text/plain', None),
            ('controller/syslog', 'text/plain', None),
            ('controller/index.html', 'text/html', None),
            ('controller/subdir/subdir.txt', 'text/plain', None),
            ('controller/subdir/index.html', 'text/html', None),
            ('zuul-info/inventory.yaml', 'text/plain', None),
            ('zuul-info/zuul-info.controller.txt', 'text/plain', None),
            ('zuul-info/index.html', 'text/html', None),
        ])

        top_index = self.find_file(fl, 'index.html')
        page = open(top_index.full_path).read()
        page = BeautifulSoup(page, 'html.parser')
        rows = page.find_all('tr')[1:]
        self.assertEqual(rows[0].find('a').get('href'), '../')
        self.assertEqual(rows[0].find('a').text, '../')
        self.assertEqual(rows[1].find('a').get('href'), 'controller/')
        self.assertEqual(rows[1].find('a').text, 'controller/')
        self.assertEqual(rows[2].find('a').get('href'), 'zuul-info/')
        self.assertEqual(rows[2].find('a').text, 'zuul-info/')

        subdir_index = self.find_file(fl, 'controller/subdir/index.html')
        page = open(subdir_index.full_path).read()
        page = BeautifulSoup(page, 'html.parser')
        rows = page.find_all('tr')[1:]
        self.assertEqual(rows[0].find('a').get('href'), '../')
        self.assertEqual(rows[0].find('a').text, '../')
        self.assertEqual(rows[1].find('a').get('href'), 'subdir.txt')
        self.assertEqual(rows[1].find('a').text, 'subdir.txt')
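The tests above read from a test-fixtures/logs tree that ships with the role but is not shown in this diff. Purely to illustrate what the assertions expect, here is a sketch that would create an equivalent tree; the file contents are arbitrary.

# Illustration only: builds a tree matching the fixture paths asserted above.
import os

def make_fixture_tree(base):
    for d in ('logs/controller/subdir', 'logs/zuul-info'):
        os.makedirs(os.path.join(base, d), exist_ok=True)
    for f in ('logs/controller/compressed.gz',
              'logs/controller/journal.xz',
              'logs/controller/service_log.txt',
              'logs/controller/syslog',
              'logs/controller/subdir/subdir.txt',
              'logs/zuul-info/inventory.yaml',
              'logs/zuul-info/zuul-info.controller.txt'):
        with open(os.path.join(base, f), 'w') as fh:
            fh.write('fixture\n')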

@@ -0,0 +1,572 @@
#!/usr/bin/env python3
#
# Copyright 2014 Rackspace Australia
# Copyright 2018 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Utility to upload files to swift
"""
import argparse
import logging
import mimetypes
import os
import queue as queuelib
import stat
import sys
import tempfile
import threading
import time
import zlib
import collections
from ansible.module_utils.basic import AnsibleModule
import openstack
import requests
import requests.exceptions
import requestsexceptions
mimetypes.init()
mimetypes.add_type('text/plain', '.yaml')
MAX_UPLOAD_THREADS = 24
# Map mime types to apache icons
APACHE_MIME_ICON_MAP = {
'_default': '/icons/unknown.png',
'application/gzip': '/icons/compressed.png',
'application/directory': '/icons/folder.png',
'text/html': '/icons/text.png',
'text/plain': '/icons/text.png',
}
# Map mime types to apache icons
APACHE_FILE_ICON_MAP = {
'..': '/icons/back.png',
}
def get_mime_icon(mime, filename=''):
return (APACHE_FILE_ICON_MAP.get(filename) or
APACHE_MIME_ICON_MAP.get(mime) or
APACHE_MIME_ICON_MAP['_default'])
def sizeof_fmt(num, suffix='B'):
# From http://stackoverflow.com/questions/1094841/
# reusable-library-to-get-human-readable-version-of-file-size
for unit in ['', 'K', 'M', 'G', 'T', 'P', 'E', 'Z']:
if abs(num) < 1024.0:
return "%3.1f%s%s" % (num, unit, suffix)
num /= 1024.0
return "%.1f%s%s" % (num, 'Y', suffix)
class FileDetail(object):
"""
Used to generate indexes with links or as the file path
to push to swift.
"""
def __init__(self, full_path, relative_path, filename=None):
self.full_path = full_path
if filename is None:
self.filename = os.path.basename(full_path)
else:
self.filename = filename
self.relative_path = relative_path
if self.full_path and os.path.isfile(self.full_path):
mime = 'text/plain'
mime_guess, encoding = mimetypes.guess_type(self.full_path)
mime = mime_guess if mime_guess else mime
self.mimetype = mime
self.encoding = encoding
self.folder = False
else:
self.mimetype = 'application/directory'
self.encoding = None
self.folder = True
if self.full_path:
st = os.stat(self.full_path)
self.last_modified = time.gmtime(st[stat.ST_MTIME])
self.size = st[stat.ST_SIZE]
else:
self.last_modified = time.gmtime(0)
self.size = 0
def __repr__(self):
t = 'Folder' if self.folder else 'File'
return '<%s %s>' % (t, self.relative_path)
class FileList(object):
def __init__(self):
self.file_list = []
self.file_list.append(FileDetail(None, '', ''))
def __iter__(self):
return iter(self.file_list)
def __len__(self):
return len(self.file_list)
def add(self, file_path):
"""
Generate a list of files to upload to swift. Recurses through
directories
"""
# file_list: A list of FileDetails to push to swift
file_list = []
if os.path.isfile(file_path):
relative_path = os.path.basename(file_path)
file_list.append(FileDetail(file_path, relative_path))
elif os.path.isdir(file_path):
parent_dir = os.path.dirname(file_path)
if not file_path.endswith('/'):
filename = os.path.basename(file_path)
full_path = file_path
relative_name = os.path.relpath(full_path, parent_dir)
file_list.append(FileDetail(full_path, relative_name,
filename))
for path, folders, files in os.walk(file_path):
# Sort folder in-place so that we recurse in order.
files.sort(key=lambda x: x.lower())
folders.sort(key=lambda x: x.lower())
# relative_path: The path between the given directory
# and the one being currently walked.
relative_path = os.path.relpath(path, parent_dir)
for f in folders:
filename = os.path.basename(f)
full_path = os.path.join(path, filename)
relative_name = os.path.relpath(full_path, parent_dir)
file_list.append(FileDetail(full_path, relative_name,
filename))
for f in files:
filename = os.path.basename(f)
full_path = os.path.join(path, filename)
relative_name = os.path.relpath(full_path, parent_dir)
fd = FileDetail(full_path, relative_name)
file_list.append(fd)
self.file_list += file_list
class Indexer(object):
"""generates index.html files if requested."""
def __init__(self, create_parent_links=True,
append_footer='index_footer.html'):
self.create_parent_links = create_parent_links
self.append_footer = append_footer
self.index_filename = 'index.html'
def make_indexes(self, file_list):
folders = collections.OrderedDict()
for f in file_list:
if f.folder:
folders[f.relative_path] = []
folder = os.path.dirname(os.path.dirname(
f.relative_path + '/'))
if folder == '/':
folder = ''
else:
folder = os.path.dirname(f.relative_path)
folders[folder].append(f)
indexes = {}
parent_file_detail = FileDetail(None, '..', '..')
for folder, files in folders.items():
# Don't add the pseudo-top-directory
if files and files[0].full_path is None:
files = files[1:]
# Do generate a link to the parent directory
full_path = self.make_index_file([parent_file_detail] + files,
'Index of %s' % (folder,))
if full_path:
filename = os.path.basename(full_path)
relative_name = os.path.join(folder, filename)
indexes[folder] = FileDetail(full_path, relative_name)
# This appends the index file at the end of the group of files
# for each directory.
ret_file_list = FileList()
newlist = []
last_dirname = None
for f in reversed(list(file_list)):
if f.folder:
relative_path = f.relative_path + '/'
else:
relative_path = f.relative_path
dirname = os.path.dirname(relative_path)
if dirname == '/':
dirname = ''
if dirname != last_dirname:
index = indexes.pop(dirname, None)
if index:
newlist.append(index)
last_dirname = dirname
newlist.append(f)
newlist.reverse()
ret_file_list.file_list = newlist
return ret_file_list
def make_index_file(self, folder_links, title):
"""Writes an index into a file for pushing"""
for file_details in folder_links:
# Do not generate an index file if one exists already.
# This may be the case when uploading other machine generated
# content like python coverage info.
if self.index_filename == file_details.filename:
return
index_content = self.generate_log_index(folder_links, title)
tempdir = tempfile.mkdtemp()
fd = open(os.path.join(tempdir, self.index_filename), 'w')
fd.write(index_content)
return os.path.join(tempdir, self.index_filename)
def generate_log_index(self, folder_links, title):
"""Create an index of logfiles and links to them"""
output = '<html><head><title>%s</title></head><body>\n' % title
output += '<h1>%s</h1>\n' % title
output += '<table><tr><th></th><th>Name</th><th>Last Modified</th>'
output += '<th>Size</th></tr>'
file_details_to_append = None
for file_details in folder_links:
output += '<tr>'
output += (
'<td><img alt="[ ]" title="%(m)s" src="%(i)s"></img></td>' % ({
'm': file_details.mimetype,
'i': get_mime_icon(file_details.mimetype,
file_details.filename),
}))
filename = file_details.filename
if file_details.folder:
filename += '/'
output += '<td><a href="%s">%s</a></td>' % (filename,
filename)
output += '<td>%s</td>' % time.asctime(
file_details.last_modified)
if file_details.mimetype == 'folder':
size = str(file_details.size)
else:
size = sizeof_fmt(file_details.size, suffix='')
output += '<td style="text-align: right">%s</td>' % size
output += '</tr>\n'
if (self.append_footer and
self.append_footer in file_details.filename):
file_details_to_append = file_details
output += '</table>'
if file_details_to_append:
output += '<br /><hr />'
try:
with open(file_details_to_append.full_path, 'r') as f:
output += f.read()
except IOError:
logging.exception("Error opening file for appending")
output += '</body></html>\n'
return output
class DeflateFilter(object):
chunk_size = 16384
def __init__(self, infile):
self.infile = infile
self.encoder = zlib.compressobj()
self.done = False
def __iter__(self):
return self
def __next__(self):
if self.done:
raise StopIteration()
ret = b''
while True:
data = self.infile.read(self.chunk_size)
if data:
ret = self.encoder.compress(data)
if ret:
break
else:
self.done = True
ret = self.encoder.flush()
break
return ret
class Uploader(object):
def __init__(self, cloud, container, prefix=None, delete_after=None,
public=True):
if isinstance(cloud, dict):
config = openstack.config.loader.OpenStackConfig().get_one(**cloud)
self.cloud = openstack.connection.Connection(config=config)
else:
self.cloud = openstack.connect(cloud=cloud)
self.container = container
self.prefix = prefix or ''
self.delete_after = delete_after
sess = self.cloud.config.get_session()
adapter = requests.adapters.HTTPAdapter(pool_maxsize=100)
sess.mount('https://', adapter)
if not self.cloud.get_container(self.container):
self.cloud.create_container(name=self.container, public=public)
self.cloud.update_container(
name=self.container,
headers={'X-Container-Meta-Web-Index': 'index.html'})
# 'X-Container-Meta-Web-Listings': 'true'
# The ceph radosgw swift implementation requires an
# index.html at the root in order for any other indexes to
# work.
self.cloud.create_object(self.container,
name='index.html',
data='',
content_type='text/html')
self.url = os.path.join(self.cloud.object_store.get_endpoint(),
self.container, self.prefix)
def upload(self, file_list):
"""Spin up thread pool to upload to swift"""
num_threads = min(len(file_list), MAX_UPLOAD_THREADS)
threads = []
queue = queuelib.Queue()
# add items to queue
for f in file_list:
queue.put(f)
for x in range(num_threads):
t = threading.Thread(target=self.post_thread, args=(queue,))
threads.append(t)
t.start()
for t in threads:
t.join()
def post_thread(self, queue):
while True:
try:
fd = queue.get_nowait()
logging.debug("%s: processing job %s",
threading.current_thread(),
fd)
self._post_file(fd)
except requests.exceptions.RequestException:
# Do our best to attempt to upload all the files
logging.exception("Error posting file after multiple attempts")
continue
except IOError:
# Do our best to attempt to upload all the files
logging.exception("Error opening file")
continue
except queuelib.Empty:
# No more work to do
return
def _post_file(self, fd):
relative_path = os.path.join(self.prefix, fd.relative_path)
headers = {}
if self.delete_after:
headers['x-delete-after'] = str(self.delete_after)
headers['content-type'] = fd.mimetype
USE_SHADE = True
for attempt in range(3):
try:
if not fd.folder:
if fd.encoding is None and fd.mimetype.startswith('text/'):
headers['content-encoding'] = 'deflate'
data = DeflateFilter(open(fd.full_path, 'rb'))
else:
data = open(fd.full_path, 'rb')
else:
data = ''
relative_path = relative_path.rstrip('/')
if USE_SHADE:
if relative_path == '':
relative_path = '/'
if USE_SHADE:
self.cloud.create_object(self.container,
name=relative_path,
data=data,
**headers)
else:
self.cloud.object_store.put(
os.path.join('/', self.container, relative_path),
data=data,
headers=headers)
break
except requests.exceptions.RequestException:
if attempt < 2:
logging.exception(
"File posting error on attempt %d" % attempt)
continue
else:
raise
def run(cloud, container, files,
indexes=True, parent_links=True, partition=False,
footer='index_footer.html',
delete_after=15552000, prefix=None,
public=True):
if prefix:
prefix = prefix.lstrip('/')
if partition and prefix:
parts = prefix.split('/')
if len(parts) > 1:
container += '_' + parts[0]
prefix = '/'.join(parts[1:])
# Create the objects to make sure the arguments are sound.
file_list = FileList()
indexer = Indexer(create_parent_links=parent_links,
append_footer=footer)
uploader = Uploader(cloud, container, prefix, delete_after,
public)
# Scan the files.
for file_path in files:
file_path = os.path.normpath(file_path)
file_list.add(file_path)
# (Possibly) make indexes.
if indexes:
file_list = indexer.make_indexes(file_list)
logging.debug("List of files prepared to upload:")
for x in file_list:
logging.debug(x)
# Upload.
uploader.upload(file_list)
return uploader.url
def ansible_main():
module = AnsibleModule(
argument_spec=dict(
cloud=dict(required=True, type='raw'),
container=dict(required=True, type='str'),
files=dict(required=True, type='list'),
partition=dict(type='bool', default=False),
indexes=dict(type='bool', default=True),
parent_links=dict(type='bool', default=True),
public=dict(type='bool', default=True),
footer=dict(type='str'),
delete_after=dict(type='int'),
prefix=dict(type='str'),
)
)
p = module.params
url = run(p.get('cloud'), p.get('container'), p.get('files'),
indexes=p.get('indexes'),
parent_links=p.get('parent_links'),
partition=p.get('partition'),
footer=p.get('footer'),
delete_after=p.get('delete_after', 15552000),
prefix=p.get('prefix'),
public=p.get('public'))
module.exit_json(changed=True,
url=url)
def cli_main():
parser = argparse.ArgumentParser(
description="Upload files to swift"
)
parser.add_argument('--verbose', action='store_true',
help='show debug information')
parser.add_argument('--no-indexes', action='store_true',
help='do not generate any indexes at all')
parser.add_argument('--no-parent-links', action='store_true',
help='do not include links back to a parent dir')
parser.add_argument('--no-public', action='store_true',
help='do not create the container as public')
parser.add_argument('--partition', action='store_true',
help='partition the prefix into multiple containers')
parser.add_argument('--append-footer', default='index_footer.html',
help='when generating an index, if the given file is '
'present in a directory, append it to the index '
'(set to "none" to disable)')
parser.add_argument('--delete-after', default=15552000,
help='Number of seconds to delete object after '
'upload. Default is 6 months (15552000 seconds) '
'and if set to 0 X-Delete-After will not be set',
type=int)
parser.add_argument('--prefix',
help='Prepend this path to the object names when '
'uploading')
parser.add_argument('cloud',
help='Name of the cloud to use when uploading')
parser.add_argument('container',
help='Name of the container to use when uploading')
parser.add_argument('files', nargs='+',
help='the file(s) to upload with recursive glob '
'matching when supplied as a string')
args = parser.parse_args()
if args.verbose:
logging.basicConfig(level=logging.DEBUG)
# Set requests log level accordingly
logging.getLogger("requests").setLevel(logging.DEBUG)
logging.captureWarnings(True)
append_footer = args.append_footer
if append_footer.lower() == 'none':
append_footer = None
url = run(args.cloud, args.container, args.files,
indexes=not args.no_indexes,
parent_links=not args.no_parent_links,
partition=args.partition,
footer=append_footer,
delete_after=args.delete_after,
prefix=args.prefix,
public=not args.no_public)
print(url)
if __name__ == '__main__':
# Avoid unactionable warnings
requestsexceptions.squelch_warnings(
requestsexceptions.InsecureRequestWarning)
if sys.stdin.isatty():
cli_main()
else:
ansible_main()
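Both ansible_main() and cli_main() reduce to a single run() call. Here is a hedged example of driving the library directly; the cloud name, prefix, and log directory are invented for illustration, and it assumes the module is importable on the Python path.

# Illustrative values only; assumes zuul_swift_upload is importable.
from zuul_swift_upload import run

url = run(
    cloud='mycloud',                    # named cloud from clouds.yaml
    container='logs',
    files=['/tmp/example-job-logs/'],   # uploaded recursively
    partition=False,
    delete_after=30 * 24 * 3600,        # expire objects after ~30 days
    prefix='check/example-job/1234',
)
print(url)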

@@ -0,0 +1,39 @@
- name: Set zuul-log-path fact
  include_role:
    name: set-zuul-log-path-fact

# Always upload (true), never upload (false) or only on failure ('failure')
- when: zuul_site_upload_logs | default(true) | bool or
        (zuul_site_upload_logs == 'failure' and not zuul_success | bool)
  block:

    # Use chmod instead of file because ansible 2.5 file with recurse and
    # follow can't really handle symlinks to .
    - name: Ensure logs are readable before uploading
      delegate_to: localhost
      command: "chmod -R u=rwX,g=rX,o=rX {{ zuul.executor.log_root }}/"
      # ANSIBLE0007 chmod used in place of argument mode to file
      tags:
        - skip_ansible_lint

    - name: Upload logs to swift
      delegate_to: localhost
      zuul_swift_upload:
        cloud: "{{ zuul_log_cloud_config }}"
        partition: "{{ zuul_log_partition }}"
        container: "{{ zuul_log_container }}"
        public: "{{ zuul_log_container_public }}"
        prefix: "{{ zuul_log_path }}"
        files:
          - "{{ zuul.executor.log_root }}/"
      register: upload_results

    - name: Return log URL to Zuul
      delegate_to: localhost
      zuul_return:
        data:
          zuul:
            log_url: "{{ upload_results.url }}"
      when: upload_results is defined
      tags:
        # Avoid "no action detected in task" linter error
        - skip_ansible_lint
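For reference, this is roughly the argument set the zuul_swift_upload task above hands to ansible_main(); the rendered values are invented placeholders for what the Jinja expressions would produce on a real job.

# Placeholder rendering of the task parameters above (not real values).
params = {
    'cloud': {'auth': {'auth_url': 'https://keystone.example.com/v3'}},
    'partition': False,                  # zuul_log_partition
    'container': 'logs',                 # zuul_log_container
    'public': True,                      # zuul_log_container_public
    'prefix': 'check/example-job/1234',  # zuul_log_path
    'files': ['/var/lib/zuul/builds/1234/work/logs/'],
}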

@@ -10,3 +10,9 @@ ansible-lint
bashate>=0.2
zuul-sphinx>=0.1.1
stestr>=1.0.0 # Apache-2.0
# For upload-logs-swift:
openstacksdk
requests
requestsexceptions
bs4
#