Revert "upload-logs-swift: Create a download script"
This reverts commit acde44818d and the testing part of b3f417a6e6.

We'd like to obtain more consensus on the download script before we
commit to this. In particular, the new zuul manifest file may make it
possible to do this without adding the feature to the log upload roles.

Change-Id: I959c44b4dac6cad6d1b3d82ba6bc0949c9c759ff
This commit is contained in:
parent 2ae8616306
commit 4f13f7c07f
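For background, the alternative hinted at in the commit message (driving downloads from the zuul manifest file rather than generating a script in the upload roles) could look roughly like the sketch below. It is purely illustrative and not part of this change; the manifest file name, URL and its assumed tree layout of "name"/"children" nodes under a top-level "tree" list are assumptions, not something this commit defines.

    # Illustrative sketch only; not part of this change.  The manifest
    # layout assumed here (a "tree" list of {"name", "children"} nodes)
    # is hypothetical.
    import json
    import os
    import urllib.request

    BASE_URL = "https://logs.example.com/build/uuid/"  # hypothetical log root


    def walk(node, prefix=""):
        """Yield relative file paths for every leaf in a manifest subtree."""
        path = os.path.join(prefix, node["name"])
        if node.get("children"):
            for child in node["children"]:
                yield from walk(child, path)
        else:
            yield path


    def download_all(dest_dir):
        """Fetch every file listed in the manifest into dest_dir."""
        with urllib.request.urlopen(BASE_URL + "zuul-manifest.json") as resp:
            manifest = json.load(resp)
        for entry in manifest.get("tree", []):
            for rel_path in walk(entry):
                target = os.path.join(dest_dir, rel_path)
                os.makedirs(os.path.dirname(target), exist_ok=True)
                urllib.request.urlretrieve(BASE_URL + rel_path, target)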
@@ -76,17 +76,3 @@ This uploads logs to an OpenStack Object Store (Swift) container.
 
    More details can be found at
    :zuul:rolevar:`set-zuul-log-path-fact.zuul_log_path_shard_build`.
-
-.. zuul:rolevar:: zuul_log_include_download_script
-   :default: False
-
-   Generate a script from ``zuul_log_download_template`` in the root
-   directory of the uploaded logs to facilitate easy bulk download.
-
-.. zuul:rolevar:: zuul_log_download_template
-   :default: templates/download-logs.sh.j2
-
-   Path to template file if ``zuul_log_include_download_script`` is
-   set. See the sample file for parameters available to the template.
-   The file will be placed in the root of the uploaded logs (with
-   ``.j2`` suffix removed).
@@ -2,5 +2,3 @@ zuul_log_partition: false
 zuul_log_container: logs
 zuul_log_container_public: true
 zuul_log_create_indexes: true
-zuul_log_include_download_script: false
-zuul_log_download_template: '{{ role_path }}/templates/download-logs.sh.j2'
@@ -1,87 +0,0 @@
-#!/bin/bash
-
-# Download all logs
-
-#
-# To use this file
-#
-#   curl "http://fakebaseurl.com/download-logs.sh" | bash
-#
-# Logs will be copied in a temporary directory as described in the
-# output. Set DOWNLOAD_DIR to an empty directory if you wish to
-# override this.
-#
-
-BASE_URL=http://fakebaseurl.com
-
-function log {
-    echo "$(date -Iseconds) | $@"
-}
-
-function save_file {
-    local file="$1"
-
-    curl -s --compressed --create-dirs -o "${file}" "${BASE_URL}/${file}"
-
-    # Using --compressed we will send an Accept-Encoding: gzip header
-    # and the data will come to us across the network compressed.
-    # However, sometimes things like OpenStack's log server will send
-    # .gz files (as stored on its disk) uncompressed, so we check if
-    # this really looks like an ASCII file and rename for clarity.
-    if [[ "${file}" == *.gz ]]; then
-        local type=$(file "${file}")
-        if [[ "${type}" =~ "ASCII text" ]] || [[ "${type}" =~ "Unicode text" ]]; then
-            local new_name=${file%.gz}
-            log "Renaming to ${new_name}"
-            mv "${file}" "${new_name}"
-        fi
-    fi
-
-}
-
-if [[ -z "${DOWNLOAD_DIR}" ]]; then
-    DOWNLOAD_DIR=$(mktemp -d --tmpdir zuul-logs.XXXXXX)
-fi
-log "Saving logs to ${DOWNLOAD_DIR}"
-
-pushd "${DOWNLOAD_DIR}" > /dev/null
-
-
-
-log "Getting ${BASE_URL}/job-output.json [ 0001 / 0011 ]"
-save_file "job-output.json"
-
-log "Getting ${BASE_URL}/Ꮓບບξ-unicode.txt [ 0002 / 0011 ]"
-save_file "Ꮓບບξ-unicode.txt"
-
-log "Getting ${BASE_URL}/controller/compressed.gz [ 0003 / 0011 ]"
-save_file "controller/compressed.gz"
-
-log "Getting ${BASE_URL}/controller/cpu-load.svg [ 0004 / 0011 ]"
-save_file "controller/cpu-load.svg"
-
-log "Getting ${BASE_URL}/controller/journal.xz [ 0005 / 0011 ]"
-save_file "controller/journal.xz"
-
-log "Getting ${BASE_URL}/controller/service_log.txt [ 0006 / 0011 ]"
-save_file "controller/service_log.txt"
-
-log "Getting ${BASE_URL}/controller/syslog [ 0007 / 0011 ]"
-save_file "controller/syslog"
-
-log "Getting ${BASE_URL}/controller/subdir/foo::3.txt [ 0008 / 0011 ]"
-save_file "controller/subdir/foo::3.txt"
-
-log "Getting ${BASE_URL}/controller/subdir/subdir.txt [ 0009 / 0011 ]"
-save_file "controller/subdir/subdir.txt"
-
-log "Getting ${BASE_URL}/zuul-info/inventory.yaml [ 0010 / 0011 ]"
-save_file "zuul-info/inventory.yaml"
-
-log "Getting ${BASE_URL}/zuul-info/zuul-info.controller.txt [ 0011 / 0011 ]"
-save_file "zuul-info/zuul-info.controller.txt"
-
-
-popd >/dev/null
-
-log "Download complete!"
@@ -30,7 +30,6 @@ import io
 import logging
 import mimetypes
 import os
-import jinja2
 try:
     import queue as queuelib
 except ImportError:
@@ -55,7 +54,6 @@ import requests.exceptions
 import requestsexceptions
 import keystoneauth1.exceptions
 
-from ansible.module_utils._text import to_text
 from ansible.module_utils.basic import AnsibleModule
 
 try:
@@ -267,15 +265,13 @@ class FileDetail():
     to push to swift.
     """
 
-    def __init__(self, full_path, relative_path,
-                 filename=None, is_index=False):
+    def __init__(self, full_path, relative_path, filename=None):
         """
         Args:
             full_path (str): The absolute path to the file on disk.
             relative_path (str): The relative path from the artifacts source
                                  used for links.
             filename (str): An optional alternate filename in links.
-            is_index (bool): Is this file an index
         """
         # Make FileNotFoundError exception to be compatible with python2
         try:
@@ -289,7 +285,6 @@ class FileDetail():
         else:
             self.filename = filename
         self.relative_path = relative_path
-        self.is_index = is_index
 
         if self.full_path and os.path.isfile(self.full_path):
             mime_guess, encoding = mimetypes.guess_type(self.full_path)
@@ -310,8 +305,7 @@ class FileDetail():
 
     def __repr__(self):
         t = 'Folder' if self.folder else 'File'
-        return '<%s %s%s>' % (t, self.relative_path,
-                              ' (index)' if self.is_index else '')
+        return '<%s %s>' % (t, self.relative_path)
 
 
 class FileList(Sequence):
@@ -417,7 +411,6 @@ class Indexer():
       FileList
 
     - make_indexes() : make index.html in folders
-    - make_download_script() : make a script to download all logs
     """
     def __init__(self, file_list):
         '''
@@ -537,8 +530,7 @@ class Indexer():
             if full_path:
                 filename = os.path.basename(full_path)
                 relative_name = os.path.join(folder, filename)
-                indexes[folder] = FileDetail(full_path, relative_name,
-                                             is_index=True)
+                indexes[folder] = FileDetail(full_path, relative_name)
 
         # This appends the index file at the end of the group of files
         # for each directory.
@@ -561,41 +553,6 @@ class Indexer():
             new_list.reverse()
         self.file_list.file_list = new_list
-
-    def make_download_script(self, base_url, download_template):
-        '''Make a download script from template
-
-        Note since you need the base_url, it really only makes sense
-        to call this after the Uploader() is initalised.
-
-        Args:
-          base_url (str): The base URL to prefix
-          download_template (str): Path to a jinja2 template
-
-        Return:
-          None; a file with the same name as the template (stripped of
-          .j2 if present) is added to self.file_list for upload.
-        '''
-        # Prune the list to just be files, no indexes (this should run
-        # before indexing anyway)
-        download_files = [f for f in self.file_list
-                          if not f.folder and not f.is_index]
-        output_filename = os.path.basename(download_template[:-3]
-                                           if download_template.endswith('.j2')
-                                           else download_template)
-        output = os.path.join(self.file_list.get_tempdir(), output_filename)
-
-        with open(download_template) as f, open(output, 'wb') as output:
-            logging.debug("Writing template %s" % output.name)
-            template = jinja2.Template(f.read())
-            rendered = template.stream(
-                base_url=base_url.rstrip('/'),
-                # jinja wants unicode input
-                file_list=[to_text(f.relative_path) for f in download_files])
-            rendered.dump(output, encoding='utf-8')
-
-        download_script = FileDetail(output.name, output_filename)
-        self.file_list.file_list.append(download_script)
 
 
 class GzipFilter():
     chunk_size = 16384
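The removed make_download_script() above renders the Jinja2 template lazily with stream() and writes the chunks straight to the output file with dump() instead of building the whole script in memory. A minimal standalone sketch of that rendering pattern, with made-up template text, file list and output name, is:

    # Minimal sketch of the stream()/dump() rendering pattern used above;
    # the template text, file list and output name are made up.
    import jinja2

    template = jinja2.Template(
        "BASE_URL={{ base_url }}\n"
        "{% for file in file_list %}save_file \"{{ file }}\"\n{% endfor %}")

    rendered = template.stream(
        base_url="https://logs.example.com/build/uuid",
        file_list=["job-output.json", "controller/syslog"])

    # dump() accepts a filename or a binary file object when an encoding
    # is given, and writes the rendered chunks as they are produced.
    with open("download-logs.sh", "wb") as out:
        rendered.dump(out, encoding="utf-8")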
@@ -813,7 +770,7 @@ class Uploader():
 def run(cloud, container, files,
         indexes=True, parent_links=True, topdir_parent_link=False,
         partition=False, footer='index_footer.html', delete_after=15552000,
-        prefix=None, public=True, dry_run=False, download_template=''):
+        prefix=None, public=True, dry_run=False):
 
     if prefix:
         prefix = prefix.lstrip('/')
@@ -829,16 +786,8 @@ def run(cloud, container, files,
     for file_path in files:
         file_list.add(file_path)
 
-    # Upload.
-    uploader = Uploader(cloud, container, prefix, delete_after,
-                        public, dry_run)
-
     indexer = Indexer(file_list)
 
-    # (Possibly) make download script
-    if download_template:
-        indexer.make_download_script(uploader.url, download_template)
-
     # (Possibly) make indexes.
     if indexes:
         indexer.make_indexes(create_parent_links=parent_links,
@@ -849,6 +798,9 @@ def run(cloud, container, files,
     for x in file_list:
         logging.debug(x)
 
+    # Upload.
+    uploader = Uploader(cloud, container, prefix, delete_after,
+                        public, dry_run)
     uploader.upload(file_list)
     return uploader.url
 
@@ -867,7 +819,6 @@ def ansible_main():
             footer=dict(type='str'),
             delete_after=dict(type='int'),
             prefix=dict(type='str'),
-            download_template=dict(type='str'),
         )
     )
 
@@ -882,8 +833,7 @@ def ansible_main():
                   footer=p.get('footer'),
                   delete_after=p.get('delete_after', 15552000),
                   prefix=p.get('prefix'),
-                  public=p.get('public'),
-                  download_template=p.get('download_template'))
+                  public=p.get('public'))
     except (keystoneauth1.exceptions.http.HttpError,
             requests.exceptions.RequestException):
         s = "Error uploading to %s.%s" % (cloud.name, cloud.config.region_name)
@@ -925,9 +875,6 @@ def cli_main():
                              'upload. Default is 6 months (15552000 seconds) '
                              'and if set to 0 X-Delete-After will not be set',
                         type=int)
-    parser.add_argument('--download-template', default='',
-                        help='Path to a Jinja2 template that will be filled '
-                             'out to create an automatic download script')
     parser.add_argument('--prefix',
                         help='Prepend this path to the object names when '
                              'uploading')
@@ -965,8 +912,7 @@ def cli_main():
               delete_after=args.delete_after,
               prefix=args.prefix,
               public=not args.no_public,
-              dry_run=args.dry_run,
-              download_template=args.download_template)
+              dry_run=args.dry_run)
 
     print(url)
 
@@ -16,12 +16,6 @@
   tags:
     - skip_ansible_lint
 
-- name: Set download template
-  set_fact:
-    download_template: "{{ zuul_log_download_template }}"
-  when:
-    - zuul_log_include_download_script
-
 - name: Upload logs to swift
   delegate_to: localhost
   zuul_swift_upload:
@@ -34,27 +28,8 @@
     files:
       - "{{ zuul.executor.log_root }}/"
     delete_after: "{{ zuul_log_delete_after | default(omit) }}"
-    download_template: "{{ download_template | default(omit) }}"
   register: upload_results
 
-- name: Get download script name
-  set_fact:
-    download_script: "{{ zuul_log_download_template | basename | regex_replace('\\.j2$') }}"
-  when:
-    - zuul_log_include_download_script
-
-- name: Set download template artifact
-  zuul_return:
-    data:
-      zuul:
-        artifacts:
-          - name: Download all logs
-            url: '{{ download_script }}'
-            metadata:
-              command: 'curl "{{ upload_results.url }}/{{ download_script }}" | bash'
-  when:
-    - zuul_log_include_download_script
-
 - name: Return log URL to Zuul
   delegate_to: localhost
   zuul_return:
@@ -1,57 +0,0 @@
-#!/bin/bash
-
-# Download all logs
-
-#
-# To use this file
-#
-#   curl "{{ base_url }}/download-logs.sh" | bash
-#
-# Logs will be copied in a temporary directory as described in the
-# output. Set DOWNLOAD_DIR to an empty directory if you wish to
-# override this.
-#
-
-BASE_URL={{ base_url }}
-
-function log {
-    echo "$(date -Iseconds) | $@"
-}
-
-function save_file {
-    local file="$1"
-
-    curl -s --compressed --create-dirs -o "${file}" "${BASE_URL}/${file}"
-
-    # Using --compressed we will send an Accept-Encoding: gzip header
-    # and the data will come to us across the network compressed.
-    # However, sometimes things like OpenStack's log server will send
-    # .gz files (as stored on its disk) uncompressed, so we check if
-    # this really looks like an ASCII file and rename for clarity.
-    if [[ "${file}" == *.gz ]]; then
-        local type=$(file "${file}")
-        if [[ "${type}" =~ "ASCII text" ]] || [[ "${type}" =~ "Unicode text" ]]; then
-            local new_name=${file%.gz}
-            log "Renaming to ${new_name}"
-            mv "${file}" "${new_name}"
-        fi
-    fi
-
-}
-
-if [[ -z "${DOWNLOAD_DIR}" ]]; then
-    DOWNLOAD_DIR=$(mktemp -d --tmpdir zuul-logs.XXXXXX)
-fi
-log "Saving logs to ${DOWNLOAD_DIR}"
-
-pushd "${DOWNLOAD_DIR}" > /dev/null
-
-{% set total_files = file_list | length %}
-{% for file in file_list %}
-log "Getting ${BASE_URL}/{{ '%-80s'|format(file) }} [ {{ '%04d'|format(loop.index) }} / {{ '%04d'|format(total_files) }} ]"
-save_file "{{ file }}"
-{% endfor %}
-
-popd >/dev/null
-
-log "Download complete!"