Write a logging config file and pass it to callbacks

We need to pass a working logging config to zuul_stream and ara so that
alembic migrations don't step on pre-playbook output.

Write a logging config using JSON, then pass its location in env vars so that
zuul_stream and ara can pick it up and pass it to dictConfig.
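In miniature, the handoff looks like this: the executor serializes the dict
config to disk and exports its path, and each callback loads it and hands it
to dictConfig. A standalone sketch of that flow, with an illustrative
filename and a stand-in config dict:

    import json
    import logging.config
    import os

    # Executor side: serialize the logging config for the job.
    config = {'version': 1, 'formatters': {}, 'handlers': {}, 'loggers': {}}
    with open('logging.json', 'w') as f:
        json.dump(config, f, indent=2)

    # Callback side (zuul_stream, zuul_json, ara): pick it up and apply it.
    with open(os.environ.get('ZUUL_JOB_LOG_CONFIG', 'logging.json')) as f:
        logging.config.dictConfig(json.load(f))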

In support of this, create a LoggingConfig class so that we don't have
to copy key names and logic between executor.server, zuul_stream and
zuul_json. Since we have one, go ahead and use it for the server logging
config too; that gives the servers a slightly richer default logging
config for folks who don't supply a logging config file of their own.

The log config processing has to live in zuul.ansible because it's
needed inside the bubblewrap, where the rest of zuul is not on the
Python path.

Change-Id: I3d7ac797fd2ee2c53f5fbd79d3ee048be6ca9366
Monty Taylor, 2017-08-25 19:14:25 -05:00
commit 7093b49f04 (parent 22dd6506f4)
5 changed files with 288 additions and 23 deletions

zuul/ansible/callback/zuul_json.py

@@ -33,6 +33,8 @@ except ImportError:
# It's here in 2.3
from ansible.vars.manager import strip_internal_keys
from zuul.ansible import logconfig
class CallbackModule(CallbackBase):
CALLBACK_VERSION = 2.0
@@ -45,8 +47,12 @@ class CallbackModule(CallbackBase):
self.results = []
self.output = []
self.playbook = {}
+        logging_config = logconfig.load_job_config(
+            os.environ['ZUUL_JOB_LOG_CONFIG'])
         self.output_path = os.path.splitext(
-            os.environ['ZUUL_JOB_OUTPUT_FILE'])[0] + '.json'
+            logging_config.job_output_file)[0] + '.json'
         # For now, just read in the old file and write it all out again.
         # This may well not scale from a memory perspective, but let's see
         # how it goes.
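As an aside, the callback's JSON path comes from swapping the extension on
the configured job output file; a quick illustration of the splitext idiom
used above:

    import os

    # 'job-output.txt' -> 'job-output.json'
    output_path = os.path.splitext('job-output.txt')[0] + '.json'
    assert output_path == 'job-output.json'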

zuul/ansible/callback/zuul_stream.py

@@ -21,6 +21,7 @@ from __future__ import absolute_import
import datetime
import logging
import logging.config
import json
import os
import socket
@@ -30,6 +31,8 @@ import uuid
from ansible.plugins.callback import default
from zuul.ansible import logconfig
LOG_STREAM_PORT = 19885
@@ -108,14 +111,13 @@ class CallbackModule(default.CallbackModule):
# ansible appends timestamp, user and pid to the log lines emitted
# to the log file. We're doing other things though, so we don't want
# this.
-        path = os.environ['ZUUL_JOB_OUTPUT_FILE']
+        logging_config = logconfig.load_job_config(
+            os.environ['ZUUL_JOB_LOG_CONFIG'])
         if self._display.verbosity > 2:
-            level = logging.DEBUG
-        else:
-            level = logging.INFO
-        logging.basicConfig(filename=path, level=level, format='%(message)s')
-        # Squelch logging from ara so we don't get the initializing message
-        logging.getLogger('ara').setLevel(logging.ERROR)
+            logging_config.setDebug()
+        logging_config.apply()
self._logger = logging.getLogger('zuul.executor.ansible')

zuul/ansible/logconfig.py (new file, 237 lines)

@@ -0,0 +1,237 @@
# Copyright 2017 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import copy
import logging.config
import json
import os
import yaml
_DEFAULT_JOB_LOGGING_CONFIG = {
'version': 1,
'formatters': {
'plain': {'format': '%(message)s'},
'result': {'format': 'RESULT %(message)s'},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'stream': 'ext://sys.stdout',
'level': 'INFO',
'formatter': 'plain',
},
'result': {
# result is returned to subprocess stdout and is used to pass
# control information from the callback back to the executor
'class': 'logging.StreamHandler',
'stream': 'ext://sys.stdout',
'level': 'INFO',
'formatter': 'result',
},
'jobfile': {
# used by executor to emit log file
'class': 'logging.FileHandler',
'level': 'INFO',
'formatter': 'plain',
},
},
'loggers': {
'zuul.executor.ansible.result': {
'handlers': ['result'],
'level': 'INFO',
},
'zuul.executor.ansible': {
'handlers': ['jobfile'],
'level': 'INFO',
},
'sqlalchemy.engine': {
'handlers': ['console'],
'level': 'WARN',
},
'alembic': {
'handlers': ['console'],
'level': 'WARN',
},
},
'root': {'handlers': []},
}
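# The 'result' handler above is what lets the zuul.executor.ansible.result
# logger emit 'RESULT '-prefixed lines on stdout; the executor's stdout
# reader (see zuul/executor/server.py below) filters those lines out of
# normal job output.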
_DEFAULT_SERVER_LOGGING_CONFIG = {
'version': 1,
'formatters': {
'simple': {
'format': '%(asctime)s %(levelname)s %(name)s: %(message)s'
},
},
'handlers': {
'console': {
# Used for printing to stdout
'class': 'logging.StreamHandler',
'stream': 'ext://sys.stdout',
'level': 'WARNING',
'formatter': 'simple',
},
},
'loggers': {
'zuul': {
'handlers': ['console'],
'level': 'DEBUG',
},
'sqlalchemy.engine': {
'handlers': ['console'],
'level': 'WARN',
},
'gerrit': {
'handlers': ['console'],
'level': 'INFO',
},
'gear': {
'handlers': ['console'],
'level': 'WARN',
},
'alembic': {
'handlers': ['console'],
'level': 'WARN',
},
},
'root': {
'handlers': ['console'],
'level': 'WARN',
},
}
_DEFAULT_SERVER_FILE_HANDLERS = {
'normal': {
# Used for writing normal log files
'class': 'logging.handlers.WatchedFileHandler',
# This will get set to something real by DictLoggingConfig.server
'filename': '/var/log/zuul/{server}.log',
'level': 'INFO',
'formatter': 'simple',
},
}
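# The {server} placeholder above is filled in by ServerLoggingConfig.server
# below; e.g. server='scheduler' yields /var/log/zuul/scheduler.log.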
def _read_config_file(filename: str):
    if not os.path.exists(filename):
        raise ValueError("Unable to read logging config file at %s" % filename)
    if os.path.splitext(filename)[1] in ('.yml', '.yaml', '.json'):
        with open(filename, 'r') as f:
            return yaml.safe_load(f)
    return filename
def load_config(filename: str):
config = _read_config_file(filename)
if isinstance(config, dict):
return DictLoggingConfig(config)
return FileLoggingConfig(filename)
def load_job_config(filename: str):
return JobLoggingConfig(_read_config_file(filename))
class LoggingConfig(object, metaclass=abc.ABCMeta):
@abc.abstractmethod
def apply(self):
"""Apply the config information to the current logging config."""
class DictLoggingConfig(LoggingConfig, metaclass=abc.ABCMeta):
def __init__(self, config):
self._config = config
def apply(self):
logging.config.dictConfig(self._config)
    def writeJson(self, filename: str):
        with open(filename, 'w') as f:
            f.write(json.dumps(self._config, indent=2))
class JobLoggingConfig(DictLoggingConfig):
def __init__(self, config=None, job_output_file=None):
        if not config:
            # deepcopy: the default dict is nested, and instance mutations
            # must not leak back into the shared module-level default
            config = copy.deepcopy(_DEFAULT_JOB_LOGGING_CONFIG)
super(JobLoggingConfig, self).__init__(config=config)
if job_output_file:
self.job_output_file = job_output_file
def setDebug(self):
# Set the level for zuul.executor.ansible to DEBUG
self._config['loggers']['zuul.executor.ansible']['level'] = 'DEBUG'
@property
def job_output_file(self) -> str:
return self._config['handlers']['jobfile']['filename']
@job_output_file.setter
def job_output_file(self, filename: str):
self._config['handlers']['jobfile']['filename'] = filename
class ServerLoggingConfig(DictLoggingConfig):
def __init__(self, config=None, server=None):
        if not config:
            # deepcopy: the default dict is nested, and instance mutations
            # must not leak back into the shared module-level default
            config = copy.deepcopy(_DEFAULT_SERVER_LOGGING_CONFIG)
super(ServerLoggingConfig, self).__init__(config=config)
if server:
self.server = server
@property
def server(self):
return self._server
@server.setter
def server(self, server):
self._server = server
# Add the normal file handler. It's not included in the default
# config above because we're templating out the filename. Also, we
# only want to add the handler if we're actually going to use it.
        for name, handler in _DEFAULT_SERVER_FILE_HANDLERS.items():
            server_handler = handler.copy()
            server_handler['filename'] = server_handler['filename'].format(
                server=server)
            self._config['handlers'][name] = server_handler
        # Change everything configured to write to stdout to write to
        # log files instead. This leaves root going to console, which is
        # how the loggers that infra uses work.
# NOTE(mordred) root -> console may not actually be intentional. It can
# be changed by changing
# _DEFAULT_SERVER_LOGGING_CONFIG['root']['handlers']
for logger in self._config['loggers'].values():
if logger['handlers'] == ['console']:
logger['handlers'] = ['normal']
class FileLoggingConfig(LoggingConfig):
def __init__(self, filename):
self._filename = filename
def apply(self):
logging.config.fileConfig(self._filename)
if __name__ == '__main__':
    # Use this to emit a working logging config for testing zuul_stream
    # locally.
    config = JobLoggingConfig(
        job_output_file=os.path.abspath('job-output.txt'))
    config.writeJson('logging.json')
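
Taken together, a short usage sketch of the two dict-based configs; the
server name and paths here are illustrative:

    from zuul.ansible import logconfig

    # Server side: setting server templates the WatchedFileHandler filename
    # and repoints the stdout loggers at it.
    server_config = logconfig.ServerLoggingConfig(server='scheduler')
    # -> the 'normal' handler now writes /var/log/zuul/scheduler.log

    # Job side: aim the jobfile handler at the job output file, then
    # serialize the whole dict for the callbacks running inside bubblewrap.
    job_config = logconfig.JobLoggingConfig(job_output_file='job-output.txt')
    job_config.writeJson('logging.json')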

zuul/cmd/__init__.py

@@ -26,8 +26,8 @@ import traceback
yappi = extras.try_import('yappi')
from zuul.ansible import logconfig
import zuul.lib.connections
from zuul.lib import yamlutil as yaml
# Do not import modules that will pull in paramiko which must not be
# imported until after the daemonization.
@@ -84,19 +84,18 @@ class ZuulApp(object):
     def setup_logging(self, section, parameter):
         if self.config.has_option(section, parameter):
             fp = os.path.expanduser(self.config.get(section, parameter))
-            if not os.path.exists(fp):
-                raise Exception("Unable to read logging config file at %s" %
-                                fp)
-            if os.path.splitext(fp)[1] in ('.yml', '.yaml'):
-                with open(fp, 'r') as f:
-                    logging.config.dictConfig(yaml.safe_load(f))
-            else:
-                logging.config.fileConfig(fp)
+            logging_config = logconfig.load_config(fp)
         else:
-            logging.basicConfig(level=logging.DEBUG)
+            # If someone runs in the foreground and doesn't give a logging
+            # config, leave the config set to emit to stdout.
+            if hasattr(self.args, 'nodaemon') and self.args.nodaemon:
+                logging_config = logconfig.ServerLoggingConfig()
+            else:
+                # Setting a server value updates the defaults to use
+                # WatchedFileHandler on /var/log/zuul/{server}-debug.log
+                # and /var/log/zuul/{server}.log
+                logging_config = logconfig.ServerLoggingConfig(server=section)
+        logging_config.apply()
def configure_connections(self, source_only=False):
self.connections = zuul.lib.connections.ConnectionRegistry()
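
A runnable sketch of how the new load_config dispatch behaves, assuming zuul
is importable; the temp file stands in for a real config path:

    import tempfile

    from zuul.ansible import logconfig

    # A .yml/.yaml/.json suffix is parsed with yaml.safe_load and applied
    # via dictConfig; any other suffix goes to logging.config.fileConfig.
    with tempfile.NamedTemporaryFile(
            mode='w', suffix='.json', delete=False) as f:
        f.write('{"version": 1, "root": {"handlers": []}}')
    logging_config = logconfig.load_config(f.name)
    logging_config.apply()  # dictConfig under the hood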

zuul/executor/server.py

@@ -280,6 +280,7 @@ class JobDir(object):
'''
# root
# ansible (mounted in bwrap read-only)
# logging.json
# inventory.yaml
# .ansible (mounted in bwrap read-write)
# fact-cache/localhost
@@ -336,6 +337,7 @@
pass
self.known_hosts = os.path.join(ssh_dir, 'known_hosts')
self.inventory = os.path.join(self.ansible_root, 'inventory.yaml')
self.logging_json = os.path.join(self.ansible_root, 'logging.json')
self.playbooks = [] # The list of candidate playbooks
self.playbook = None # A pointer to the candidate we have chosen
self.pre_playbooks = []
@@ -1004,6 +1006,7 @@ class AnsibleJob(object):
self.preparePlaybooks(args)
self.prepareAnsibleFiles(args)
self.writeLoggingConfig()
data = {
# TODO(mordred) worker_name is needed as a unique name for the
@@ -1400,6 +1403,14 @@
for key in node['host_keys']:
known_hosts.write('%s\n' % key)
def writeLoggingConfig(self):
self.log.debug("Writing logging config for job %s %s",
self.jobdir.job_output_file,
self.jobdir.logging_json)
logging_config = zuul.ansible.logconfig.JobLoggingConfig(
job_output_file=self.jobdir.job_output_file)
logging_config.writeJson(self.jobdir.logging_json)
def writeAnsibleConfig(self, jobdir_playbook, playbook):
trusted = jobdir_playbook.trusted
@@ -1482,7 +1493,9 @@
config_file = playbook.ansible_config
env_copy = os.environ.copy()
env_copy.update(self.ssh_agent.env)
-        env_copy['ZUUL_JOB_OUTPUT_FILE'] = self.jobdir.job_output_file
+        if ara_callbacks:
+            env_copy['ARA_LOG_CONFIG'] = self.jobdir.logging_json
+        env_copy['ZUUL_JOB_LOG_CONFIG'] = self.jobdir.logging_json
env_copy['ZUUL_JOBDIR'] = self.jobdir.root
pythonpath = env_copy.get('PYTHONPATH')
if pythonpath:
@@ -1550,7 +1563,15 @@
("Ansible timeout exceeded",))
watchdog.start()
try:
-            for idx, line in enumerate(iter(self.proc.stdout.readline, b'')):
+            # Use manual idx instead of enumerate so that RESULT lines
+            # don't count towards BUFFER_LINES_FOR_SYNTAX
+            idx = 0
+            for line in iter(self.proc.stdout.readline, b''):
+                if line.startswith(b'RESULT'):
+                    # TODO(mordred) Process result commands if sent
+                    continue
+                else:
+                    idx += 1
+                    if idx < BUFFER_LINES_FOR_SYNTAX:
+                        syntax_buffer.append(line)
line = line[:1024].rstrip()