Sync oslo-incubator code

Sync common code from oslo-incubator. The main reason for this sync is to
remove usage of the incubator 'log' module from the manila codebase, because
we already use the 'log' module from the 'oslo_log' library.
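
For illustration only, a minimal sketch of what this swap means at a typical
call site (module names are taken from this change; exact call sites differ
per file):

    # Old pattern (removed by this sync): the incubator copy of 'log'.
    # from manila.openstack.common import log as logging

    # New pattern: the 'log' module from the oslo_log library.
    from oslo_log import log as logging

    # Module-level logger; the logger API (LOG.debug, LOG.warning, ...)
    # stays the same, which keeps the swap mostly mechanical.
    LOG = logging.getLogger(__name__)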

The head of the oslo-incubator code used for this sync was at the
following commit:

commit f5646edc61b9653d7ff71ed0177ed77811bbdcd0
Author: Elena Ezhova <eezhova@mirantis.com>
Date:   Tue Apr 7 17:46:16 2015 +0300

Additional changes:
- Added explicit registration of 'oslo_log.log' options to all manila scripts (see the sketch below)
- Module 'manila.openstack.common.log' removed as unused
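
The registration pattern added to the scripts looks roughly like the
following simplified sketch (the real entry points also pass manila's
version string to CONF; it is omitted here to keep the example
self-contained):

    import sys

    from oslo_config import cfg
    from oslo_log import log

    CONF = cfg.CONF


    def main():
        # Make oslo_log's options (log-file, debug, use-syslog, ...) known
        # to the config object before the command line and config files
        # are parsed.
        log.register_options(CONF)
        CONF(sys.argv[1:], project='manila')
        # Configure logging for the 'manila' project from the parsed
        # options.
        log.setup(CONF, "manila")


    if __name__ == '__main__':
        main()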

Change-Id: I5b98d3863329227531fdb94ad7dbf5b3e5c51141
Author: Valeriy Ponomaryov
Date:   2015-04-21 12:00:20 +03:00
parent df03bbbd6d
commit 94face22ea
25 changed files with 98 additions and 778 deletions


@ -47,6 +47,7 @@ CONF = cfg.CONF
def main():
log.register_options(CONF)
CONF(sys.argv[1:], project='manila',
version=version.version_string())
log.setup(CONF, "manila")


@ -42,6 +42,7 @@ CONF = cfg.CONF
def main():
log.register_options(CONF)
CONF(sys.argv[1:], project='manila',
version=version.version_string())
config.verify_share_protocols()


@ -37,6 +37,7 @@ CONF = cfg.CONF
def main():
log.register_options(CONF)
CONF(sys.argv[1:], project='manila',
version=version.version_string())
log.setup(CONF, "manila")


@ -37,6 +37,7 @@ CONF = cfg.CONF
def main():
log.register_options(CONF)
CONF(sys.argv[1:], project='manila',
version=version.version_string())
log.setup(CONF, "manila")


@ -27,6 +27,7 @@ stepping stone.
import socket
from oslo_config import cfg
from oslo_log import log
import six
from manila.common import constants
@ -34,6 +35,7 @@ from manila import exception
from manila.i18n import _
CONF = cfg.CONF
log.register_options(CONF)
def _get_my_ip():


@ -16,15 +16,15 @@ See http://docs.openstack.org/developer/oslo.i18n/usage.html
"""
import oslo.i18n
try:
import oslo_i18n
# NOTE(dhellmann): This reference to o-s-l-o will be replaced by the
# application name when this module is synced into the separate
# repository. It is OK to have more than one translation function
# using the same domain, since there will still only be one message
# catalog.
_translators = oslo.i18n.TranslatorFactory(domain='manila')
_translators = oslo_i18n.TranslatorFactory(domain='manila')
# The primary translation function using the well-known name "_"
_ = _translators.primary
@ -38,3 +38,8 @@ _LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
except ImportError:
# NOTE(dims): Support for cases where a project wants to use
# code from oslo-incubator, but is not ready to be internationalized
# (like tempest)
_ = _LI = _LW = _LE = _LC = lambda x: x


@ -16,21 +16,21 @@
from __future__ import print_function
import copy
import errno
import gc
import logging
import os
import pprint
import socket
import sys
import traceback
import eventlet
import eventlet.backdoor
import greenlet
from oslo.config import cfg
from oslo_config import cfg
from manila.openstack.common._i18n import _LI
from manila.openstack.common import log as logging
help_for_backdoor_port = (
"Acceptable values are 0, <port>, and <start>:<end>, where 0 results "
@ -49,6 +49,12 @@ CONF.register_opts(eventlet_backdoor_opts)
LOG = logging.getLogger(__name__)
def list_opts():
"""Entry point for oslo-config-generator.
"""
return [(None, copy.deepcopy(eventlet_backdoor_opts))]
class EventletBackdoorConfigValueError(Exception):
def __init__(self, port_range, help_msg, ex):
msg = ('Invalid backdoor_port configuration %(range)s: %(ex)s. '


@ -15,25 +15,27 @@
import contextlib
import errno
import logging
import os
import stat
import tempfile
from oslo.utils import excutils
from manila.openstack.common import log as logging
from oslo_utils import excutils
LOG = logging.getLogger(__name__)
_FILE_CACHE = {}
DEFAULT_MODE = stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO
def ensure_tree(path):
def ensure_tree(path, mode=DEFAULT_MODE):
"""Create a directory (and any ancestor directories required)
:param path: Directory to create
:param mode: Directory creation permissions
"""
try:
os.makedirs(path)
os.makedirs(path, mode)
except OSError as exc:
if exc.errno == errno.EEXIST:
if not os.path.isdir(path):


@ -1,710 +0,0 @@
# Copyright 2011 OpenStack Foundation.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""OpenStack logging handler.
This module adds to logging functionality by adding the option to specify
a context object when calling the various log methods. If the context object
is not specified, default formatting is used. Additionally, an instance uuid
may be passed as part of the log message, which is intended to make it easier
for admins to find messages related to a specific instance.
It also allows setting of formatting information through conf.
"""
import inspect
import itertools
import logging
import logging.config
import logging.handlers
import os
import socket
import sys
import traceback
from oslo.config import cfg
from oslo.serialization import jsonutils
from oslo.utils import importutils
import six
from six import moves
_PY26 = sys.version_info[0:2] == (2, 6)
from manila.openstack.common._i18n import _
from manila.openstack.common import local
_DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
common_cli_opts = [
cfg.BoolOpt('debug',
short='d',
default=False,
help='Print debugging output (set logging level to '
'DEBUG instead of default WARNING level).'),
cfg.BoolOpt('verbose',
short='v',
default=False,
help='Print more verbose output (set logging level to '
'INFO instead of default WARNING level).'),
]
logging_cli_opts = [
cfg.StrOpt('log-config-append',
metavar='PATH',
deprecated_name='log-config',
help='The name of a logging configuration file. This file '
'is appended to any existing logging configuration '
'files. For details about logging configuration files, '
'see the Python logging module documentation.'),
cfg.StrOpt('log-format',
metavar='FORMAT',
help='DEPRECATED. '
'A logging.Formatter log message format string which may '
'use any of the available logging.LogRecord attributes. '
'This option is deprecated. Please use '
'logging_context_format_string and '
'logging_default_format_string instead.'),
cfg.StrOpt('log-date-format',
default=_DEFAULT_LOG_DATE_FORMAT,
metavar='DATE_FORMAT',
help='Format string for %%(asctime)s in log records. '
'Default: %(default)s .'),
cfg.StrOpt('log-file',
metavar='PATH',
deprecated_name='logfile',
help='(Optional) Name of log file to output to. '
'If no default is set, logging will go to stdout.'),
cfg.StrOpt('log-dir',
deprecated_name='logdir',
help='(Optional) The base directory used for relative '
'--log-file paths.'),
cfg.BoolOpt('use-syslog',
default=False,
help='Use syslog for logging. '
'Existing syslog format is DEPRECATED during I, '
'and will change in J to honor RFC5424.'),
cfg.BoolOpt('use-syslog-rfc-format',
# TODO(bogdando) remove or use True after existing
# syslog format deprecation in J
default=False,
help='(Optional) Enables or disables syslog rfc5424 format '
'for logging. If enabled, prefixes the MSG part of the '
'syslog message with APP-NAME (RFC5424). The '
'format without the APP-NAME is deprecated in I, '
'and will be removed in J.'),
cfg.StrOpt('syslog-log-facility',
default='LOG_USER',
help='Syslog facility to receive log lines.')
]
generic_log_opts = [
cfg.BoolOpt('use_stderr',
default=True,
help='Log output to standard error.')
]
DEFAULT_LOG_LEVELS = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN',
'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO',
'oslo.messaging=INFO', 'iso8601=WARN',
'requests.packages.urllib3.connectionpool=WARN',
'urllib3.connectionpool=WARN', 'websocket=WARN',
"keystonemiddleware=WARN", "routes.middleware=WARN",
"stevedore=WARN"]
log_opts = [
cfg.StrOpt('logging_context_format_string',
default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
'%(name)s [%(request_id)s %(user_identity)s] '
'%(instance)s%(message)s',
help='Format string to use for log messages with context.'),
cfg.StrOpt('logging_default_format_string',
default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
'%(name)s [-] %(instance)s%(message)s',
help='Format string to use for log messages without context.'),
cfg.StrOpt('logging_debug_format_suffix',
default='%(funcName)s %(pathname)s:%(lineno)d',
help='Data to append to log format when level is DEBUG.'),
cfg.StrOpt('logging_exception_prefix',
default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s '
'%(instance)s',
help='Prefix each line of exception output with this format.'),
cfg.ListOpt('default_log_levels',
default=DEFAULT_LOG_LEVELS,
help='List of logger=LEVEL pairs.'),
cfg.BoolOpt('publish_errors',
default=False,
help='Enables or disables publication of error events.'),
cfg.BoolOpt('fatal_deprecations',
default=False,
help='Enables or disables fatal status of deprecations.'),
# NOTE(mikal): there are two options here because sometimes we are handed
# a full instance (and could include more information), and other times we
# are just handed a UUID for the instance.
cfg.StrOpt('instance_format',
default='[instance: %(uuid)s] ',
help='The format for an instance that is passed with the log '
'message.'),
cfg.StrOpt('instance_uuid_format',
default='[instance: %(uuid)s] ',
help='The format for an instance UUID that is passed with the '
'log message.'),
]
CONF = cfg.CONF
CONF.register_cli_opts(common_cli_opts)
CONF.register_cli_opts(logging_cli_opts)
CONF.register_opts(generic_log_opts)
CONF.register_opts(log_opts)
# our new audit level
# NOTE(jkoelker) Since we synthesized an audit level, make the logging
# module aware of it so it acts like other levels.
logging.AUDIT = logging.INFO + 1
logging.addLevelName(logging.AUDIT, 'AUDIT')
try:
NullHandler = logging.NullHandler
except AttributeError: # NOTE(jkoelker) NullHandler added in Python 2.7
class NullHandler(logging.Handler):
def handle(self, record):
pass
def emit(self, record):
pass
def createLock(self):
self.lock = None
def _dictify_context(context):
if context is None:
return None
if not isinstance(context, dict) and getattr(context, 'to_dict', None):
context = context.to_dict()
return context
def _get_binary_name():
return os.path.basename(inspect.stack()[-1][1])
def _get_log_file_path(binary=None):
logfile = CONF.log_file
logdir = CONF.log_dir
if logfile and not logdir:
return logfile
if logfile and logdir:
return os.path.join(logdir, logfile)
if logdir:
binary = binary or _get_binary_name()
return '%s.log' % (os.path.join(logdir, binary),)
return None
class BaseLoggerAdapter(logging.LoggerAdapter):
def audit(self, msg, *args, **kwargs):
self.log(logging.AUDIT, msg, *args, **kwargs)
def isEnabledFor(self, level):
if _PY26:
# This method was added in python 2.7 (and it does the exact
# same logic, so we need to do the exact same logic so that
# python 2.6 has this capability as well).
return self.logger.isEnabledFor(level)
else:
return super(BaseLoggerAdapter, self).isEnabledFor(level)
class LazyAdapter(BaseLoggerAdapter):
def __init__(self, name='unknown', version='unknown'):
self._logger = None
self.extra = {}
self.name = name
self.version = version
@property
def logger(self):
if not self._logger:
self._logger = getLogger(self.name, self.version)
if six.PY3:
# In Python 3, the code fails because the 'manager' attribute
# cannot be found when using a LoggerAdapter as the
# underlying logger. Work around this issue.
self._logger.manager = self._logger.logger.manager
return self._logger
class ContextAdapter(BaseLoggerAdapter):
warn = logging.LoggerAdapter.warning
def __init__(self, logger, project_name, version_string):
self.logger = logger
self.project = project_name
self.version = version_string
self._deprecated_messages_sent = dict()
@property
def handlers(self):
return self.logger.handlers
def deprecated(self, msg, *args, **kwargs):
"""Call this method when a deprecated feature is used.
If the system is configured for fatal deprecations then the message
is logged at the 'critical' level and :class:`DeprecatedConfig` will
be raised.
Otherwise, the message will be logged (once) at the 'warn' level.
:raises: :class:`DeprecatedConfig` if the system is configured for
fatal deprecations.
"""
stdmsg = _("Deprecated: %s") % msg
if CONF.fatal_deprecations:
self.critical(stdmsg, *args, **kwargs)
raise DeprecatedConfig(msg=stdmsg)
# Using a list because a tuple with dict can't be stored in a set.
sent_args = self._deprecated_messages_sent.setdefault(msg, list())
if args in sent_args:
# Already logged this message, so don't log it again.
return
sent_args.append(args)
self.warn(stdmsg, *args, **kwargs)
def process(self, msg, kwargs):
# NOTE(jecarey): If msg is not unicode, coerce it into unicode
# before it can get to the python logging and
# possibly cause string encoding trouble
if not isinstance(msg, six.text_type):
msg = six.text_type(msg)
if 'extra' not in kwargs:
kwargs['extra'] = {}
extra = kwargs['extra']
context = kwargs.pop('context', None)
if not context:
context = getattr(local.store, 'context', None)
if context:
extra.update(_dictify_context(context))
instance = kwargs.pop('instance', None)
instance_uuid = (extra.get('instance_uuid') or
kwargs.pop('instance_uuid', None))
instance_extra = ''
if instance:
instance_extra = CONF.instance_format % instance
elif instance_uuid:
instance_extra = (CONF.instance_uuid_format
% {'uuid': instance_uuid})
extra['instance'] = instance_extra
extra.setdefault('user_identity', kwargs.pop('user_identity', None))
extra['project'] = self.project
extra['version'] = self.version
extra['extra'] = extra.copy()
return msg, kwargs
class JSONFormatter(logging.Formatter):
def __init__(self, fmt=None, datefmt=None):
# NOTE(jkoelker) we ignore the fmt argument, but its still there
# since logging.config.fileConfig passes it.
self.datefmt = datefmt
def formatException(self, ei, strip_newlines=True):
lines = traceback.format_exception(*ei)
if strip_newlines:
lines = [moves.filter(
lambda x: x,
line.rstrip().splitlines()) for line in lines]
lines = list(itertools.chain(*lines))
return lines
def format(self, record):
message = {'message': record.getMessage(),
'asctime': self.formatTime(record, self.datefmt),
'name': record.name,
'msg': record.msg,
'args': record.args,
'levelname': record.levelname,
'levelno': record.levelno,
'pathname': record.pathname,
'filename': record.filename,
'module': record.module,
'lineno': record.lineno,
'funcname': record.funcName,
'created': record.created,
'msecs': record.msecs,
'relative_created': record.relativeCreated,
'thread': record.thread,
'thread_name': record.threadName,
'process_name': record.processName,
'process': record.process,
'traceback': None}
if hasattr(record, 'extra'):
message['extra'] = record.extra
if record.exc_info:
message['traceback'] = self.formatException(record.exc_info)
return jsonutils.dumps(message)
def _create_logging_excepthook(product_name):
def logging_excepthook(exc_type, value, tb):
extra = {'exc_info': (exc_type, value, tb)}
getLogger(product_name).critical(
"".join(traceback.format_exception_only(exc_type, value)),
**extra)
return logging_excepthook
class LogConfigError(Exception):
message = _('Error loading logging config %(log_config)s: %(err_msg)s')
def __init__(self, log_config, err_msg):
self.log_config = log_config
self.err_msg = err_msg
def __str__(self):
return self.message % dict(log_config=self.log_config,
err_msg=self.err_msg)
def _load_log_config(log_config_append):
try:
logging.config.fileConfig(log_config_append,
disable_existing_loggers=False)
except (moves.configparser.Error, KeyError) as exc:
raise LogConfigError(log_config_append, six.text_type(exc))
def setup(product_name, version='unknown'):
"""Setup logging."""
if CONF.log_config_append:
_load_log_config(CONF.log_config_append)
else:
_setup_logging_from_conf(product_name, version)
sys.excepthook = _create_logging_excepthook(product_name)
def set_defaults(logging_context_format_string=None,
default_log_levels=None):
# Just in case the caller is not setting the
# default_log_level. This is insurance because
# we introduced the default_log_level parameter
# later in a backwards in-compatible change
if default_log_levels is not None:
cfg.set_defaults(
log_opts,
default_log_levels=default_log_levels)
if logging_context_format_string is not None:
cfg.set_defaults(
log_opts,
logging_context_format_string=logging_context_format_string)
def _find_facility_from_conf():
facility_names = logging.handlers.SysLogHandler.facility_names
facility = getattr(logging.handlers.SysLogHandler,
CONF.syslog_log_facility,
None)
if facility is None and CONF.syslog_log_facility in facility_names:
facility = facility_names.get(CONF.syslog_log_facility)
if facility is None:
valid_facilities = facility_names.keys()
consts = ['LOG_AUTH', 'LOG_AUTHPRIV', 'LOG_CRON', 'LOG_DAEMON',
'LOG_FTP', 'LOG_KERN', 'LOG_LPR', 'LOG_MAIL', 'LOG_NEWS',
'LOG_AUTH', 'LOG_SYSLOG', 'LOG_USER', 'LOG_UUCP',
'LOG_LOCAL0', 'LOG_LOCAL1', 'LOG_LOCAL2', 'LOG_LOCAL3',
'LOG_LOCAL4', 'LOG_LOCAL5', 'LOG_LOCAL6', 'LOG_LOCAL7']
valid_facilities.extend(consts)
raise TypeError(_('syslog facility must be one of: %s') %
', '.join("'%s'" % fac
for fac in valid_facilities))
return facility
class RFCSysLogHandler(logging.handlers.SysLogHandler):
def __init__(self, *args, **kwargs):
self.binary_name = _get_binary_name()
# Do not use super() unless type(logging.handlers.SysLogHandler)
# is 'type' (Python 2.7).
# Use old style calls, if the type is 'classobj' (Python 2.6)
logging.handlers.SysLogHandler.__init__(self, *args, **kwargs)
def format(self, record):
# Do not use super() unless type(logging.handlers.SysLogHandler)
# is 'type' (Python 2.7).
# Use old style calls, if the type is 'classobj' (Python 2.6)
msg = logging.handlers.SysLogHandler.format(self, record)
msg = self.binary_name + ' ' + msg
return msg
def _setup_logging_from_conf(project, version):
log_root = getLogger(None).logger
for handler in log_root.handlers:
log_root.removeHandler(handler)
logpath = _get_log_file_path()
if logpath:
filelog = logging.handlers.WatchedFileHandler(logpath)
log_root.addHandler(filelog)
if CONF.use_stderr:
streamlog = ColorHandler()
log_root.addHandler(streamlog)
elif not logpath:
# pass sys.stdout as a positional argument
# python2.6 calls the argument strm, in 2.7 it's stream
streamlog = logging.StreamHandler(sys.stdout)
log_root.addHandler(streamlog)
if CONF.publish_errors:
try:
handler = importutils.import_object(
"manila.openstack.common.log_handler.PublishErrorsHandler",
logging.ERROR)
except ImportError:
handler = importutils.import_object(
"oslo.messaging.notify.log_handler.PublishErrorsHandler",
logging.ERROR)
log_root.addHandler(handler)
datefmt = CONF.log_date_format
for handler in log_root.handlers:
# NOTE(alaski): CONF.log_format overrides everything currently. This
# should be deprecated in favor of context aware formatting.
if CONF.log_format:
handler.setFormatter(logging.Formatter(fmt=CONF.log_format,
datefmt=datefmt))
log_root.info('Deprecated: log_format is now deprecated and will '
'be removed in the next release')
else:
handler.setFormatter(ContextFormatter(project=project,
version=version,
datefmt=datefmt))
if CONF.debug:
log_root.setLevel(logging.DEBUG)
elif CONF.verbose:
log_root.setLevel(logging.INFO)
else:
log_root.setLevel(logging.WARNING)
for pair in CONF.default_log_levels:
mod, _sep, level_name = pair.partition('=')
logger = logging.getLogger(mod)
# NOTE(AAzza) in python2.6 Logger.setLevel doesn't convert string name
# to integer code.
if sys.version_info < (2, 7):
level = logging.getLevelName(level_name)
logger.setLevel(level)
else:
logger.setLevel(level_name)
if CONF.use_syslog:
try:
facility = _find_facility_from_conf()
# TODO(bogdando) use the format provided by RFCSysLogHandler
# after existing syslog format deprecation in J
if CONF.use_syslog_rfc_format:
syslog = RFCSysLogHandler(facility=facility)
else:
syslog = logging.handlers.SysLogHandler(facility=facility)
log_root.addHandler(syslog)
except socket.error:
log_root.error('Unable to add syslog handler. Verify that syslog '
'is running.')
_loggers = {}
def getLogger(name='unknown', version='unknown'):
if name not in _loggers:
_loggers[name] = ContextAdapter(logging.getLogger(name),
name,
version)
return _loggers[name]
def getLazyLogger(name='unknown', version='unknown'):
"""Returns lazy logger.
Creates a pass-through logger that does not create the real logger
until it is really needed and delegates all calls to the real logger
once it is created.
"""
return LazyAdapter(name, version)
class WritableLogger(object):
"""A thin wrapper that responds to `write` and logs."""
def __init__(self, logger, level=logging.INFO):
self.logger = logger
self.level = level
def write(self, msg):
self.logger.log(self.level, msg.rstrip())
class ContextFormatter(logging.Formatter):
"""A context.RequestContext aware formatter configured through flags.
The flags used to set format strings are: logging_context_format_string
and logging_default_format_string. You can also specify
logging_debug_format_suffix to append extra formatting if the log level is
debug.
For information about what variables are available for the formatter see:
http://docs.python.org/library/logging.html#formatter
If available, uses the context value stored in TLS - local.store.context
"""
def __init__(self, *args, **kwargs):
"""Initialize ContextFormatter instance
Takes additional keyword arguments which can be used in the message
format string.
:keyword project: project name
:type project: string
:keyword version: project version
:type version: string
"""
self.project = kwargs.pop('project', 'unknown')
self.version = kwargs.pop('version', 'unknown')
logging.Formatter.__init__(self, *args, **kwargs)
def format(self, record):
"""Uses contextstring if request_id is set, otherwise default."""
# NOTE(jecarey): If msg is not unicode, coerce it into unicode
# before it can get to the python logging and
# possibly cause string encoding trouble
if not isinstance(record.msg, six.text_type):
record.msg = six.text_type(record.msg)
# store project info
record.project = self.project
record.version = self.version
# store request info
context = getattr(local.store, 'context', None)
if context:
d = _dictify_context(context)
for k, v in d.items():
setattr(record, k, v)
# NOTE(sdague): default the fancier formatting params
# to an empty string so we don't throw an exception if
# they get used
for key in ('instance', 'color', 'user_identity'):
if key not in record.__dict__:
record.__dict__[key] = ''
if record.__dict__.get('request_id'):
fmt = CONF.logging_context_format_string
else:
fmt = CONF.logging_default_format_string
if (record.levelno == logging.DEBUG and
CONF.logging_debug_format_suffix):
fmt += " " + CONF.logging_debug_format_suffix
if sys.version_info < (3, 2):
self._fmt = fmt
else:
self._style = logging.PercentStyle(fmt)
self._fmt = self._style._fmt
# Cache this on the record, Logger will respect our formatted copy
if record.exc_info:
record.exc_text = self.formatException(record.exc_info, record)
return logging.Formatter.format(self, record)
def formatException(self, exc_info, record=None):
"""Format exception output with CONF.logging_exception_prefix."""
if not record:
return logging.Formatter.formatException(self, exc_info)
stringbuffer = moves.StringIO()
traceback.print_exception(exc_info[0], exc_info[1], exc_info[2],
None, stringbuffer)
lines = stringbuffer.getvalue().split('\n')
stringbuffer.close()
if CONF.logging_exception_prefix.find('%(asctime)') != -1:
record.asctime = self.formatTime(record, self.datefmt)
formatted_lines = []
for line in lines:
pl = CONF.logging_exception_prefix % record.__dict__
fl = '%s%s' % (pl, line)
formatted_lines.append(fl)
return '\n'.join(formatted_lines)
class ColorHandler(logging.StreamHandler):
LEVEL_COLORS = {
logging.DEBUG: '\033[00;32m', # GREEN
logging.INFO: '\033[00;36m', # CYAN
logging.AUDIT: '\033[01;36m', # BOLD CYAN
logging.WARN: '\033[01;33m', # BOLD YELLOW
logging.ERROR: '\033[01;31m', # BOLD RED
logging.CRITICAL: '\033[01;31m', # BOLD RED
}
def format(self, record):
record.color = self.LEVEL_COLORS[record.levelno]
return logging.StreamHandler.format(self, record)
class DeprecatedConfig(Exception):
message = _("Fatal call to deprecated config: %(msg)s")
def __init__(self, msg):
super(Exception, self).__init__(self.message % dict(msg=msg))


@ -15,6 +15,7 @@
# License for the specific language governing permissions and limitations
# under the License.
import logging
import sys
import time
@ -22,7 +23,6 @@ from eventlet import event
from eventlet import greenthread
from manila.openstack.common._i18n import _LE, _LW
from manila.openstack.common import log as logging
LOG = logging.getLogger(__name__)
@ -84,9 +84,9 @@ class FixedIntervalLoopingCall(LoopingCallBase):
break
delay = end - start - interval
if delay > 0:
LOG.warn(_LW('task %(func_name)s run outlasted '
LOG.warn(_LW('task %(func_name)r run outlasted '
'interval by %(delay).2f sec'),
{'func_name': repr(self.f), 'delay': delay})
{'func_name': self.f, 'delay': delay})
greenthread.sleep(-delay if delay < 0 else 0)
except LoopingCallDone as e:
self.stop()
@ -127,9 +127,9 @@ class DynamicLoopingCall(LoopingCallBase):
if periodic_interval_max is not None:
idle = min(idle, periodic_interval_max)
LOG.debug('Dynamic looping call %(func_name)s sleeping '
LOG.debug('Dynamic looping call %(func_name)r sleeping '
'for %(idle).02f seconds',
{'func_name': repr(self.f), 'idle': idle})
{'func_name': self.f, 'idle': idle})
greenthread.sleep(idle)
except LoopingCallDone as e:
self.stop()


@ -16,9 +16,9 @@
"""
Filter support
"""
import logging
from manila.openstack.common._i18n import _LI
from manila.openstack.common import log as logging
from manila.openstack.common.scheduler import base_handler
LOG = logging.getLogger(__name__)


@ -63,7 +63,8 @@ class WeighedObject(object):
return "<WeighedObject '%s': %s>" % (self.obj, self.weight)
class BaseWeigher(six.with_metaclass(abc.ABCMeta, object)):
@six.add_metaclass(abc.ABCMeta)
class BaseWeigher(object):
"""Base class for pluggable weighers.
The attributes maxval and minval can be specified to set up the maximum


@ -13,9 +13,10 @@
# License for the specific language governing permissions and limitations
# under the License.
import logging
import six
from manila.openstack.common import log as logging
from manila.openstack.common.scheduler import filters
from manila.openstack.common.scheduler.filters import extra_specs_ops


@ -15,7 +15,7 @@
import operator
from oslo.utils import strutils
from oslo_utils import strutils
# 1. The following operations are supported:
# =, s==, s!=, s>=, s>, s<=, s<, <in>, <is>, <or>, ==, !=, >=, <=


@ -13,7 +13,8 @@
# License for the specific language governing permissions and limitations
# under the License.
from manila.openstack.common import log as logging
import logging
from manila.openstack.common.scheduler import filters
LOG = logging.getLogger(__name__)


@ -15,7 +15,7 @@
import operator
from oslo.serialization import jsonutils
from oslo_serialization import jsonutils
import six
from manila.openstack.common.scheduler import filters


@ -18,7 +18,7 @@
"""Generic Node base class for all workers that run on hosts."""
import errno
import logging as std_logging
import logging
import os
import random
import signal
@ -35,11 +35,10 @@ except ImportError:
import eventlet
from eventlet import event
from oslo.config import cfg
from oslo_config import cfg
from manila.openstack.common import eventlet_backdoor
from manila.openstack.common._i18n import _LE, _LI, _LW
from manila.openstack.common import log as logging
from manila.openstack.common import systemd
from manila.openstack.common import threadgroup
@ -163,7 +162,7 @@ class ServiceLauncher(Launcher):
signo = 0
LOG.debug('Full set of CONF:')
CONF.log_opt_values(LOG, std_logging.DEBUG)
CONF.log_opt_values(LOG, logging.DEBUG)
try:
if ready_callback:
@ -200,22 +199,26 @@ class ServiceWrapper(object):
class ProcessLauncher(object):
def __init__(self, wait_interval=0.01):
"""Constructor.
_signal_handlers_set = set()
@classmethod
def _handle_class_signals(cls, *args, **kwargs):
for handler in cls._signal_handlers_set:
handler(*args, **kwargs)
def __init__(self):
"""Constructor."""
:param wait_interval: The interval to sleep for between checks
of child process exit.
"""
self.children = {}
self.sigcaught = None
self.running = True
self.wait_interval = wait_interval
rfd, self.writepipe = os.pipe()
self.readpipe = eventlet.greenio.GreenPipe(rfd, 'r')
self.handle_signal()
def handle_signal(self):
_set_signals_handler(self._handle_signal)
self._signal_handlers_set.add(self._handle_signal)
_set_signals_handler(self._handle_class_signals)
def _handle_signal(self, signo, frame):
self.sigcaught = signo
@ -334,8 +337,8 @@ class ProcessLauncher(object):
def _wait_child(self):
try:
# Don't block if no child processes have exited
pid, status = os.waitpid(0, os.WNOHANG)
# Block while any of child processes have exited
pid, status = os.waitpid(0, 0)
if not pid:
return None
except OSError as exc:
@ -364,10 +367,6 @@ class ProcessLauncher(object):
while self.running:
wrap = self._wait_child()
if not wrap:
# Yield to other threads if no children have exited
# Sleep for a short time to avoid excessive CPU usage
# (see bug #1095346)
eventlet.greenthread.sleep(self.wait_interval)
continue
while self.running and len(wrap.children) < wrap.workers:
self._start_child(wrap)
@ -377,7 +376,7 @@ class ProcessLauncher(object):
systemd.notify_once()
LOG.debug('Full set of CONF:')
CONF.log_opt_values(LOG, std_logging.DEBUG)
CONF.log_opt_values(LOG, logging.DEBUG)
try:
while True:
@ -392,8 +391,14 @@ class ProcessLauncher(object):
if not _is_sighup_and_daemon(self.sigcaught):
break
cfg.CONF.reload_config_files()
for service in set(
[wrap.service for wrap in self.children.values()]):
service.reset()
for pid in self.children:
os.kill(pid, signal.SIGHUP)
self.running = True
self.sigcaught = None
except eventlet.greenlet.GreenletExit:
@ -434,8 +439,8 @@ class Service(object):
def start(self):
pass
def stop(self):
self.tg.stop()
def stop(self, graceful=False):
self.tg.stop(graceful)
self.tg.wait()
# Signal that service cleanup is done:
if not self._done.ready():


@ -16,12 +16,11 @@
Helper module for systemd service readiness notification.
"""
import logging
import os
import socket
import sys
from manila.openstack.common import log as logging
LOG = logging.getLogger(__name__)


@ -11,12 +11,12 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import threading
import eventlet
from eventlet import greenpool
from manila.openstack.common import log as logging
from manila.openstack.common import loopingcall
@ -96,6 +96,8 @@ class ThreadGroup(object):
continue
try:
x.stop()
except eventlet.greenlet.GreenletExit:
pass
except Exception as ex:
LOG.exception(ex)


@ -39,7 +39,6 @@ import manila.network.neutron.neutron_network_plugin
import manila.network.nova_network_plugin
import manila.network.standalone_network_plugin
import manila.openstack.common.eventlet_backdoor
import manila.openstack.common.log
import manila.quota
import manila.scheduler.driver
import manila.scheduler.host_manager
@ -94,10 +93,6 @@ _global_opt_lists = [
manila.network.nova_network_plugin.nova_single_network_plugin_opts,
manila.network.standalone_network_plugin.standalone_network_plugin_opts,
manila.openstack.common.eventlet_backdoor.eventlet_backdoor_opts,
manila.openstack.common.log.common_cli_opts,
manila.openstack.common.log.generic_log_opts,
manila.openstack.common.log.log_opts,
manila.openstack.common.log.logging_cli_opts,
manila.quota.quota_opts,
manila.scheduler.driver.scheduler_driver_opts,
manila.scheduler.host_manager.host_manager_opts,


@ -35,6 +35,7 @@ class ManilaCmdAllTestCase(test.TestCase):
sys.argv = ['manila-all']
self.mock_object(log, 'setup')
self.mock_object(log, 'register_options')
self.mock_object(log, 'getLogger')
self.mock_object(utils, 'monkey_patch')
self.mock_object(service, 'WSGIService')
@ -49,6 +50,7 @@ class ManilaCmdAllTestCase(test.TestCase):
self.assertEqual(CONF.project, 'manila')
self.assertEqual(CONF.version, version.version_string())
log.setup.assert_called_once_with(CONF, "manila")
log.register_options.assert_called_once_with(CONF)
log.getLogger.assert_called_once_with('manila.all')
utils.monkey_patch.assert_called_once_with()
service.WSGIService.assert_called_once_with('osapi_share')


@ -29,6 +29,7 @@ class ManilaCmdApiTestCase(test.TestCase):
def test_main(self):
self.mock_object(manila_api.log, 'setup')
self.mock_object(manila_api.log, 'register_options')
self.mock_object(manila_api.utils, 'monkey_patch')
self.mock_object(manila_api.service, 'WSGIService')
self.mock_object(manila_api.service, 'serve')
@ -39,6 +40,7 @@ class ManilaCmdApiTestCase(test.TestCase):
self.assertEqual(CONF.project, 'manila')
self.assertEqual(CONF.version, version.version_string())
manila_api.log.setup.assert_called_once_with(CONF, "manila")
manila_api.log.register_options.assert_called_once_with(CONF)
manila_api.utils.monkey_patch.assert_called_once_with()
manila_api.service.WSGIService.assert_called_once_with('osapi_share')
manila_api.service.wait.assert_called_once_with()


@ -27,6 +27,7 @@ class ManilaCmdSchedulerTestCase(test.TestCase):
def test_main(self):
sys.argv = ['manila-scheduler']
self.mock_object(manila_scheduler.log, 'setup')
self.mock_object(manila_scheduler.log, 'register_options')
self.mock_object(manila_scheduler.utils, 'monkey_patch')
self.mock_object(manila_scheduler.service.Service, 'create')
self.mock_object(manila_scheduler.service, 'serve')
@ -37,6 +38,7 @@ class ManilaCmdSchedulerTestCase(test.TestCase):
self.assertEqual(CONF.project, 'manila')
self.assertEqual(CONF.version, version.version_string())
manila_scheduler.log.setup.assert_called_once_with(CONF, "manila")
manila_scheduler.log.register_options.assert_called_once_with(CONF)
manila_scheduler.utils.monkey_patch.assert_called_once_with()
manila_scheduler.service.Service.create.assert_called_once_with(
binary='manila-scheduler')


@ -30,6 +30,7 @@ class ManilaCmdShareTestCase(test.TestCase):
@ddt.data(None, [], ['foo', ], ['foo', 'bar', ])
def test_main(self, backends):
self.mock_object(manila_share.log, 'setup')
self.mock_object(manila_share.log, 'register_options')
self.mock_object(manila_share.utils, 'monkey_patch')
self.mock_object(manila_share.service, 'ProcessLauncher')
self.mock_object(manila_share.service.Service, 'create')
@ -45,6 +46,7 @@ class ManilaCmdShareTestCase(test.TestCase):
manila_share.main()
manila_share.log.setup.assert_called_once_with(CONF, "manila")
manila_share.log.register_options.assert_called_once_with(CONF)
manila_share.utils.monkey_patch.assert_called_once_with()
manila_share.service.ProcessLauncher.assert_called_once_with()
self.launcher.wait.assert_called_once_with()


@ -4,7 +4,6 @@
module=eventlet_backdoor
module=fileutils
module=local
module=log
module=loopingcall
module=scheduler
module=scheduler.filters