Sync with oslo-incubator 61f4461f91

Change-Id: I916e80aaaf0859da6e439de141618735866cb7db
Kiall Mac Innes 2015-06-07 15:46:32 +01:00
parent 9b4c9f9d9f
commit bbaf8b3875
8 changed files with 53 additions and 206 deletions

View File

@@ -143,7 +143,7 @@ def initialize_if_enabled():
# listen(). In any case, pull the port number out here.
port = sock.getsockname()[1]
LOG.info(
-_LI('Eventlet backdoor listening on %(port)s for process %(pid)d') %
+_LI('Eventlet backdoor listening on %(port)s for process %(pid)d'),
{'port': port, 'pid': os.getpid()}
)
eventlet.spawn_n(eventlet.backdoor.backdoor_server, sock,
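Note: the only change here is how the log call is formatted. The argument mapping is now handed to LOG.info() instead of being %-interpolated up front, so interpolation (and translation) is deferred until the record is actually emitted. A minimal standalone sketch of the difference, with made-up port/pid values:

import logging

logging.basicConfig(level=logging.INFO)
LOG = logging.getLogger(__name__)

# Eager: the message is interpolated before logging ever sees it, even if
# the record would be filtered out.
LOG.info('Eventlet backdoor listening on %(port)s for process %(pid)d'
         % {'port': 4444, 'pid': 1234})

# Lazy: the mapping is passed as an argument and only interpolated when the
# record is actually emitted.
LOG.info('Eventlet backdoor listening on %(port)s for process %(pid)d',
         {'port': 4444, 'pid': 1234})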

View File

@@ -1,149 +0,0 @@
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import errno
import logging
import os
import stat
import tempfile
from oslo_utils import excutils
LOG = logging.getLogger(__name__)
_FILE_CACHE = {}
DEFAULT_MODE = stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO
def ensure_tree(path, mode=DEFAULT_MODE):
"""Create a directory (and any ancestor directories required)
:param path: Directory to create
:param mode: Directory creation permissions
"""
try:
os.makedirs(path, mode)
except OSError as exc:
if exc.errno == errno.EEXIST:
if not os.path.isdir(path):
raise
else:
raise
def read_cached_file(filename, force_reload=False):
"""Read from a file if it has been modified.
:param force_reload: Whether to reload the file.
:returns: A tuple with a boolean specifying if the data is fresh
or not.
"""
global _FILE_CACHE
if force_reload:
delete_cached_file(filename)
reloaded = False
mtime = os.path.getmtime(filename)
cache_info = _FILE_CACHE.setdefault(filename, {})
if not cache_info or mtime > cache_info.get('mtime', 0):
LOG.debug("Reloading cached file %s" % filename)
with open(filename) as fap:
cache_info['data'] = fap.read()
cache_info['mtime'] = mtime
reloaded = True
return (reloaded, cache_info['data'])
def delete_cached_file(filename):
"""Delete cached file if present.
:param filename: filename to delete
"""
global _FILE_CACHE
if filename in _FILE_CACHE:
del _FILE_CACHE[filename]
def delete_if_exists(path, remove=os.unlink):
"""Delete a file, but ignore file not found error.
:param path: File to delete
:param remove: Optional function to remove passed path
"""
try:
remove(path)
except OSError as e:
if e.errno != errno.ENOENT:
raise
@contextlib.contextmanager
def remove_path_on_error(path, remove=delete_if_exists):
"""Protect code that wants to operate on PATH atomically.
Any exception will cause PATH to be removed.
:param path: File to work with
:param remove: Optional function to remove passed path
"""
try:
yield
except Exception:
with excutils.save_and_reraise_exception():
remove(path)
def file_open(*args, **kwargs):
"""Open file
see built-in open() documentation for more details
Note: The reason this is kept in a separate module is to easily
be able to provide a stub module that doesn't alter system
state at all (for unit tests)
"""
return open(*args, **kwargs)
def write_to_tempfile(content, path=None, suffix='', prefix='tmp'):
"""Create temporary file or use existing file.
This util is needed for creating temporary file with
specified content, suffix and prefix. If path is not None,
it will be used for writing content. If the path doesn't
exist it'll be created.
:param content: content for temporary file.
:param path: same as parameter 'dir' for mkstemp
:param suffix: same as parameter 'suffix' for mkstemp
:param prefix: same as parameter 'prefix' for mkstemp
For example: it can be used in database tests for creating
configuration files.
"""
if path:
ensure_tree(path)
(fd, path) = tempfile.mkstemp(suffix=suffix, dir=path, prefix=prefix)
try:
os.write(fd, content)
finally:
os.close(fd)
return path
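Note: the whole in-tree fileutils module goes away. Presumably callers pick up the equivalent helpers from the released oslo.utils library; that switch is not visible in this diff, so the lines below are only an assumed illustration of the replacement, not Designate code:

# Assumes oslo.utils ships the same helpers the deleted in-tree module
# carried; paths and content here are placeholders.
from oslo_utils import fileutils

fileutils.ensure_tree('/tmp/designate-test-cache', 0o755)
tmp_path = fileutils.write_to_tempfile(b'fake = config\n',
                                       path='/tmp/designate-test-cache',
                                       suffix='.conf')
print(tmp_path)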

View File

@@ -84,9 +84,9 @@ class FixedIntervalLoopingCall(LoopingCallBase):
break
delay = end - start - interval
if delay > 0:
-LOG.warn(_LW('task %(func_name)r run outlasted '
-'interval by %(delay).2f sec'),
-{'func_name': self.f, 'delay': delay})
+LOG.warning(_LW('task %(func_name)r run outlasted '
+'interval by %(delay).2f sec'),
+{'func_name': self.f, 'delay': delay})
greenthread.sleep(-delay if delay < 0 else 0)
except LoopingCallDone as e:
self.stop()
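Note: only the method name changes. warning() is the documented logging method, while warn() is a soft-deprecated alias. For reference, outside the looping-call context:

import logging

logging.basicConfig(level=logging.WARNING)
LOG = logging.getLogger(__name__)

# warning() is the documented spelling; warn() is an alias that newer
# Python releases flag with a DeprecationWarning.
LOG.warning('task %(func_name)r run outlasted interval by %(delay).2f sec',
            {'func_name': 'periodic_task', 'delay': 0.25})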

View File

@@ -40,7 +40,21 @@ class PackageReportGenerator(object):
self.version_obj = version_obj
def __call__(self):
-return vm.PackageModel(
-self.version_obj.vendor_string(),
-self.version_obj.product_string(),
-self.version_obj.version_string_with_package())
+if hasattr(self.version_obj, "vendor_string"):
+vendor_string = self.version_obj.vendor_string()
+else:
+vendor_string = None
+if hasattr(self.version_obj, "product_string"):
+product_string = self.version_obj.product_string()
+else:
+product_string = None
+if hasattr(self.version_obj, "version_string_with_package"):
+version_string_with_package = self.version_obj.\
+version_string_with_package()
+else:
+version_string_with_package = None
+return vm.PackageModel(vendor_string, product_string,
+version_string_with_package)
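Note: the report generator no longer assumes the version object implements all three accessors; any missing one now produces None. The same defensive pattern condensed into a getattr() helper, purely illustrative (FakeVersionInfo and _maybe_call are not part of the module):

class FakeVersionInfo(object):
    # Hypothetical version object that only implements one accessor.
    def product_string(self):
        return 'designate'

def _maybe_call(obj, name):
    # Return obj.<name>() when the accessor exists, else None -- the same
    # effect as the hasattr checks in the hunk above.
    method = getattr(obj, name, None)
    return method() if callable(method) else None

version_obj = FakeVersionInfo()
print(_maybe_call(version_obj, 'vendor_string'))                # None
print(_maybe_call(version_obj, 'product_string'))               # designate
print(_maybe_call(version_obj, 'version_string_with_package'))  # None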

View File

@@ -157,7 +157,8 @@ class GuruMeditation(object):
service_name = service_name or os.path.basename(
inspect.stack()[-1][1])
filename = "%s_gurumeditation_%s" % (
-service_name, timeutils.strtime(fmt=cls.timestamp_fmt))
+service_name, timeutils.utcnow().strftime(
+cls.timestamp_fmt))
filepath = os.path.join(log_dir, filename)
try:
with open(filepath, "w") as dumpfile:
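Note: timeutils.strtime() is dropped in favour of formatting utcnow() directly. The equivalent with the standard library alone, using an assumed timestamp format and service name since neither is shown in this hunk:

import datetime

timestamp_fmt = "%Y%m%d%H%M%S"      # assumed; the class attribute is not in this hunk
service_name = 'designate-central'  # placeholder
filename = "%s_gurumeditation_%s" % (
    service_name, datetime.datetime.utcnow().strftime(timestamp_fmt))
print(filename)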

View File

@@ -18,6 +18,7 @@
"""Generic Node base class for all workers that run on hosts."""
import errno
+import io
import logging
import os
import random
@@ -25,14 +26,6 @@ import signal
import sys
import time
-try:
-# Importing just the symbol here because the io module does not
-# exist in Python 2.6.
-from io import UnsupportedOperation # noqa
-except ImportError:
-# Python 2.6
-UnsupportedOperation = None
import eventlet
from eventlet import event
from oslo_config import cfg
@@ -59,15 +52,15 @@ def _is_daemon():
# http://www.gnu.org/software/bash/manual/bashref.html#Job-Control-Basics
try:
is_daemon = os.getpgrp() != os.tcgetpgrp(sys.stdout.fileno())
+except io.UnsupportedOperation:
+# Could not get the fileno for stdout, so we must be a daemon.
+is_daemon = True
except OSError as err:
if err.errno == errno.ENOTTY:
# Assume we are a daemon because there is no terminal.
is_daemon = True
else:
raise
-except UnsupportedOperation:
-# Could not get the fileno for stdout, so we must be a daemon.
-is_daemon = True
return is_daemon
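Note: with Python 2.6 support gone, the guarded import disappears and the exception is caught as io.UnsupportedOperation. A quick standalone demonstration of where that exception comes from:

import io

# fileno() raises io.UnsupportedOperation when a stream has no real file
# descriptor behind it -- the case _is_daemon() now catches directly.
buf = io.StringIO()
try:
    buf.fileno()
except io.UnsupportedOperation:
    print('no underlying file descriptor; treat the process as a daemon')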
@@ -199,6 +192,13 @@ class ServiceWrapper(object):
class ProcessLauncher(object):
+_signal_handlers_set = set()
+@classmethod
+def _handle_class_signals(cls, *args, **kwargs):
+for handler in cls._signal_handlers_set:
+handler(*args, **kwargs)
def __init__(self, wait_interval=0.01):
"""Constructor.
@@ -214,7 +214,8 @@ class ProcessLauncher(object):
self.handle_signal()
def handle_signal(self):
-_set_signals_handler(self._handle_signal)
+self._signal_handlers_set.add(self._handle_signal)
+_set_signals_handler(self._handle_class_signals)
def _handle_signal(self, signo, frame):
self.sigcaught = signo
@@ -226,7 +227,7 @@ class ProcessLauncher(object):
def _pipe_watcher(self):
# This will block until the write end is closed when the parent
# dies unexpectedly
-self.readpipe.read()
+self.readpipe.read(1)
LOG.info(_LI('Parent process has died unexpectedly, exiting'))
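Note: the watcher now asks for a single byte rather than an unbounded read(); either call only returns once every copy of the pipe's write end is closed, which happens when the parent dies. A plain-os sketch of the pattern (the real launcher wires this up with eventlet-friendly pipes):

import os
import time

# Parent keeps the write end open for its lifetime; the child blocks on the
# read end and wakes up only when the parent is gone.
read_fd, write_fd = os.pipe()
pid = os.fork()
if pid == 0:                      # child
    os.close(write_fd)
    os.read(read_fd, 1)           # returns b'' once the parent has exited
    print('parent died, child exiting')
    os._exit(0)
else:                             # parent
    os.close(read_fd)
    time.sleep(0.1)               # pretend to do work, then exit normally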
@@ -234,15 +235,12 @@ class ProcessLauncher(object):
def _child_process_handle_signal(self):
# Setup child signal handlers differently
-def _sigterm(*args):
-signal.signal(signal.SIGTERM, signal.SIG_DFL)
-raise SignalExit(signal.SIGTERM)
def _sighup(*args):
signal.signal(signal.SIGHUP, signal.SIG_DFL)
raise SignalExit(signal.SIGHUP)
-signal.signal(signal.SIGTERM, _sigterm)
+# Parent signals with SIGTERM when it wants us to go away.
+signal.signal(signal.SIGTERM, signal.SIG_DFL)
if _sighup_supported():
signal.signal(signal.SIGHUP, _sighup)
# Block SIGINT and let the parent send us a SIGTERM
@@ -391,8 +389,14 @@ class ProcessLauncher(object):
if not _is_sighup_and_daemon(self.sigcaught):
break
cfg.CONF.reload_config_files()
+for service in set(
+[wrap.service for wrap in self.children.values()]):
+service.reset()
+for pid in self.children:
+os.kill(pid, signal.SIGHUP)
self.running = True
self.sigcaught = None
except eventlet.greenlet.GreenletExit:
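Note: on SIGHUP the parent now reloads its configuration, resets the wrapped services and forwards the signal to each child pid. A sketch of just the forwarding step, with a stand-in children mapping:

import os
import signal

def forward_sighup(children):
    # `children` stands in for the launcher's internal pid -> wrapper map.
    for pid in list(children):
        try:
            os.kill(pid, signal.SIGHUP)
        except OSError:
            # The child already exited; the normal wait/reap path handles it.
            pass

forward_sighup({})  # no children tracked in this standalone sketch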

View File

@@ -79,27 +79,3 @@ def wrap(sock):
ssl_kwargs['cert_reqs'] = ssl.CERT_REQUIRED
return ssl.wrap_socket(sock, **ssl_kwargs)
-_SSL_PROTOCOLS = {
-"tlsv1": ssl.PROTOCOL_TLSv1,
-"sslv23": ssl.PROTOCOL_SSLv23,
-}
-try:
-_SSL_PROTOCOLS["sslv2"] = ssl.PROTOCOL_SSLv2
-except AttributeError:
-pass
-try:
-_SSL_PROTOCOLS["sslv3"] = ssl.PROTOCOL_SSLv3
-except AttributeError:
-pass
-def validate_ssl_version(version):
-key = version.lower()
-try:
-return _SSL_PROTOCOLS[key]
-except KeyError:
-raise RuntimeError(_("Invalid SSL version : %s") % version)
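Note: the protocol-name map and validate_ssl_version() helper are removed outright. For reference, a compact rewrite of what the deleted code did; the hasattr() probing mirrors the original try/except AttributeError blocks, since SSLv2/SSLv3 constants are missing from many OpenSSL builds:

import ssl

_SSL_PROTOCOLS = {
    name: getattr(ssl, const)
    for name, const in (('tlsv1', 'PROTOCOL_TLSv1'),
                        ('sslv23', 'PROTOCOL_SSLv23'),
                        ('sslv2', 'PROTOCOL_SSLv2'),
                        ('sslv3', 'PROTOCOL_SSLv3'))
    if hasattr(ssl, const)
}

def validate_ssl_version(version):
    try:
        return _SSL_PROTOCOLS[version.lower()]
    except KeyError:
        raise RuntimeError('Invalid SSL version : %s' % version)

print(validate_ssl_version('tlsv1'))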

View File

@@ -17,6 +17,7 @@ import threading
import eventlet
from eventlet import greenpool
+from designate.openstack.common._i18n import _LE
from designate.openstack.common import loopingcall
@@ -98,15 +99,15 @@ class ThreadGroup(object):
x.stop()
except eventlet.greenlet.GreenletExit:
pass
-except Exception as ex:
-LOG.exception(ex)
+except Exception:
+LOG.exception(_LE('Error stopping thread.'))
def stop_timers(self):
for x in self.timers:
try:
x.stop()
-except Exception as ex:
-LOG.exception(ex)
+except Exception:
+LOG.exception(_LE('Error stopping timer.'))
self.timers = []
def stop(self, graceful=False):
@@ -132,8 +133,8 @@ class ThreadGroup(object):
x.wait()
except eventlet.greenlet.GreenletExit:
pass
-except Exception as ex:
-LOG.exception(ex)
+except Exception:
+LOG.exception(_LE('Error waiting on ThreadGroup.'))
current = threading.current_thread()
# Iterate over a copy of self.threads so thread_done doesn't