Merge "Update olso modules"

This commit is contained in:
Jenkins 2014-05-08 17:44:02 +00:00 committed by Gerrit Code Review
commit 8202a84045
16 changed files with 137 additions and 72 deletions

@@ -16,10 +16,13 @@ import base64
 from Crypto.Hash import HMAC
 from Crypto import Random
+import six
 from heat.openstack.common.gettextutils import _
 from heat.openstack.common import importutils
+bchr = six.int2byte
 class CryptoutilsException(Exception):
     """Generic Exception for Crypto utilities."""
@@ -64,7 +67,7 @@ class HKDF(object):
         :param salt: optional salt value (a non-secret random value)
         """
         if salt is None:
-            salt = '\x00' * self.hashfn.digest_size
+            salt = b'\x00' * self.hashfn.digest_size
         return HMAC.new(salt, ikm, self.hashfn).digest()
@@ -80,12 +83,12 @@ class HKDF(object):
         if length > self.max_okm_length:
             raise HKDFOutputLengthTooLong(length, self.max_okm_length)
-        N = (length + self.hashfn.digest_size - 1) / self.hashfn.digest_size
+        N = (length + self.hashfn.digest_size - 1) // self.hashfn.digest_size
-        okm = ""
-        tmp = ""
+        okm = b""
+        tmp = b""
         for block in range(1, N + 1):
-            tmp = HMAC.new(prk, tmp + info + chr(block), self.hashfn).digest()
+            tmp = HMAC.new(prk, tmp + info + bchr(block), self.hashfn).digest()
             okm += tmp
         return okm[:length]
@@ -135,8 +138,8 @@ class SymmetricCrypto(object):
             raise CipherBlockLengthTooBig(self.cipher.block_size, MAX_CB_SIZE)
         r = len(msg) % self.cipher.block_size
         padlen = self.cipher.block_size - r - 1
-        msg += '\x00' * padlen
-        msg += chr(padlen)
+        msg += b'\x00' * padlen
+        msg += bchr(padlen)
         enc = iv + cipher.encrypt(msg)
         if b64encode:
@@ -160,7 +163,7 @@ class SymmetricCrypto(object):
         cipher = self.cipher.new(key, self.cipher.MODE_CBC, iv)
         padded = cipher.decrypt(msg[self.cipher.block_size:])
-        l = ord(padded[-1]) + 1
+        l = ord(padded[-1:]) + 1
         plain = padded[:-l]
         return plain
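The cryptoutils hunks above are a straight Python 3 port: byte literals gain a b'' prefix, true division becomes floor division, and chr() is replaced by six.int2byte (aliased bchr), since chr() returns text rather than bytes on Python 3. A minimal sketch of the difference, using only the standard library and six:

    # Why bchr = six.int2byte: chr() returns text on Python 3, which
    # cannot be concatenated with the bytes that HMAC/AES consume.
    import six

    block = 3
    assert six.int2byte(block) == b'\x03'   # bytes on Python 2 and 3
    assert isinstance(chr(block), str)      # on Python 3 this is text, not bytes
    # Likewise padded[-1:] keeps a one-byte bytes slice, so ord() works on
    # both versions; on Python 3, padded[-1] is already an int and
    # ord(padded[-1]) would raise TypeError.
    assert ord(b'ab'[-1:]) == 98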

@@ -18,12 +18,12 @@ import functools
 import os
 import fixtures
+from oslotest import base as test_base
 import six
 from heat.openstack.common.db.sqlalchemy import provision
 from heat.openstack.common.db.sqlalchemy import session
 from heat.openstack.common.db.sqlalchemy import utils
-from heat.openstack.common import test as test_base
 class DbFixture(fixtures.Fixture):
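This hunk and the next both swap the incubator's test module for the graduated oslotest library under the same test_base alias, so the class bodies need no changes. A hedged sketch of a test case on the new base (class and test names are illustrative):

    # oslotest's BaseTestCase wires up common fixtures (timeouts, temp
    # dirs, log/output capture) for every test.
    from oslotest import base as test_base

    class ExampleTestCase(test_base.BaseTestCase):
        def test_addition(self):
            self.assertEqual(4, 2 + 2)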

@@ -20,6 +20,7 @@ import os
 import subprocess
 import lockfile
+from oslotest import base as test_base
 from six import moves
 from six.moves.urllib import parse
 import sqlalchemy
@@ -27,7 +28,6 @@ import sqlalchemy.exc
 from heat.openstack.common.db.sqlalchemy import utils
 from heat.openstack.common.gettextutils import _LE
-from heat.openstack.common import test as test_base
 LOG = logging.getLogger(__name__)

@@ -34,7 +34,7 @@ import six
 _AVAILABLE_LANGUAGES = {}
-# FIXME(dhellmann): Remove this when moving to heat.i18n.
+# FIXME(dhellmann): Remove this when moving to oslo.i18n.
 USE_LAZY = False
@@ -116,7 +116,7 @@ class TranslatorFactory(object):
 # NOTE(dhellmann): When this module moves out of the incubator into
-# heat.i18n, these global variables can be moved to an integration
+# oslo.i18n, these global variables can be moved to an integration
 # module within each application.
 # Create the global translation functions.
@@ -147,7 +147,7 @@ def enable_lazy():
     your project is importing _ directly instead of using the
     gettextutils.install() way of importing the _ function.
     """
-    # FIXME(dhellmann): This function will be removed in heat.i18n,
+    # FIXME(dhellmann): This function will be removed in oslo.i18n,
     # because the TranslatorFactory makes it superfluous.
     global _, _LI, _LW, _LE, _LC, USE_LAZY
     tf = TranslatorFactory('heat', lazy=True)
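These gettextutils hunks only correct comments: the module graduates to oslo.i18n, not a "heat.i18n" library. The surrounding code already builds its translation functions through the factory named in the last line; a hedged sketch of that pattern, with attribute names taken from the 2014-era incubator module (treat them as assumptions if your copy differs):

    # TranslatorFactory produces the _ and _L* functions that this module
    # installs as globals; 'heat' is the gettext domain.
    from heat.openstack.common.gettextutils import TranslatorFactory

    tf = TranslatorFactory('heat', lazy=True)
    _ = tf.primary        # ordinary user-facing translation function
    _LW = tf.log_warn     # translation marker for warning-level log messages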

@@ -35,7 +35,17 @@ import datetime
 import functools
 import inspect
 import itertools
-import json
+import sys
+
+if sys.version_info < (2, 7):
+    # On Python <= 2.6, json module is not C boosted, so try to use
+    # simplejson module if available
+    try:
+        import simplejson as json
+    except ImportError:
+        import json
+else:
+    import json
 import six
 import six.moves.xmlrpc_client as xmlrpclib
@@ -160,8 +170,8 @@ def loads(s):
     return json.loads(s)
-def load(s):
-    return json.load(s)
+def load(fp):
+    return json.load(fp)
 try:
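The rename from load(s) to load(fp) matters because json.load reads from a file-like object while json.loads parses a string; the old parameter name obscured that. A quick standard-library illustration:

    import io
    import json

    # loads() takes a string; load() takes an open file or file-like object.
    assert json.loads('{"a": 1}') == {'a': 1}
    assert json.load(io.StringIO(u'{"a": 1}')) == {'a': 1}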

@@ -38,7 +38,7 @@ LOG = logging.getLogger(__name__)
 util_opts = [
     cfg.BoolOpt('disable_process_locking', default=False,
-                help='Whether to disable inter-process locks'),
+                help='Enables or disables inter-process locks.'),
     cfg.StrOpt('lock_path',
                default=os.environ.get("HEAT_LOCK_PATH"),
                help='Directory to use for lock files.')
@@ -276,7 +276,7 @@ def lock(name, lock_file_prefix=None, external=False, lock_path=None):
     :param external: The external keyword argument denotes whether this lock
     should work across multiple processes. This means that if two different
-    workers both run a a method decorated with @synchronized('mylock',
+    workers both run a method decorated with @synchronized('mylock',
     external=True), only one of them will execute at a time.
     """
     int_lock = internal_lock(name)
@@ -287,6 +287,7 @@ def lock(name, lock_file_prefix=None, external=False, lock_path=None):
                 yield ext_lock
         else:
             yield int_lock
+    LOG.debug('Released semaphore "%(lock)s"', {'lock': name})
 def synchronized(name, lock_file_prefix=None, external=False, lock_path=None):
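The docstring corrected above already documents the intended use; a hedged usage sketch (the lock name and decorated function are illustrative, and the import path assumes this tree's incubator copy):

    from heat.openstack.common import lockutils

    # Cross-process critical section: with external=True, two workers that
    # both call this function serialize on a file lock under lock_path.
    @lockutils.synchronized('mylock', external=True)
    def refresh_shared_cache():
        pass  # only one process executes this body at a time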

@@ -84,12 +84,10 @@ logging_cli_opts = [
     cfg.StrOpt('log-config-append',
                metavar='PATH',
                deprecated_name='log-config',
-               help='The name of logging configuration file. It does not '
-                    'disable existing loggers, but just appends specified '
-                    'logging configuration to any other existing logging '
-                    'options. Please see the Python logging module '
-                    'documentation for details on logging configuration '
-                    'files.'),
+               help='The name of a logging configuration file. This file '
+                    'is appended to any existing logging configuration '
+                    'files. For details about logging configuration files, '
+                    'see the Python logging module documentation.'),
     cfg.StrOpt('log-format',
                default=None,
                metavar='FORMAT',
@@ -103,7 +101,7 @@ logging_cli_opts = [
                default=_DEFAULT_LOG_DATE_FORMAT,
                metavar='DATE_FORMAT',
                help='Format string for %%(asctime)s in log records. '
-                    'Default: %(default)s'),
+                    'Default: %(default)s .'),
     cfg.StrOpt('log-file',
                metavar='PATH',
                deprecated_name='logfile',
@@ -112,30 +110,30 @@ logging_cli_opts = [
     cfg.StrOpt('log-dir',
                deprecated_name='logdir',
                help='(Optional) The base directory used for relative '
-                    '--log-file paths'),
+                    '--log-file paths.'),
     cfg.BoolOpt('use-syslog',
                 default=False,
                 help='Use syslog for logging. '
                      'Existing syslog format is DEPRECATED during I, '
-                     'and then will be changed in J to honor RFC5424'),
+                     'and will change in J to honor RFC5424.'),
     cfg.BoolOpt('use-syslog-rfc-format',
                 # TODO(bogdando) remove or use True after existing
                 # syslog format deprecation in J
                 default=False,
-                help='(Optional) Use syslog rfc5424 format for logging. '
-                     'If enabled, will add APP-NAME (RFC5424) before the '
-                     'MSG part of the syslog message. The old format '
-                     'without APP-NAME is deprecated in I, '
+                help='(Optional) Enables or disables syslog rfc5424 format '
+                     'for logging. If enabled, prefixes the MSG part of the '
+                     'syslog message with APP-NAME (RFC5424). The '
+                     'format without the APP-NAME is deprecated in I, '
                      'and will be removed in J.'),
     cfg.StrOpt('syslog-log-facility',
                default='LOG_USER',
-               help='Syslog facility to receive log lines'),
+               help='Syslog facility to receive log lines.'),
 ]
 generic_log_opts = [
     cfg.BoolOpt('use_stderr',
                 default=True,
-                help='Log output to standard error'),
+                help='Log output to standard error.'),
 ]
 log_opts = [
@@ -143,18 +141,18 @@ log_opts = [
                default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
                        '%(name)s [%(request_id)s %(user_identity)s] '
                        '%(instance)s%(message)s',
-               help='Format string to use for log messages with context'),
+               help='Format string to use for log messages with context.'),
     cfg.StrOpt('logging_default_format_string',
                default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
                        '%(name)s [-] %(instance)s%(message)s',
-               help='Format string to use for log messages without context'),
+               help='Format string to use for log messages without context.'),
     cfg.StrOpt('logging_debug_format_suffix',
                default='%(funcName)s %(pathname)s:%(lineno)d',
-               help='Data to append to log format when level is DEBUG'),
+               help='Data to append to log format when level is DEBUG.'),
     cfg.StrOpt('logging_exception_prefix',
                default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s '
                        '%(instance)s',
-               help='Prefix each line of exception output with this format'),
+               help='Prefix each line of exception output with this format.'),
     cfg.ListOpt('default_log_levels',
                 default=[
                     'amqp=WARN',
@@ -167,25 +165,25 @@ log_opts = [
                     'iso8601=WARN',
                     'requests.packages.urllib3.connectionpool=WARN'
                 ],
-                help='List of logger=LEVEL pairs'),
+                help='List of logger=LEVEL pairs.'),
     cfg.BoolOpt('publish_errors',
                 default=False,
-                help='Publish error events'),
+                help='Enables or disables publication of error events.'),
     cfg.BoolOpt('fatal_deprecations',
                 default=False,
-                help='Make deprecations fatal'),
+                help='Enables or disables fatal status of deprecations.'),
     # NOTE(mikal): there are two options here because sometimes we are handed
     # a full instance (and could include more information), and other times we
     # are just handed a UUID for the instance.
     cfg.StrOpt('instance_format',
                default='[instance: %(uuid)s] ',
-               help='If an instance is passed with the log message, format '
-                    'it like this'),
+               help='The format for an instance that is passed with the log '
+                    'message. '),
     cfg.StrOpt('instance_uuid_format',
                default='[instance: %(uuid)s] ',
-               help='If an instance UUID is passed with the log message, '
-                    'format it like this'),
+               help='The format for an instance UUID that is passed with the '
+                    'log message. '),
 ]
 CONF = cfg.CONF
@@ -451,7 +449,7 @@ def _load_log_config(log_config_append):
         logging.config.fileConfig(log_config_append,
                                   disable_existing_loggers=False)
     except moves.configparser.Error as exc:
-        raise LogConfigError(log_config_append, str(exc))
+        raise LogConfigError(log_config_append, six.text_type(exc))
 def setup(product_name, version='unknown'):
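Swapping str(exc) for six.text_type(exc) avoids a Python 2 trap: str() raises UnicodeEncodeError on an exception whose message contains non-ASCII text, which becomes likely once lazy translation is enabled. A small sketch:

    # six.text_type is unicode on Python 2 and str on Python 3, so this
    # coercion is safe for translated, non-ASCII error messages.
    import six

    exc = ValueError(u'config file caf\xe9.conf is invalid')
    message = six.text_type(exc)   # works on both interpreter lines
    assert u'caf\xe9' in message
    # On Python 2, str(exc) here would raise UnicodeEncodeError.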

@@ -17,18 +17,15 @@
 Network-related utilities and helper functions.
 """
-# TODO(jd) Use six.moves once
-# https://bitbucket.org/gutworth/six/pull-request/28
-# is merged
-try:
-    import urllib.parse
-    SplitResult = urllib.parse.SplitResult
-except ImportError:
-    import urlparse
-    SplitResult = urlparse.SplitResult
+import socket
 from six.moves.urllib import parse
+from heat.openstack.common.gettextutils import _LW
+from heat.openstack.common import log as logging
+LOG = logging.getLogger(__name__)
 def parse_host_port(address, default_port=None):
     """Interpret a string as a host:port pair.
@@ -74,7 +71,7 @@ def parse_host_port(address, default_port=None):
     return (host, None if port is None else int(port))
-class ModifiedSplitResult(SplitResult):
+class ModifiedSplitResult(parse.SplitResult):
     """Split results class for urlsplit."""
     # NOTE(dims): The functions below are needed for Python 2.6.x.
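With six now a hard dependency, the hand-rolled try/except import of urlparse versus urllib.parse collapses into six.moves, and SplitResult is referenced through the same namespace. An illustration with the standard six shim:

    # six.moves.urllib.parse resolves to urlparse on Python 2 and to
    # urllib.parse on Python 3, so one import serves both.
    from six.moves.urllib import parse

    result = parse.urlsplit('http://example.com:8080/path?q=1')
    assert result.netloc == 'example.com:8080'
    assert isinstance(result, parse.SplitResult)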
@@ -106,3 +103,58 @@ def urlsplit(url, scheme='', allow_fragments=True):
         path, query = path.split('?', 1)
     return ModifiedSplitResult(scheme, netloc,
                                path, query, fragment)
+def set_tcp_keepalive(sock, tcp_keepalive=True,
+                      tcp_keepidle=None,
+                      tcp_keepalive_interval=None,
+                      tcp_keepalive_count=None):
+    """Set values for tcp keepalive parameters
+
+    This function configures tcp keepalive parameters if users wish to do
+    so.
+
+    :param tcp_keepalive: Boolean, turn on or off tcp_keepalive. If users are
+        not sure, this should be True, and default values will be used.
+    :param tcp_keepidle: time to wait before starting to send keepalive probes
+    :param tcp_keepalive_interval: time between successive probes, once the
+        initial wait time is over
+    :param tcp_keepalive_count: number of probes to send before the connection
+        is killed
+    """
+    # NOTE(praneshp): Despite keepalive being a tcp concept, the level is
+    # still SOL_SOCKET. This is a quirk.
+    if isinstance(tcp_keepalive, bool):
+        sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, tcp_keepalive)
+    else:
+        raise TypeError("tcp_keepalive must be a boolean")
+    if not tcp_keepalive:
+        return
+    # These options aren't available in the OS X version of eventlet.
+    # Idle + Count * Interval effectively gives you the total timeout.
+    if tcp_keepidle is not None:
+        if hasattr(socket, 'TCP_KEEPIDLE'):
+            sock.setsockopt(socket.IPPROTO_TCP,
+                            socket.TCP_KEEPIDLE,
+                            tcp_keepidle)
+        else:
+            LOG.warning(_LW('tcp_keepidle not available on your system'))
+    if tcp_keepalive_interval is not None:
+        if hasattr(socket, 'TCP_KEEPINTVL'):
+            sock.setsockopt(socket.IPPROTO_TCP,
+                            socket.TCP_KEEPINTVL,
+                            tcp_keepalive_interval)
+        else:
+            LOG.warning(_LW('tcp_keepintvl not available on your system'))
+    if tcp_keepalive_count is not None:
+        if hasattr(socket, 'TCP_KEEPCNT'):
+            sock.setsockopt(socket.IPPROTO_TCP,
+                            socket.TCP_KEEPCNT,
+                            tcp_keepalive_count)
+        else:
+            LOG.warning(_LW('tcp_keepcnt not available on your system'))
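The new helper guards each TCP option with hasattr because TCP_KEEPIDLE, TCP_KEEPINTVL, and TCP_KEEPCNT are platform-specific (Linux has all three; OS X does not). A hedged usage sketch; the numeric values are illustrative, and per the comment above, Idle + Count * Interval (here 60 + 4 * 15 = 120 seconds) bounds how long a dead peer can linger:

    import socket

    from heat.openstack.common import network_utils  # path as in this tree

    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    network_utils.set_tcp_keepalive(
        sock,
        tcp_keepalive=True,
        tcp_keepidle=60,            # seconds idle before the first probe
        tcp_keepalive_interval=15,  # seconds between successive probes
        tcp_keepalive_count=4)      # failed probes before the drop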

@@ -93,10 +93,11 @@ from heat.openstack.common import log as logging
 policy_opts = [
     cfg.StrOpt('policy_file',
                default='policy.json',
-               help=_('JSON file containing policy')),
+               help=_('The JSON file that defines policies.')),
     cfg.StrOpt('policy_default_rule',
                default='default',
-               help=_('Rule enforced when requested rule is not found')),
+               help=_('Default rule. Enforced when a requested rule is not '
+                      'found.')),
 ]
 CONF = cfg.CONF
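Much of this commit polishes oslo.config help strings like the two above; these are what --help output and generated sample configs show operators. A hedged sketch of how such an option registers and resolves (the from oslo.config import cfg spelling matches this 2014-era code; everything else here is illustrative):

    from oslo.config import cfg

    opts = [
        cfg.StrOpt('policy_file',
                   default='policy.json',
                   help='The JSON file that defines policies.'),
    ]

    conf = cfg.ConfigOpts()
    conf.register_opts(opts)
    conf([])                 # parse an empty command line
    print(conf.policy_file)  # -> policy.json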

@@ -202,7 +202,7 @@ class ReplyProxy(ConnectionContext):
             LOG.warn(_('No calling threads waiting for msg_id : %(msg_id)s'
                        ', message : %(data)s'), {'msg_id': msg_id,
                                                  'data': message_data})
-            LOG.warn(_('_call_waiters: %s') % str(self._call_waiters))
+            LOG.warn(_('_call_waiters: %s') % self._call_waiters)
         else:
             waiter.put(message_data)
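This and the RPC hunks below all drop redundant str() calls around %-interpolation arguments: %s coerces its operand itself, and forcing str() early can raise UnicodeEncodeError on Python 2 for non-ASCII messages. For example:

    # %s stringifies the argument on its own, so str() adds nothing and
    # can only hurt on Python 2 with unicode exception messages.
    exc = RuntimeError(u'broker unreachable: caf\xe9')
    line = u'Failed to consume message from queue: %s' % exc
    assert u'caf\xe9' in line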

@@ -549,7 +549,7 @@ class Connection(object):
                 raise
             log_info = {}
-            log_info['err_str'] = str(e)
+            log_info['err_str'] = e
             log_info['max_retries'] = self.max_retries
             log_info.update(params)
@@ -621,7 +621,7 @@ class Connection(object):
         """
         def _connect_error(exc):
-            log_info = {'topic': topic, 'err_str': str(exc)}
+            log_info = {'topic': topic, 'err_str': exc}
             LOG.error(_LE("Failed to declare consumer for topic '%(topic)s': "
                           "%(err_str)s") % log_info)
@@ -641,11 +641,11 @@ class Connection(object):
         def _error_callback(exc):
             if isinstance(exc, socket.timeout):
                 LOG.debug('Timed out waiting for RPC response: %s' %
-                          str(exc))
+                          exc)
                 raise rpc_common.Timeout()
             else:
                 LOG.exception(_LE('Failed to consume message from queue: %s') %
-                              str(exc))
+                              exc)
         info['do_consume'] = True
         def _consume():
@@ -682,7 +682,7 @@ class Connection(object):
         """Send to a publisher based on the publisher class."""
         def _error_callback(exc):
-            log_info = {'topic': topic, 'err_str': str(exc)}
+            log_info = {'topic': topic, 'err_str': exc}
             LOG.exception(_LE("Failed to publish message to topic "
                               "'%(topic)s': %(err_str)s") % log_info)

@@ -571,7 +571,7 @@ class Connection(object):
         add it to our list of consumers
         """
         def _connect_error(exc):
-            log_info = {'topic': topic, 'err_str': str(exc)}
+            log_info = {'topic': topic, 'err_str': exc}
             LOG.error(_LE("Failed to declare consumer for topic '%(topic)s': "
                           "%(err_str)s") % log_info)
@@ -588,11 +588,11 @@ class Connection(object):
         def _error_callback(exc):
             if isinstance(exc, qpid_exceptions.Empty):
                 LOG.debug('Timed out waiting for RPC response: %s' %
-                          str(exc))
+                          exc)
                 raise rpc_common.Timeout()
             else:
                 LOG.exception(_LE('Failed to consume message from queue: %s') %
-                              str(exc))
+                              exc)
         def _consume():
             nxt_receiver = self.session.next_receiver(timeout=timeout)
@@ -625,7 +625,7 @@ class Connection(object):
         """Send to a publisher based on the publisher class."""
         def _connect_error(exc):
-            log_info = {'topic': topic, 'err_str': str(exc)}
+            log_info = {'topic': topic, 'err_str': exc}
             LOG.exception(_LE("Failed to publish message to topic "
                               "'%(topic)s': %(err_str)s") % log_info)

@@ -48,7 +48,7 @@ class RpcProxy(object):
                       basis.
         :param version_cap: Optionally cap the maximum version used for sent
                             messages.
-        :param serializer: Optionaly (de-)serialize entities with a
+        :param serializer: Optionally (de-)serialize entities with a
                            provided helper.
         """
         self.topic = topic

@@ -78,7 +78,7 @@ def bool_from_string(subject, strict=False, default=False):
     Strings yielding False are 'f', 'false', 'off', 'n', 'no', or '0'.
     """
     if not isinstance(subject, six.string_types):
-        subject = str(subject)
+        subject = six.text_type(subject)
     lowered = subject.strip().lower()
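Here the coercion switches to six.text_type so unicode input survives on Python 2. A hedged sketch of the documented behavior (the import path assumes this tree's incubator copy):

    from heat.openstack.common import strutils

    assert strutils.bool_from_string('YES') is True
    assert strutils.bool_from_string('off') is False
    # Non-string subjects are now coerced with six.text_type first:
    assert strutils.bool_from_string(1) is True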

@@ -18,7 +18,7 @@
 ##
 ## DO NOT MODIFY THIS FILE
 ##
-## This file is being graduated to the heattest library. Please make all
+## This file is being graduated to the oslotest library. Please make all
 ## changes there, and only backport critical fixes here. - dhellmann
 ##
 ##############################################################################

@@ -99,7 +99,7 @@ class ThreadGroup(object):
             except Exception as ex:
                 LOG.exception(ex)
-    def _stop_timers(self):
+    def stop_timers(self):
         for x in self.timers:
             try:
                 x.stop()
@@ -114,7 +114,7 @@ class ThreadGroup(object):
           Never kill threads.
         * In case of graceful=False, kill threads immediately.
         """
-        self._stop_timers()
+        self.stop_timers()
         if graceful:
             # In case of graceful=True, wait for all threads to be
             # finished, never kill threads
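Renaming _stop_timers to stop_timers promotes it to public API, letting callers cancel a group's timers without tearing down its threads. A hedged usage sketch; the add_timer signature follows my reading of the 2014-era incubator module, so treat it as an assumption:

    from heat.openstack.common import threadgroup

    def heartbeat():
        pass  # illustrative periodic task

    tg = threadgroup.ThreadGroup()
    tg.add_timer(60, heartbeat)  # run heartbeat every 60 seconds
    tg.stop_timers()             # now public: halt timers, keep threads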