Sync with oslo-incubator

In particular, pick up the bug fix for memory cache
backend. Also includes some improved code comments
and config option help strings, and a few other
tweaks.

Change-Id: I10e592bbafadac22cded158f4083515b76a58fca
This commit is contained in:
kgriffs 2014-06-19 15:18:43 -05:00
parent 22584f484c
commit feff615796
6 changed files with 80 additions and 67 deletions

View File

@@ -147,7 +147,7 @@ class MemoryBackend(backends.BaseCache):
try: try:
# NOTE(flaper87): Keys with ttl == 0 # NOTE(flaper87): Keys with ttl == 0
# don't exist in the _keys_expires dict # don't exist in the _keys_expires dict
self._keys_expires[value[0]].remove(value[1]) self._keys_expires[value[0]].remove(key)
except (KeyError, ValueError): except (KeyError, ValueError):
pass pass

View File

@@ -26,9 +26,9 @@ class BaseCache(object):
:params parsed_url: Parsed url object. :params parsed_url: Parsed url object.
:params options: A dictionary with configuration parameters :params options: A dictionary with configuration parameters
for the cache. For example: for the cache. For example:
- default_ttl: An integer defining the default ttl
for keys. - default_ttl: An integer defining the default ttl for keys.
""" """
def __init__(self, parsed_url, options=None): def __init__(self, parsed_url, options=None):
@@ -43,20 +43,17 @@ class BaseCache(object):
def set(self, key, value, ttl, not_exists=False): def set(self, key, value, ttl, not_exists=False):
"""Sets or updates a cache entry """Sets or updates a cache entry
NOTE: Thread-safety is required and has to be .. note:: Thread-safety is required and has to be guaranteed by the
guaranteed by the backend implementation. backend implementation.
:params key: Item key as string. :params key: Item key as string.
:type key: `unicode string` :type key: `unicode string`
:params value: Value to assign to the key. This :params value: Value to assign to the key. This can be anything that
can be anything that is handled is handled by the current backend.
by the current backend. :params ttl: Key's timeout in seconds. 0 means no timeout.
:params ttl: Key's timeout in seconds. 0 means
no timeout.
:type ttl: int :type ttl: int
:params not_exists: If True, the key will be set :params not_exists: If True, the key will be set if it doesn't exist.
if it doesn't exist. Otherwise, Otherwise, it'll always be set.
it'll always be set.
:type not_exists: bool :type not_exists: bool
:returns: True if the operation succeeds, False otherwise. :returns: True if the operation succeeds, False otherwise.
@@ -74,9 +71,8 @@ class BaseCache(object):
:params key: Item key as string. :params key: Item key as string.
:type key: `unicode string` :type key: `unicode string`
:params value: Value to assign to the key. This :params value: Value to assign to the key. This can be anything that
can be anything that is handled is handled by the current backend.
by the current backend.
""" """
try: try:
return self[key] return self[key]
@@ -91,15 +87,14 @@ class BaseCache(object):
def get(self, key, default=None): def get(self, key, default=None):
"""Gets one item from the cache """Gets one item from the cache
NOTE: Thread-safety is required and it has to be .. note:: Thread-safety is required and it has to be guaranteed
guaranteed by the backend implementation. by the backend implementation.
:params key: Key for the item to retrieve :params key: Key for the item to retrieve from the cache.
from the cache.
:params default: The default value to return. :params default: The default value to return.
:returns: `key`'s value in the cache if it exists, :returns: `key`'s value in the cache if it exists, otherwise
otherwise `default` should be returned. `default` should be returned.
""" """
return self._get(key, default) return self._get(key, default)
@@ -115,8 +110,8 @@ class BaseCache(object):
def __delitem__(self, key): def __delitem__(self, key):
"""Removes an item from cache. """Removes an item from cache.
NOTE: Thread-safety is required and it has to be .. note:: Thread-safety is required and it has to be guaranteed by
guaranteed by the backend implementation. the backend implementation.
:params key: The key to remove. :params key: The key to remove.
@@ -130,8 +125,8 @@ class BaseCache(object):
def clear(self): def clear(self):
"""Removes all items from the cache. """Removes all items from the cache.
NOTE: Thread-safety is required and it has to be .. note:: Thread-safety is required and it has to be guaranteed by
guaranteed by the backend implementation. the backend implementation.
""" """
return self._clear() return self._clear()
@@ -143,9 +138,8 @@ class BaseCache(object):
"""Increments the value for a key """Increments the value for a key
:params key: The key for the value to be incremented :params key: The key for the value to be incremented
:params delta: Number of units by which to increment :params delta: Number of units by which to increment the value.
the value. Pass a negative number to Pass a negative number to decrement the value.
decrement the value.
:returns: The new value :returns: The new value
""" """
@@ -158,10 +152,8 @@ class BaseCache(object):
def append_tail(self, key, tail): def append_tail(self, key, tail):
"""Appends `tail` to `key`'s value. """Appends `tail` to `key`'s value.
:params key: The key of the value to which :params key: The key of the value to which `tail` should be appended.
`tail` should be appended. :params tail: The list of values to append to the original.
:params tail: The list of values to append to the
original.
:returns: The new value :returns: The new value
""" """
@@ -181,10 +173,8 @@ class BaseCache(object):
def append(self, key, value): def append(self, key, value):
"""Appends `value` to `key`'s value. """Appends `value` to `key`'s value.
:params key: The key of the value to which :params key: The key of the value to which `tail` should be appended.
`tail` should be appended. :params value: The value to append to the original.
:params value: The value to append to the
original.
:returns: The new value :returns: The new value
""" """
@@ -196,8 +186,7 @@ class BaseCache(object):
:params key: The key to verify. :params key: The key to verify.
:returns: True if the key exists, :returns: True if the key exists, otherwise False.
otherwise False.
""" """
@abc.abstractmethod @abc.abstractmethod
@@ -209,9 +198,8 @@ class BaseCache(object):
"""Gets keys' value from cache """Gets keys' value from cache
:params keys: List of keys to retrieve. :params keys: List of keys to retrieve.
:params default: The default value to return :params default: The default value to return for each key that is not
for each key that is not in in the cache.
the cache.
:returns: A generator of (key, value) :returns: A generator of (key, value)
""" """
@@ -227,13 +215,12 @@ class BaseCache(object):
def set_many(self, data, ttl=None): def set_many(self, data, ttl=None):
"""Puts several items into the cache at once """Puts several items into the cache at once
Depending on the backend, this operation may or may Depending on the backend, this operation may or may not be efficient.
not be efficient. The default implementation calls The default implementation calls set for each (key, value) pair
set for each (key, value) pair passed, other backends passed, other backends support set_many operations as part of their
support set_many operations as part of their protocols. protocols.
:params data: A dictionary like {key: val} to store :params data: A dictionary like {key: val} to store in the cache.
in the cache.
:params ttl: Key's timeout in seconds. :params ttl: Key's timeout in seconds.
""" """

View File

@@ -64,6 +64,10 @@ BASEDIR = os.path.abspath(os.path.join(os.path.dirname(__file__),
WORDWRAP_WIDTH = 60 WORDWRAP_WIDTH = 60
def raise_extension_exception(extmanager, ep, err):
raise
def generate(argv): def generate(argv):
parser = argparse.ArgumentParser( parser = argparse.ArgumentParser(
description='generate sample configuration file', description='generate sample configuration file',
@@ -107,6 +111,7 @@ def generate(argv):
'oslo.config.opts', 'oslo.config.opts',
names=list(set(parsed_args.libraries)), names=list(set(parsed_args.libraries)),
invoke_on_load=False, invoke_on_load=False,
on_load_failure_callback=raise_extension_exception
) )
for ext in loader: for ext in loader:
for group, opts in ext.plugin(): for group, opts in ext.plugin():
@@ -145,7 +150,7 @@ def _import_module(mod_str):
def _is_in_group(opt, group): def _is_in_group(opt, group):
"Check if opt is in group." """Check if opt is in group."""
for value in group._opts.values(): for value in group._opts.values():
# NOTE(llu): Temporary workaround for bug #1262148, wait until # NOTE(llu): Temporary workaround for bug #1262148, wait until
# newly released oslo.config support '==' operator. # newly released oslo.config support '==' operator.
@@ -154,7 +159,7 @@ def _is_in_group(opt, group):
return False return False
def _guess_groups(opt, mod_obj): def _guess_groups(opt):
# is it in the DEFAULT group? # is it in the DEFAULT group?
if _is_in_group(opt, cfg.CONF): if _is_in_group(opt, cfg.CONF):
return 'DEFAULT' return 'DEFAULT'
@@ -188,7 +193,7 @@ def _list_opts(obj):
ret = {} ret = {}
for opt in opts: for opt in opts:
ret.setdefault(_guess_groups(opt, obj), []).append(opt) ret.setdefault(_guess_groups(opt), []).append(opt)
return ret.items() return ret.items()
@@ -218,6 +223,8 @@ def _get_my_ip():
def _sanitize_default(name, value): def _sanitize_default(name, value):
"""Set up a reasonably sensible default for pybasedir, my_ip and host.""" """Set up a reasonably sensible default for pybasedir, my_ip and host."""
hostname = socket.gethostname()
fqdn = socket.getfqdn()
if value.startswith(sys.prefix): if value.startswith(sys.prefix):
# NOTE(jd) Don't use os.path.join, because it is likely to think the # NOTE(jd) Don't use os.path.join, because it is likely to think the
# second part is an absolute pathname and therefore drop the first # second part is an absolute pathname and therefore drop the first
@@ -229,8 +236,13 @@ def _sanitize_default(name, value):
return value.replace(BASEDIR, '') return value.replace(BASEDIR, '')
elif value == _get_my_ip(): elif value == _get_my_ip():
return '10.0.0.1' return '10.0.0.1'
elif value in (socket.gethostname(), socket.getfqdn()) and 'host' in name: elif value in (hostname, fqdn):
return 'marconi' if 'host' in name:
return 'marconi'
elif value.endswith(hostname):
return value.replace(hostname, 'marconi')
elif value.endswith(fqdn):
return value.replace(fqdn, 'marconi')
elif value.strip() != value: elif value.strip() != value:
return '"%s"' % value return '"%s"' % value
return value return value
@@ -241,7 +253,6 @@ def _print_opt(opt):
if not opt_help: if not opt_help:
sys.stderr.write('WARNING: "%s" is missing help string.\n' % opt_name) sys.stderr.write('WARNING: "%s" is missing help string.\n' % opt_name)
opt_help = "" opt_help = ""
opt_type = None
try: try:
opt_type = OPTION_REGEX.search(str(type(opt))).group(0) opt_type = OPTION_REGEX.search(str(type(opt))).group(0)
except (ValueError, AttributeError) as err: except (ValueError, AttributeError) as err:

View File

@@ -49,9 +49,22 @@ class save_and_reraise_exception(object):
decide_if_need_reraise() decide_if_need_reraise()
if not should_be_reraised: if not should_be_reraised:
ctxt.reraise = False ctxt.reraise = False
If another exception occurs and reraise flag is False,
the saved exception will not be logged.
If the caller wants to raise new exception during exception handling
he/she sets reraise to False initially with an ability to set it back to
True if needed::
except Exception:
with save_and_reraise_exception(reraise=False) as ctxt:
[if statements to determine whether to raise a new exception]
# Not raising a new exception, so reraise
ctxt.reraise = True
""" """
def __init__(self): def __init__(self, reraise=True):
self.reraise = True self.reraise = reraise
def __enter__(self): def __enter__(self):
self.type_, self.value, self.tb, = sys.exc_info() self.type_, self.value, self.tb, = sys.exc_info()
@@ -59,10 +72,11 @@ class save_and_reraise_exception(object):
def __exit__(self, exc_type, exc_val, exc_tb): def __exit__(self, exc_type, exc_val, exc_tb):
if exc_type is not None: if exc_type is not None:
logging.error(_LE('Original exception being dropped: %s'), if self.reraise:
traceback.format_exception(self.type_, logging.error(_LE('Original exception being dropped: %s'),
self.value, traceback.format_exception(self.type_,
self.tb)) self.value,
self.tb))
return False return False
if self.reraise: if self.reraise:
six.reraise(self.type_, self.value, self.tb) six.reraise(self.type_, self.value, self.tb)

View File

@@ -99,13 +99,13 @@ def remove_path_on_error(path, remove=delete_if_exists):
def file_open(*args, **kwargs): def file_open(*args, **kwargs):
"""Open file """Open file
see built-in file() documentation for more details see built-in open() documentation for more details
Note: The reason this is kept in a separate module is to easily Note: The reason this is kept in a separate module is to easily
be able to provide a stub module that doesn't alter system be able to provide a stub module that doesn't alter system
state at all (for unit tests) state at all (for unit tests)
""" """
return file(*args, **kwargs) return open(*args, **kwargs)
def write_to_tempfile(content, path=None, suffix='', prefix='tmp'): def write_to_tempfile(content, path=None, suffix='', prefix='tmp'):

View File

@@ -38,10 +38,10 @@ LOG = logging.getLogger(__name__)
util_opts = [ util_opts = [
cfg.BoolOpt('disable_process_locking', default=False, cfg.BoolOpt('disable_process_locking', default=False,
help='Whether to disable inter-process locks'), help='Enables or disables inter-process locks.'),
cfg.StrOpt('lock_path', cfg.StrOpt('lock_path',
default=os.environ.get("MARCONI_LOCK_PATH"), default=os.environ.get("MARCONI_LOCK_PATH"),
help=('Directory to use for lock files.')) help='Directory to use for lock files.')
] ]
@@ -239,7 +239,7 @@ def external_lock(name, lock_file_prefix=None, lock_path=None):
def remove_external_lock_file(name, lock_file_prefix=None): def remove_external_lock_file(name, lock_file_prefix=None):
"""Remove a external lock file when it's not used anymore """Remove an external lock file when it's not used anymore
This will be helpful when we have a lot of lock files This will be helpful when we have a lot of lock files
""" """
with internal_lock(name): with internal_lock(name):
@@ -276,7 +276,7 @@ def lock(name, lock_file_prefix=None, external=False, lock_path=None):
:param external: The external keyword argument denotes whether this lock :param external: The external keyword argument denotes whether this lock
should work across multiple processes. This means that if two different should work across multiple processes. This means that if two different
workers both run a a method decorated with @synchronized('mylock', workers both run a method decorated with @synchronized('mylock',
external=True), only one of them will execute at a time. external=True), only one of them will execute at a time.
""" """
int_lock = internal_lock(name) int_lock = internal_lock(name)
@@ -287,6 +287,7 @@ def lock(name, lock_file_prefix=None, external=False, lock_path=None):
yield ext_lock yield ext_lock
else: else:
yield int_lock yield int_lock
LOG.debug('Released semaphore "%(lock)s"', {'lock': name})
def synchronized(name, lock_file_prefix=None, external=False, lock_path=None): def synchronized(name, lock_file_prefix=None, external=False, lock_path=None):