diff --git a/mistral/actions/action_factory.py b/mistral/actions/action_factory.py
index 438e744d6..5b6370e17 100644
--- a/mistral/actions/action_factory.py
+++ b/mistral/actions/action_factory.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from mistral.openstack.common import importutils
+from oslo_utils import importutils
 
 
 def construct_action_class(action_class_str, attributes):
diff --git a/mistral/config.py b/mistral/config.py
index d7d9e17ae..ac1135585 100644
--- a/mistral/config.py
+++ b/mistral/config.py
@@ -115,12 +115,11 @@ CONF.register_cli_opts(CLI_OPTS)
 
 _DEFAULT_LOG_LEVELS = [
     'sqlalchemy=WARN',
-    'oslo.messaging=INFO',
+    'oslo_messaging=INFO',
     'iso8601=WARN',
     'eventlet.wsgi.server=WARN',
     'stevedore=INFO',
-    'mistral.openstack.common.loopingcall=INFO',
-    'mistral.openstack.common.periodic_task=INFO',
+    'oslo_service.periodic_task=INFO',
     'mistral.services.periodic=INFO'
 ]
 
diff --git a/mistral/context.py b/mistral/context.py
index e1d3317df..57b6001bc 100644
--- a/mistral/context.py
+++ b/mistral/context.py
@@ -18,10 +18,10 @@ import eventlet
 from keystoneclient.v3 import client as keystone_client
 from oslo_config import cfg
 import oslo_messaging as messaging
+from oslo_serialization import jsonutils
 from pecan import hooks
 
 from mistral import exceptions as exc
-from mistral.openstack.common import jsonutils
 from mistral import utils
 
 
diff --git a/mistral/db/sqlalchemy/migration/cli.py b/mistral/db/sqlalchemy/migration/cli.py
index 25399b7c0..2df9a0720 100644
--- a/mistral/db/sqlalchemy/migration/cli.py
+++ b/mistral/db/sqlalchemy/migration/cli.py
@@ -19,9 +19,9 @@ from alembic import command as alembic_cmd
 from alembic import config as alembic_cfg
 from alembic import util as alembic_u
 from oslo_config import cfg
+from oslo_utils import importutils
 import six
 
-from mistral.openstack.common import importutils
 from mistral.services import action_manager
 from mistral.services import workflows
 
diff --git a/mistral/db/sqlalchemy/types.py b/mistral/db/sqlalchemy/types.py
index 154398494..d61d36f07 100644
--- a/mistral/db/sqlalchemy/types.py
+++ b/mistral/db/sqlalchemy/types.py
@@ -18,12 +18,11 @@
 # expressed by json-strings
 #
 
+from oslo_serialization import jsonutils
 import sqlalchemy as sa
 from sqlalchemy.dialects import mysql
 from sqlalchemy.ext import mutable
 
-from mistral.openstack.common import jsonutils
-
 
 class JsonEncoded(sa.TypeDecorator):
     """Represents an immutable structure as a json-encoded string."""
diff --git a/mistral/openstack/__init__.py b/mistral/openstack/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/mistral/openstack/common/__init__.py b/mistral/openstack/common/__init__.py
deleted file mode 100644
index d1223eaf7..000000000
--- a/mistral/openstack/common/__init__.py
+++ /dev/null
@@ -1,17 +0,0 @@
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
- -import six - - -six.add_move(six.MovedModule('mox', 'mox', 'mox3.mox')) diff --git a/mistral/openstack/common/config/__init__.py b/mistral/openstack/common/config/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/mistral/openstack/common/config/generator.py b/mistral/openstack/common/config/generator.py deleted file mode 100644 index 32f6bdd36..000000000 --- a/mistral/openstack/common/config/generator.py +++ /dev/null @@ -1,314 +0,0 @@ -# Copyright 2012 SINA Corporation -# Copyright 2014 Cisco Systems, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# - -"""Extracts OpenStack config option info from module(s).""" - -from __future__ import print_function - -import argparse -import imp -import os -import re -import socket -import sys -import textwrap - -from oslo_config import cfg -import six -import stevedore.named - -from mistral.openstack.common import gettextutils -from mistral.openstack.common import importutils - -gettextutils.install('mistral') - -STROPT = "StrOpt" -BOOLOPT = "BoolOpt" -INTOPT = "IntOpt" -FLOATOPT = "FloatOpt" -LISTOPT = "ListOpt" -DICTOPT = "DictOpt" -MULTISTROPT = "MultiStrOpt" - -OPT_TYPES = { - STROPT: 'string value', - BOOLOPT: 'boolean value', - INTOPT: 'integer value', - FLOATOPT: 'floating point value', - LISTOPT: 'list value', - DICTOPT: 'dict value', - MULTISTROPT: 'multi valued', -} - -OPTION_REGEX = re.compile(r"(%s)" % "|".join([STROPT, BOOLOPT, INTOPT, - FLOATOPT, LISTOPT, DICTOPT, - MULTISTROPT])) - -PY_EXT = ".py" -BASEDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), - "../../../../")) -WORDWRAP_WIDTH = 60 - - -def raise_extension_exception(extmanager, ep, err): - raise - - -def generate(argv): - parser = argparse.ArgumentParser( - description='generate sample configuration file', - ) - parser.add_argument('-m', dest='modules', action='append') - parser.add_argument('-l', dest='libraries', action='append') - parser.add_argument('srcfiles', nargs='*') - parsed_args = parser.parse_args(argv) - - mods_by_pkg = dict() - for filepath in parsed_args.srcfiles: - pkg_name = filepath.split(os.sep)[1] - mod_str = '.'.join(['.'.join(filepath.split(os.sep)[:-1]), - os.path.basename(filepath).split('.')[0]]) - mods_by_pkg.setdefault(pkg_name, list()).append(mod_str) - # NOTE(lzyeval): place top level modules before packages - pkg_names = sorted(pkg for pkg in mods_by_pkg if pkg.endswith(PY_EXT)) - ext_names = sorted(pkg for pkg in mods_by_pkg if pkg not in pkg_names) - pkg_names.extend(ext_names) - - # opts_by_group is a mapping of group name to an options list - # The options list is a list of (module, options) tuples - opts_by_group = {'DEFAULT': []} - - if parsed_args.modules: - for module_name in parsed_args.modules: - module = _import_module(module_name) - if module: - for group, opts in _list_opts(module): - opts_by_group.setdefault(group, []).append((module_name, - opts)) - - # Look for entry points defined in libraries (or applications) for - # option discovery, and include their return 
values in the output. - # - # Each entry point should be a function returning an iterable - # of pairs with the group name (or None for the default group) - # and the list of Opt instances for that group. - if parsed_args.libraries: - loader = stevedore.named.NamedExtensionManager( - 'oslo.config.opts', - names=list(set(parsed_args.libraries)), - invoke_on_load=False, - on_load_failure_callback=raise_extension_exception - ) - for ext in loader: - for group, opts in ext.plugin(): - opt_list = opts_by_group.setdefault(group or 'DEFAULT', []) - opt_list.append((ext.name, opts)) - - for pkg_name in pkg_names: - mods = mods_by_pkg.get(pkg_name) - mods.sort() - for mod_str in mods: - if mod_str.endswith('.__init__'): - mod_str = mod_str[:mod_str.rfind(".")] - - mod_obj = _import_module(mod_str) - if not mod_obj: - raise RuntimeError("Unable to import module %s" % mod_str) - - for group, opts in _list_opts(mod_obj): - opts_by_group.setdefault(group, []).append((mod_str, opts)) - - print_group_opts('DEFAULT', opts_by_group.pop('DEFAULT', [])) - for group in sorted(opts_by_group.keys()): - print_group_opts(group, opts_by_group[group]) - - -def _import_module(mod_str): - try: - if mod_str.startswith('bin.'): - imp.load_source(mod_str[4:], os.path.join('bin', mod_str[4:])) - return sys.modules[mod_str[4:]] - else: - return importutils.import_module(mod_str) - except Exception as e: - sys.stderr.write("Error importing module %s: %s\n" % (mod_str, str(e))) - return None - - -def _is_in_group(opt, group): - "Check if opt is in group." - for value in group._opts.values(): - # NOTE(llu): Temporary workaround for bug #1262148, wait until - # newly released oslo.config support '==' operator. - if not(value['opt'] != opt): - return True - return False - - -def _guess_groups(opt, mod_obj): - # is it in the DEFAULT group? - if _is_in_group(opt, cfg.CONF): - return 'DEFAULT' - - # what other groups is it in? - for value in cfg.CONF.values(): - if isinstance(value, cfg.CONF.GroupAttr): - if _is_in_group(opt, value._group): - return value._group.name - - raise RuntimeError( - "Unable to find group for option %s, " - "maybe it's defined twice in the same group?" - % opt.name - ) - - -def _list_opts(obj): - def is_opt(o): - return (isinstance(o, cfg.Opt) and - not isinstance(o, cfg.SubCommandOpt)) - - opts = list() - for attr_str in dir(obj): - attr_obj = getattr(obj, attr_str) - if is_opt(attr_obj): - opts.append(attr_obj) - elif (isinstance(attr_obj, list) and - all(map(lambda x: is_opt(x), attr_obj))): - opts.extend(attr_obj) - - ret = {} - for opt in opts: - ret.setdefault(_guess_groups(opt, obj), []).append(opt) - return ret.items() - - -def print_group_opts(group, opts_by_module): - print("[%s]" % group) - print('') - for mod, opts in opts_by_module: - print('#') - print('# Options defined in %s' % mod) - print('#') - print('') - for opt in opts: - _print_opt(opt) - print('') - - -def _get_my_ip(): - try: - csock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - csock.connect(('8.8.8.8', 80)) - (addr, port) = csock.getsockname() - csock.close() - return addr - except socket.error: - return None - - -def _sanitize_default(name, value): - """Set up a reasonably sensible default for pybasedir, my_ip and host.""" - hostname = socket.gethostname() - fqdn = socket.getfqdn() - if value.startswith(sys.prefix): - # NOTE(jd) Don't use os.path.join, because it is likely to think the - # second part is an absolute pathname and therefore drop the first - # part. 
- value = os.path.normpath("/usr/" + value[len(sys.prefix):]) - elif value.startswith(BASEDIR): - return value.replace(BASEDIR, '/usr/lib/python/site-packages') - elif BASEDIR in value: - return value.replace(BASEDIR, '') - elif value == _get_my_ip(): - return '10.0.0.1' - elif value in (hostname, fqdn): - if 'host' in name: - return 'mistral' - elif value.endswith(hostname): - return value.replace(hostname, 'mistral') - elif value.endswith(fqdn): - return value.replace(fqdn, 'mistral') - elif value.strip() != value: - return '"%s"' % value - return value - - -def _print_opt(opt): - opt_name, opt_default, opt_help = opt.dest, opt.default, opt.help - if not opt_help: - sys.stderr.write('WARNING: "%s" is missing help string.\n' % opt_name) - opt_help = "" - opt_type = None - try: - opt_type = OPTION_REGEX.search(str(type(opt))).group(0) - except (ValueError, AttributeError) as err: - sys.stderr.write("%s\n" % str(err)) - sys.exit(1) - opt_help = u'%s (%s)' % (opt_help, - OPT_TYPES[opt_type]) - print('#', "\n# ".join(textwrap.wrap(opt_help, WORDWRAP_WIDTH))) - if opt.deprecated_opts: - for deprecated_opt in opt.deprecated_opts: - if deprecated_opt.name: - deprecated_group = (deprecated_opt.group if - deprecated_opt.group else "DEFAULT") - print('# Deprecated group/name - [%s]/%s' % - (deprecated_group, - deprecated_opt.name)) - try: - if opt_default is None: - print('#%s=' % opt_name) - elif opt_type == STROPT: - assert(isinstance(opt_default, six.string_types)) - print('#%s=%s' % (opt_name, _sanitize_default(opt_name, - opt_default))) - elif opt_type == BOOLOPT: - assert(isinstance(opt_default, bool)) - print('#%s=%s' % (opt_name, str(opt_default).lower())) - elif opt_type == INTOPT: - assert(isinstance(opt_default, int) and - not isinstance(opt_default, bool)) - print('#%s=%s' % (opt_name, opt_default)) - elif opt_type == FLOATOPT: - assert(isinstance(opt_default, float)) - print('#%s=%s' % (opt_name, opt_default)) - elif opt_type == LISTOPT: - assert(isinstance(opt_default, list)) - print('#%s=%s' % (opt_name, ','.join(opt_default))) - elif opt_type == DICTOPT: - assert(isinstance(opt_default, dict)) - opt_default_strlist = [str(key) + ':' + str(value) - for (key, value) in opt_default.items()] - print('#%s=%s' % (opt_name, ','.join(opt_default_strlist))) - elif opt_type == MULTISTROPT: - assert(isinstance(opt_default, list)) - if not opt_default: - opt_default = [''] - for default in opt_default: - print('#%s=%s' % (opt_name, default)) - print('') - except Exception: - sys.stderr.write('Error in option "%s"\n' % opt_name) - sys.exit(1) - - -def main(): - generate(sys.argv[1:]) - -if __name__ == '__main__': - main() diff --git a/mistral/openstack/common/context.py b/mistral/openstack/common/context.py deleted file mode 100644 index 3eeb445e4..000000000 --- a/mistral/openstack/common/context.py +++ /dev/null @@ -1,111 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -""" -Simple class that stores security context information in the web request. - -Projects should subclass this class if they wish to enhance the request -context or provide additional information in their specific WSGI pipeline. -""" - -import itertools -import uuid - - -def generate_request_id(): - return b'req-' + str(uuid.uuid4()).encode('ascii') - - -class RequestContext(object): - - """Helper class to represent useful information about a request context. - - Stores information about the security context under which the user - accesses the system, as well as additional request information. - """ - - user_idt_format = '{user} {tenant} {domain} {user_domain} {p_domain}' - - def __init__(self, auth_token=None, user=None, tenant=None, domain=None, - user_domain=None, project_domain=None, is_admin=False, - read_only=False, show_deleted=False, request_id=None, - instance_uuid=None): - self.auth_token = auth_token - self.user = user - self.tenant = tenant - self.domain = domain - self.user_domain = user_domain - self.project_domain = project_domain - self.is_admin = is_admin - self.read_only = read_only - self.show_deleted = show_deleted - self.instance_uuid = instance_uuid - if not request_id: - request_id = generate_request_id() - self.request_id = request_id - - def to_dict(self): - user_idt = ( - self.user_idt_format.format(user=self.user or '-', - tenant=self.tenant or '-', - domain=self.domain or '-', - user_domain=self.user_domain or '-', - p_domain=self.project_domain or '-')) - - return {'user': self.user, - 'tenant': self.tenant, - 'domain': self.domain, - 'user_domain': self.user_domain, - 'project_domain': self.project_domain, - 'is_admin': self.is_admin, - 'read_only': self.read_only, - 'show_deleted': self.show_deleted, - 'auth_token': self.auth_token, - 'request_id': self.request_id, - 'instance_uuid': self.instance_uuid, - 'user_identity': user_idt} - - -def get_admin_context(show_deleted=False): - context = RequestContext(None, - tenant=None, - is_admin=True, - show_deleted=show_deleted) - return context - - -def get_context_from_function_and_args(function, args, kwargs): - """Find an arg of type RequestContext and return it. - - This is useful in a couple of decorators where we don't - know much about the function we're wrapping. - """ - - for arg in itertools.chain(kwargs.values(), args): - if isinstance(arg, RequestContext): - return arg - - return None - - -def is_user_context(context): - """Indicates if the request context is a normal user.""" - if not context: - return False - if context.is_admin: - return False - if not context.user_id or not context.project_id: - return False - return True diff --git a/mistral/openstack/common/excutils.py b/mistral/openstack/common/excutils.py deleted file mode 100644 index ab654a531..000000000 --- a/mistral/openstack/common/excutils.py +++ /dev/null @@ -1,113 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# Copyright 2012, Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Exception related utilities. 
-""" - -import logging -import sys -import time -import traceback - -import six - -from mistral.openstack.common.gettextutils import _LE - - -class save_and_reraise_exception(object): - """Save current exception, run some code and then re-raise. - - In some cases the exception context can be cleared, resulting in None - being attempted to be re-raised after an exception handler is run. This - can happen when eventlet switches greenthreads or when running an - exception handler, code raises and catches an exception. In both - cases the exception context will be cleared. - - To work around this, we save the exception state, run handler code, and - then re-raise the original exception. If another exception occurs, the - saved exception is logged and the new exception is re-raised. - - In some cases the caller may not want to re-raise the exception, and - for those circumstances this context provides a reraise flag that - can be used to suppress the exception. For example:: - - except Exception: - with save_and_reraise_exception() as ctxt: - decide_if_need_reraise() - if not should_be_reraised: - ctxt.reraise = False - - If another exception occurs and reraise flag is False, - the saved exception will not be logged. - - If the caller wants to raise new exception during exception handling - he/she sets reraise to False initially with an ability to set it back to - True if needed:: - - except Exception: - with save_and_reraise_exception(reraise=False) as ctxt: - [if statements to determine whether to raise a new exception] - # Not raising a new exception, so reraise - ctxt.reraise = True - """ - def __init__(self, reraise=True): - self.reraise = reraise - - def __enter__(self): - self.type_, self.value, self.tb, = sys.exc_info() - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if exc_type is not None: - if self.reraise: - logging.error(_LE('Original exception being dropped: %s'), - traceback.format_exception(self.type_, - self.value, - self.tb)) - return False - if self.reraise: - six.reraise(self.type_, self.value, self.tb) - - -def forever_retry_uncaught_exceptions(infunc): - def inner_func(*args, **kwargs): - last_log_time = 0 - last_exc_message = None - exc_count = 0 - while True: - try: - return infunc(*args, **kwargs) - except Exception as exc: - this_exc_message = six.u(str(exc)) - if this_exc_message == last_exc_message: - exc_count += 1 - else: - exc_count = 1 - # Do not log any more frequently than once a minute unless - # the exception message changes - cur_time = int(time.time()) - if (cur_time - last_log_time > 60 or - this_exc_message != last_exc_message): - logging.exception( - _LE('Unexpected exception occurred %d time(s)... ' - 'retrying.') % exc_count) - last_log_time = cur_time - last_exc_message = this_exc_message - exc_count = 0 - # This should be a very rare event. In case it isn't, do - # a sleep. - time.sleep(1) - return inner_func diff --git a/mistral/openstack/common/fileutils.py b/mistral/openstack/common/fileutils.py deleted file mode 100644 index f54fc620c..000000000 --- a/mistral/openstack/common/fileutils.py +++ /dev/null @@ -1,135 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import contextlib -import errno -import logging -import os -import tempfile - -from mistral.openstack.common import excutils - -LOG = logging.getLogger(__name__) - -_FILE_CACHE = {} - - -def ensure_tree(path): - """Create a directory (and any ancestor directories required) - - :param path: Directory to create - """ - try: - os.makedirs(path) - except OSError as exc: - if exc.errno == errno.EEXIST: - if not os.path.isdir(path): - raise - else: - raise - - -def read_cached_file(filename, force_reload=False): - """Read from a file if it has been modified. - - :param force_reload: Whether to reload the file. - :returns: A tuple with a boolean specifying if the data is fresh - or not. - """ - global _FILE_CACHE - - if force_reload and filename in _FILE_CACHE: - del _FILE_CACHE[filename] - - reloaded = False - mtime = os.path.getmtime(filename) - cache_info = _FILE_CACHE.setdefault(filename, {}) - - if not cache_info or mtime > cache_info.get('mtime', 0): - LOG.debug("Reloading cached file %s" % filename) - with open(filename) as fap: - cache_info['data'] = fap.read() - cache_info['mtime'] = mtime - reloaded = True - return (reloaded, cache_info['data']) - - -def delete_if_exists(path, remove=os.unlink): - """Delete a file, but ignore file not found error. - - :param path: File to delete - :param remove: Optional function to remove passed path - """ - - try: - remove(path) - except OSError as e: - if e.errno != errno.ENOENT: - raise - - -@contextlib.contextmanager -def remove_path_on_error(path, remove=delete_if_exists): - """Protect code that wants to operate on PATH atomically. - Any exception will cause PATH to be removed. - - :param path: File to work with - :param remove: Optional function to remove passed path - """ - - try: - yield - except Exception: - with excutils.save_and_reraise_exception(): - remove(path) - - -def file_open(*args, **kwargs): - """Open file - - see built-in file() documentation for more details - - Note: The reason this is kept in a separate module is to easily - be able to provide a stub module that doesn't alter system - state at all (for unit tests) - """ - return open(*args, **kwargs) - - -def write_to_tempfile(content, path=None, suffix='', prefix='tmp'): - """Create temporary file or use existing file. - - This util is needed for creating temporary file with - specified content, suffix and prefix. If path is not None, - it will be used for writing content. If the path doesn't - exist it'll be created. - - :param content: content for temporary file. - :param path: same as parameter 'dir' for mkstemp - :param suffix: same as parameter 'suffix' for mkstemp - :param prefix: same as parameter 'prefix' for mkstemp - - For example: it can be used in database tests for creating - configuration files. 
- """ - if path: - ensure_tree(path) - - (fd, path) = tempfile.mkstemp(suffix=suffix, dir=path, prefix=prefix) - try: - os.write(fd, content) - finally: - os.close(fd) - return path diff --git a/mistral/openstack/common/gettextutils.py b/mistral/openstack/common/gettextutils.py deleted file mode 100644 index adf95408d..000000000 --- a/mistral/openstack/common/gettextutils.py +++ /dev/null @@ -1,498 +0,0 @@ -# Copyright 2012 Red Hat, Inc. -# Copyright 2013 IBM Corp. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -gettext for openstack-common modules. - -Usual usage in an openstack.common module: - - from mistral.openstack.common.gettextutils import _ -""" - -import copy -import functools -import gettext -import locale -from logging import handlers -import os - -from babel import localedata -import six - -_AVAILABLE_LANGUAGES = {} - -# FIXME(dhellmann): Remove this when moving to oslo.i18n. -USE_LAZY = False - - -class TranslatorFactory(object): - """Create translator functions - """ - - def __init__(self, domain, lazy=False, localedir=None): - """Establish a set of translation functions for the domain. - - :param domain: Name of translation domain, - specifying a message catalog. - :type domain: str - :param lazy: Delays translation until a message is emitted. - Defaults to False. - :type lazy: Boolean - :param localedir: Directory with translation catalogs. - :type localedir: str - """ - self.domain = domain - self.lazy = lazy - if localedir is None: - localedir = os.environ.get(domain.upper() + '_LOCALEDIR') - self.localedir = localedir - - def _make_translation_func(self, domain=None): - """Return a new translation function ready for use. - - Takes into account whether or not lazy translation is being - done. - - The domain can be specified to override the default from the - factory, but the localedir from the factory is always used - because we assume the log-level translation catalogs are - installed in the same directory as the main application - catalog. - - """ - if domain is None: - domain = self.domain - if self.lazy: - return functools.partial(Message, domain=domain) - t = gettext.translation( - domain, - localedir=self.localedir, - fallback=True, - ) - if six.PY3: - return t.gettext - return t.ugettext - - @property - def primary(self): - "The default translation function." - return self._make_translation_func() - - def _make_log_translation_func(self, level): - return self._make_translation_func(self.domain + '-log-' + level) - - @property - def log_info(self): - "Translate info-level log messages." - return self._make_log_translation_func('info') - - @property - def log_warning(self): - "Translate warning-level log messages." - return self._make_log_translation_func('warning') - - @property - def log_error(self): - "Translate error-level log messages." - return self._make_log_translation_func('error') - - @property - def log_critical(self): - "Translate critical-level log messages." 
- return self._make_log_translation_func('critical') - - -# NOTE(dhellmann): When this module moves out of the incubator into -# oslo.i18n, these global variables can be moved to an integration -# module within each application. - -# Create the global translation functions. -_translators = TranslatorFactory('mistral') - -# The primary translation function using the well-known name "_" -_ = _translators.primary - -# Translators for log levels. -# -# The abbreviated names are meant to reflect the usual use of a short -# name like '_'. The "L" is for "log" and the other letter comes from -# the level. -_LI = _translators.log_info -_LW = _translators.log_warning -_LE = _translators.log_error -_LC = _translators.log_critical - -# NOTE(dhellmann): End of globals that will move to the application's -# integration module. - - -def enable_lazy(): - """Convenience function for configuring _() to use lazy gettext - - Call this at the start of execution to enable the gettextutils._ - function to use lazy gettext functionality. This is useful if - your project is importing _ directly instead of using the - gettextutils.install() way of importing the _ function. - """ - # FIXME(dhellmann): This function will be removed in oslo.i18n, - # because the TranslatorFactory makes it superfluous. - global _, _LI, _LW, _LE, _LC, USE_LAZY - tf = TranslatorFactory('mistral', lazy=True) - _ = tf.primary - _LI = tf.log_info - _LW = tf.log_warning - _LE = tf.log_error - _LC = tf.log_critical - USE_LAZY = True - - -def install(domain, lazy=False): - """Install a _() function using the given translation domain. - - Given a translation domain, install a _() function using gettext's - install() function. - - The main difference from gettext.install() is that we allow - overriding the default localedir (e.g. /usr/share/locale) using - a translation-domain-specific environment variable (e.g. - NOVA_LOCALEDIR). - - :param domain: the translation domain - :param lazy: indicates whether or not to install the lazy _() function. - The lazy _() introduces a way to do deferred translation - of messages by installing a _ that builds Message objects, - instead of strings, which can then be lazily translated into - any available locale. - """ - if lazy: - from six import moves - tf = TranslatorFactory(domain, lazy=True) - moves.builtins.__dict__['_'] = tf.primary - else: - localedir = '%s_LOCALEDIR' % domain.upper() - if six.PY3: - gettext.install(domain, - localedir=os.environ.get(localedir)) - else: - gettext.install(domain, - localedir=os.environ.get(localedir), - unicode=True) - - -class Message(six.text_type): - """A Message object is a unicode object that can be translated. - - Translation of Message is done explicitly using the translate() method. - For all non-translation intents and purposes, a Message is simply unicode, - and can be treated as such. - """ - - def __new__(cls, msgid, msgtext=None, params=None, - domain='mistral', *args): - """Create a new Message object. - - In order for translation to work gettext requires a message ID, this - msgid will be used as the base unicode text. It is also possible - for the msgid and the base unicode text to be different by passing - the msgtext parameter. - """ - # If the base msgtext is not given, we use the default translation - # of the msgid (which is in English) just in case the system locale is - # not English, so that the base text will be in that locale by default. 
- if not msgtext: - msgtext = Message._translate_msgid(msgid, domain) - # We want to initialize the parent unicode with the actual object that - # would have been plain unicode if 'Message' was not enabled. - msg = super(Message, cls).__new__(cls, msgtext) - msg.msgid = msgid - msg.domain = domain - msg.params = params - return msg - - def translate(self, desired_locale=None): - """Translate this message to the desired locale. - - :param desired_locale: The desired locale to translate the message to, - if no locale is provided the message will be - translated to the system's default locale. - - :returns: the translated message in unicode - """ - - translated_message = Message._translate_msgid(self.msgid, - self.domain, - desired_locale) - if self.params is None: - # No need for more translation - return translated_message - - # This Message object may have been formatted with one or more - # Message objects as substitution arguments, given either as a single - # argument, part of a tuple, or as one or more values in a dictionary. - # When translating this Message we need to translate those Messages too - translated_params = _translate_args(self.params, desired_locale) - - translated_message = translated_message % translated_params - - return translated_message - - @staticmethod - def _translate_msgid(msgid, domain, desired_locale=None): - if not desired_locale: - system_locale = locale.getdefaultlocale() - # If the system locale is not available to the runtime use English - if not system_locale[0]: - desired_locale = 'en_US' - else: - desired_locale = system_locale[0] - - locale_dir = os.environ.get(domain.upper() + '_LOCALEDIR') - lang = gettext.translation(domain, - localedir=locale_dir, - languages=[desired_locale], - fallback=True) - if six.PY3: - translator = lang.gettext - else: - translator = lang.ugettext - - translated_message = translator(msgid) - return translated_message - - def __mod__(self, other): - # When we mod a Message we want the actual operation to be performed - # by the parent class (i.e. unicode()), the only thing we do here is - # save the original msgid and the parameters in case of a translation - params = self._sanitize_mod_params(other) - unicode_mod = super(Message, self).__mod__(params) - modded = Message(self.msgid, - msgtext=unicode_mod, - params=params, - domain=self.domain) - return modded - - def _sanitize_mod_params(self, other): - """Sanitize the object being modded with this Message. - - - Add support for modding 'None' so translation supports it - - Trim the modded object, which can be a large dictionary, to only - those keys that would actually be used in a translation - - Snapshot the object being modded, in case the message is - translated, it will be used as it was when the Message was created - """ - if other is None: - params = (other,) - elif isinstance(other, dict): - # Merge the dictionaries - # Copy each item in case one does not support deep copy. 
- params = {} - if isinstance(self.params, dict): - for key, val in self.params.items(): - params[key] = self._copy_param(val) - for key, val in other.items(): - params[key] = self._copy_param(val) - else: - params = self._copy_param(other) - return params - - def _copy_param(self, param): - try: - return copy.deepcopy(param) - except Exception: - # Fallback to casting to unicode this will handle the - # python code-like objects that can't be deep-copied - return six.text_type(param) - - def __add__(self, other): - msg = _('Message objects do not support addition.') - raise TypeError(msg) - - def __radd__(self, other): - return self.__add__(other) - - if six.PY2: - def __str__(self): - # NOTE(luisg): Logging in python 2.6 tries to str() log records, - # and it expects specifically a UnicodeError in order to proceed. - msg = _('Message objects do not support str() because they may ' - 'contain non-ascii characters. ' - 'Please use unicode() or translate() instead.') - raise UnicodeError(msg) - - -def get_available_languages(domain): - """Lists the available languages for the given translation domain. - - :param domain: the domain to get languages for - """ - if domain in _AVAILABLE_LANGUAGES: - return copy.copy(_AVAILABLE_LANGUAGES[domain]) - - localedir = '%s_LOCALEDIR' % domain.upper() - find = lambda x: gettext.find(domain, - localedir=os.environ.get(localedir), - languages=[x]) - - # NOTE(mrodden): en_US should always be available (and first in case - # order matters) since our in-line message strings are en_US - language_list = ['en_US'] - # NOTE(luisg): Babel <1.0 used a function called list(), which was - # renamed to locale_identifiers() in >=1.0, the requirements master list - # requires >=0.9.6, uncapped, so defensively work with both. We can remove - # this check when the master list updates to >=1.0, and update all projects - list_identifiers = (getattr(localedata, 'list', None) or - getattr(localedata, 'locale_identifiers')) - locale_identifiers = list_identifiers() - - for i in locale_identifiers: - if find(i) is not None: - language_list.append(i) - - # NOTE(luisg): Babel>=1.0,<1.3 has a bug where some OpenStack supported - # locales (e.g. 'zh_CN', and 'zh_TW') aren't supported even though they - # are perfectly legitimate locales: - # https://github.com/mitsuhiko/babel/issues/37 - # In Babel 1.3 they fixed the bug and they support these locales, but - # they are still not explicitly "listed" by locale_identifiers(). - # That is why we add the locales here explicitly if necessary so that - # they are listed as supported. - aliases = {'zh': 'zh_CN', - 'zh_Hant_HK': 'zh_HK', - 'zh_Hant': 'zh_TW', - 'fil': 'tl_PH'} - for (locale, alias) in six.iteritems(aliases): - if locale in language_list and alias not in language_list: - language_list.append(alias) - - _AVAILABLE_LANGUAGES[domain] = language_list - return copy.copy(language_list) - - -def translate(obj, desired_locale=None): - """Gets the translated unicode representation of the given object. - - If the object is not translatable it is returned as-is. - If the locale is None the object is translated to the system locale. 
- - :param obj: the object to translate - :param desired_locale: the locale to translate the message to, if None the - default system locale will be used - :returns: the translated object in unicode, or the original object if - it could not be translated - """ - message = obj - if not isinstance(message, Message): - # If the object to translate is not already translatable, - # let's first get its unicode representation - message = six.text_type(obj) - if isinstance(message, Message): - # Even after unicoding() we still need to check if we are - # running with translatable unicode before translating - return message.translate(desired_locale) - return obj - - -def _translate_args(args, desired_locale=None): - """Translates all the translatable elements of the given arguments object. - - This method is used for translating the translatable values in method - arguments which include values of tuples or dictionaries. - If the object is not a tuple or a dictionary the object itself is - translated if it is translatable. - - If the locale is None the object is translated to the system locale. - - :param args: the args to translate - :param desired_locale: the locale to translate the args to, if None the - default system locale will be used - :returns: a new args object with the translated contents of the original - """ - if isinstance(args, tuple): - return tuple(translate(v, desired_locale) for v in args) - if isinstance(args, dict): - translated_dict = {} - for (k, v) in six.iteritems(args): - translated_v = translate(v, desired_locale) - translated_dict[k] = translated_v - return translated_dict - return translate(args, desired_locale) - - -class TranslationHandler(handlers.MemoryHandler): - """Handler that translates records before logging them. - - The TranslationHandler takes a locale and a target logging.Handler object - to forward LogRecord objects to after translating them. This handler - depends on Message objects being logged, instead of regular strings. - - The handler can be configured declaratively in the logging.conf as follows: - - [handlers] - keys = translatedlog, translator - - [handler_translatedlog] - class = handlers.WatchedFileHandler - args = ('/var/log/api-localized.log',) - formatter = context - - [handler_translator] - class = openstack.common.log.TranslationHandler - target = translatedlog - args = ('zh_CN',) - - If the specified locale is not available in the system, the handler will - log in the default locale. - """ - - def __init__(self, locale=None, target=None): - """Initialize a TranslationHandler - - :param locale: locale to use for translating messages - :param target: logging.Handler object to forward - LogRecord objects to after translation - """ - # NOTE(luisg): In order to allow this handler to be a wrapper for - # other handlers, such as a FileHandler, and still be able to - # configure it using logging.conf, this handler has to extend - # MemoryHandler because only the MemoryHandlers' logging.conf - # parsing is implemented such that it accepts a target handler. 
- handlers.MemoryHandler.__init__(self, capacity=0, target=target) - self.locale = locale - - def setFormatter(self, fmt): - self.target.setFormatter(fmt) - - def emit(self, record): - # We save the message from the original record to restore it - # after translation, so other handlers are not affected by this - original_msg = record.msg - original_args = record.args - - try: - self._translate_and_log_record(record) - finally: - record.msg = original_msg - record.args = original_args - - def _translate_and_log_record(self, record): - record.msg = translate(record.msg, self.locale) - - # In addition to translating the message, we also need to translate - # arguments that were passed to the log method that were not part - # of the main message e.g., log.info(_('Some message %s'), this_one)) - record.args = _translate_args(record.args, self.locale) - - self.target.emit(record) diff --git a/mistral/openstack/common/importutils.py b/mistral/openstack/common/importutils.py deleted file mode 100644 index 97bdd9ec1..000000000 --- a/mistral/openstack/common/importutils.py +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Import related utilities and helper functions. -""" - -import sys -import traceback - - -def import_class(import_str): - """Returns a class from a string including module and class.""" - mod_str, _sep, class_str = import_str.rpartition('.') - __import__(mod_str) - try: - return getattr(sys.modules[mod_str], class_str) - except AttributeError: - raise ImportError('Class %s cannot be found (%s)' % - (class_str, - traceback.format_exception(*sys.exc_info()))) - - -def import_object(import_str, *args, **kwargs): - """Import a class and return an instance of it.""" - return import_class(import_str)(*args, **kwargs) - - -def import_object_ns(name_space, import_str, *args, **kwargs): - """Tries to import object from default namespace. - - Imports a class and return an instance of it, first by trying - to find the class in a default namespace, then failing back to - a full path if not found in the default namespace. 
- """ - import_value = "%s.%s" % (name_space, import_str) - try: - return import_class(import_value)(*args, **kwargs) - except ImportError: - return import_class(import_str)(*args, **kwargs) - - -def import_module(import_str): - """Import a module.""" - __import__(import_str) - return sys.modules[import_str] - - -def import_versioned_module(version, submodule=None): - module = 'mistral.v%s' % version - if submodule: - module = '.'.join((module, submodule)) - return import_module(module) - - -def try_import(import_str, default=None): - """Try to import a module and if it fails return default.""" - try: - return import_module(import_str) - except ImportError: - return default diff --git a/mistral/openstack/common/jsonutils.py b/mistral/openstack/common/jsonutils.py deleted file mode 100644 index 91c3cacf3..000000000 --- a/mistral/openstack/common/jsonutils.py +++ /dev/null @@ -1,186 +0,0 @@ -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# Copyright 2011 Justin Santa Barbara -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -''' -JSON related utilities. - -This module provides a few things: - - 1) A handy function for getting an object down to something that can be - JSON serialized. See to_primitive(). - - 2) Wrappers around loads() and dumps(). The dumps() wrapper will - automatically use to_primitive() for you if needed. - - 3) This sets up anyjson to use the loads() and dumps() wrappers if anyjson - is available. -''' - - -import codecs -import datetime -import functools -import inspect -import itertools -import sys - -if sys.version_info < (2, 7): - # On Python <= 2.6, json module is not C boosted, so try to use - # simplejson module if available - try: - import simplejson as json - except ImportError: - import json -else: - import json - -import six -import six.moves.xmlrpc_client as xmlrpclib - -from mistral.openstack.common import gettextutils -from mistral.openstack.common import importutils -from mistral.openstack.common import strutils -from mistral.openstack.common import timeutils - -netaddr = importutils.try_import("netaddr") - -_nasty_type_tests = [inspect.ismodule, inspect.isclass, inspect.ismethod, - inspect.isfunction, inspect.isgeneratorfunction, - inspect.isgenerator, inspect.istraceback, inspect.isframe, - inspect.iscode, inspect.isbuiltin, inspect.isroutine, - inspect.isabstract] - -_simple_types = (six.string_types + six.integer_types - + (type(None), bool, float)) - - -def to_primitive(value, convert_instances=False, convert_datetime=True, - level=0, max_depth=3): - """Convert a complex object into primitives. - - Handy for JSON serialization. We can optionally handle instances, - but since this is a recursive function, we could have cyclical - data structures. - - To handle cyclical data structures we could track the actual objects - visited in a set, but not all objects are hashable. 
Instead we just - track the depth of the object inspections and don't go too deep. - - Therefore, convert_instances=True is lossy ... be aware. - - """ - # handle obvious types first - order of basic types determined by running - # full tests on nova project, resulting in the following counts: - # 572754 - # 460353 - # 379632 - # 274610 - # 199918 - # 114200 - # 51817 - # 26164 - # 6491 - # 283 - # 19 - if isinstance(value, _simple_types): - return value - - if isinstance(value, datetime.datetime): - if convert_datetime: - return timeutils.strtime(value) - else: - return value - - # value of itertools.count doesn't get caught by nasty_type_tests - # and results in infinite loop when list(value) is called. - if type(value) == itertools.count: - return six.text_type(value) - - # FIXME(vish): Workaround for LP bug 852095. Without this workaround, - # tests that raise an exception in a mocked method that - # has a @wrap_exception with a notifier will fail. If - # we up the dependency to 0.5.4 (when it is released) we - # can remove this workaround. - if getattr(value, '__module__', None) == 'mox': - return 'mock' - - if level > max_depth: - return '?' - - # The try block may not be necessary after the class check above, - # but just in case ... - try: - recursive = functools.partial(to_primitive, - convert_instances=convert_instances, - convert_datetime=convert_datetime, - level=level, - max_depth=max_depth) - if isinstance(value, dict): - return dict((k, recursive(v)) for k, v in six.iteritems(value)) - elif isinstance(value, (list, tuple)): - return [recursive(lv) for lv in value] - - # It's not clear why xmlrpclib created their own DateTime type, but - # for our purposes, make it a datetime type which is explicitly - # handled - if isinstance(value, xmlrpclib.DateTime): - value = datetime.datetime(*tuple(value.timetuple())[:6]) - - if convert_datetime and isinstance(value, datetime.datetime): - return timeutils.strtime(value) - elif isinstance(value, gettextutils.Message): - return value.data - elif hasattr(value, 'iteritems'): - return recursive(dict(value.iteritems()), level=level + 1) - elif hasattr(value, '__iter__'): - return recursive(list(value)) - elif convert_instances and hasattr(value, '__dict__'): - # Likely an instance of something. Watch for cycles. - # Ignore class member vars. - return recursive(value.__dict__, level=level + 1) - elif netaddr and isinstance(value, netaddr.IPAddress): - return six.text_type(value) - else: - if any(test(value) for test in _nasty_type_tests): - return six.text_type(value) - return value - except TypeError: - # Class objects are tricky since they may define something like - # __iter__ defined but it isn't callable as list(). - return six.text_type(value) - - -def dumps(value, default=to_primitive, **kwargs): - return json.dumps(value, default=default, **kwargs) - - -def loads(s, encoding='utf-8'): - return json.loads(strutils.safe_decode(s, encoding)) - - -def load(fp, encoding='utf-8'): - return json.load(codecs.getreader(encoding)(fp)) - - -try: - import anyjson -except ImportError: - pass -else: - anyjson._modules.append((__name__, 'dumps', TypeError, - 'loads', ValueError, 'load')) - anyjson.force_implementation(__name__) diff --git a/mistral/openstack/common/local.py b/mistral/openstack/common/local.py deleted file mode 100644 index 0819d5b97..000000000 --- a/mistral/openstack/common/local.py +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Local storage of variables using weak references""" - -import threading -import weakref - - -class WeakLocal(threading.local): - def __getattribute__(self, attr): - rval = super(WeakLocal, self).__getattribute__(attr) - if rval: - # NOTE(mikal): this bit is confusing. What is stored is a weak - # reference, not the value itself. We therefore need to lookup - # the weak reference and return the inner value here. - rval = rval() - return rval - - def __setattr__(self, attr, value): - value = weakref.ref(value) - return super(WeakLocal, self).__setattr__(attr, value) - - -# NOTE(mikal): the name "store" should be deprecated in the future -store = WeakLocal() - -# A "weak" store uses weak references and allows an object to fall out of scope -# when it falls out of scope in the code that uses the thread local storage. A -# "strong" store will hold a reference to the object so that it never falls out -# of scope. -weak_store = WeakLocal() -strong_store = threading.local() diff --git a/mistral/openstack/common/lockutils.py b/mistral/openstack/common/lockutils.py deleted file mode 100644 index dacfc16da..000000000 --- a/mistral/openstack/common/lockutils.py +++ /dev/null @@ -1,378 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import contextlib -import errno -import fcntl -import functools -import logging -import os -import shutil -import subprocess -import sys -import tempfile -import threading -import time -import weakref - -from oslo_config import cfg - -from mistral.openstack.common import fileutils -from mistral.openstack.common.gettextutils import _, _LE, _LI - - -LOG = logging.getLogger(__name__) - - -util_opts = [ - cfg.BoolOpt('disable_process_locking', default=False, - help='Enables or disables inter-process locks.'), - cfg.StrOpt('lock_path', - default=os.environ.get("MISTRAL_LOCK_PATH"), - help='Directory to use for lock files.') -] - - -CONF = cfg.CONF -CONF.register_opts(util_opts) - - -def set_defaults(lock_path): - cfg.set_defaults(util_opts, lock_path=lock_path) - - -class _FileLock(object): - """Lock implementation which allows multiple locks, working around - issues like bugs.debian.org/cgi-bin/bugreport.cgi?bug=632857 and does - not require any cleanup. Since the lock is always held on a file - descriptor rather than outside of the process, the lock gets dropped - automatically if the process crashes, even if __exit__ is not executed. 
- - There are no guarantees regarding usage by multiple green threads in a - single process here. This lock works only between processes. Exclusive - access between local threads should be achieved using the semaphores - in the @synchronized decorator. - - Note these locks are released when the descriptor is closed, so it's not - safe to close the file descriptor while another green thread holds the - lock. Just opening and closing the lock file can break synchronisation, - so lock files must be accessed only using this abstraction. - """ - - def __init__(self, name): - self.lockfile = None - self.fname = name - - def acquire(self): - basedir = os.path.dirname(self.fname) - - if not os.path.exists(basedir): - fileutils.ensure_tree(basedir) - LOG.info(_LI('Created lock path: %s'), basedir) - - self.lockfile = open(self.fname, 'w') - - while True: - try: - # Using non-blocking locks since green threads are not - # patched to deal with blocking locking calls. - # Also upon reading the MSDN docs for locking(), it seems - # to have a laughable 10 attempts "blocking" mechanism. - self.trylock() - LOG.debug('Got file lock "%s"', self.fname) - return True - except IOError as e: - if e.errno in (errno.EACCES, errno.EAGAIN): - # external locks synchronise things like iptables - # updates - give it some time to prevent busy spinning - time.sleep(0.01) - else: - raise threading.ThreadError(_("Unable to acquire lock on" - " `%(filename)s` due to" - " %(exception)s") % - { - 'filename': self.fname, - 'exception': e, - }) - - def __enter__(self): - self.acquire() - return self - - def release(self): - try: - self.unlock() - self.lockfile.close() - LOG.debug('Released file lock "%s"', self.fname) - except IOError: - LOG.exception(_LE("Could not release the acquired lock `%s`"), - self.fname) - - def __exit__(self, exc_type, exc_val, exc_tb): - self.release() - - def exists(self): - return os.path.exists(self.fname) - - def trylock(self): - raise NotImplementedError() - - def unlock(self): - raise NotImplementedError() - - -class _WindowsLock(_FileLock): - def trylock(self): - msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_NBLCK, 1) - - def unlock(self): - msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_UNLCK, 1) - - -class _FcntlLock(_FileLock): - def trylock(self): - fcntl.lockf(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB) - - def unlock(self): - fcntl.lockf(self.lockfile, fcntl.LOCK_UN) - - -class _PosixLock(object): - def __init__(self, name): - # Hash the name because it's not valid to have POSIX semaphore - # names with things like / in them. Then use base64 to encode - # the digest() instead taking the hexdigest() because the - # result is shorter and most systems can't have shm sempahore - # names longer than 31 characters. 
- h = hashlib.sha1() - h.update(name.encode('ascii')) - self.name = str((b'/' + base64.urlsafe_b64encode( - h.digest())).decode('ascii')) - - def acquire(self, timeout=None): - self.semaphore = posix_ipc.Semaphore(self.name, - flags=posix_ipc.O_CREAT, - initial_value=1) - self.semaphore.acquire(timeout) - return self - - def __enter__(self): - self.acquire() - return self - - def release(self): - self.semaphore.release() - self.semaphore.close() - - def __exit__(self, exc_type, exc_val, exc_tb): - self.release() - - def exists(self): - try: - semaphore = posix_ipc.Semaphore(self.name) - except posix_ipc.ExistentialError: - return False - else: - semaphore.close() - return True - - -if os.name == 'nt': - import msvcrt - InterProcessLock = _WindowsLock - FileLock = _WindowsLock -else: - import base64 - import hashlib - import posix_ipc - InterProcessLock = _PosixLock - FileLock = _FcntlLock - -_semaphores = weakref.WeakValueDictionary() -_semaphores_lock = threading.Lock() - - -def _get_lock_path(name, lock_file_prefix, lock_path=None): - # NOTE(mikal): the lock name cannot contain directory - # separators - name = name.replace(os.sep, '_') - if lock_file_prefix: - sep = '' if lock_file_prefix.endswith('-') else '-' - name = '%s%s%s' % (lock_file_prefix, sep, name) - - local_lock_path = lock_path or CONF.lock_path - - if not local_lock_path: - # NOTE(bnemec): Create a fake lock path for posix locks so we don't - # unnecessarily raise the RequiredOptError below. - if InterProcessLock is not _PosixLock: - raise cfg.RequiredOptError('lock_path') - local_lock_path = 'posixlock:/' - - return os.path.join(local_lock_path, name) - - -def external_lock(name, lock_file_prefix=None, lock_path=None): - LOG.debug('Attempting to grab external lock "%(lock)s"', - {'lock': name}) - - lock_file_path = _get_lock_path(name, lock_file_prefix, lock_path) - - # NOTE(bnemec): If an explicit lock_path was passed to us then it - # means the caller is relying on file-based locking behavior, so - # we can't use posix locks for those calls. - if lock_path: - return FileLock(lock_file_path) - return InterProcessLock(lock_file_path) - - -def remove_external_lock_file(name, lock_file_prefix=None): - """Remove a external lock file when it's not used anymore - This will be helpful when we have a lot of lock files - """ - with internal_lock(name): - lock_file_path = _get_lock_path(name, lock_file_prefix) - try: - os.remove(lock_file_path) - except OSError: - LOG.info(_LI('Failed to remove file %(file)s'), - {'file': lock_file_path}) - - -def internal_lock(name): - with _semaphores_lock: - try: - sem = _semaphores[name] - except KeyError: - sem = threading.Semaphore() - _semaphores[name] = sem - - LOG.debug('Got semaphore "%(lock)s"', {'lock': name}) - return sem - - -@contextlib.contextmanager -def lock(name, lock_file_prefix=None, external=False, lock_path=None): - """Context based lock - - This function yields a `threading.Semaphore` instance (if we don't use - eventlet.monkey_patch(), else `semaphore.Semaphore`) unless external is - True, in which case, it'll yield an InterProcessLock instance. - - :param lock_file_prefix: The lock_file_prefix argument is used to provide - lock files on disk with a meaningful prefix. - - :param external: The external keyword argument denotes whether this lock - should work across multiple processes. This means that if two different - workers both run a method decorated with @synchronized('mylock', - external=True), only one of them will execute at a time. 
- """ - int_lock = internal_lock(name) - with int_lock: - if external and not CONF.disable_process_locking: - ext_lock = external_lock(name, lock_file_prefix, lock_path) - with ext_lock: - yield ext_lock - else: - yield int_lock - LOG.debug('Released semaphore "%(lock)s"', {'lock': name}) - - -def synchronized(name, lock_file_prefix=None, external=False, lock_path=None): - """Synchronization decorator. - - Decorating a method like so:: - - @synchronized('mylock') - def foo(self, *args): - ... - - ensures that only one thread will execute the foo method at a time. - - Different methods can share the same lock:: - - @synchronized('mylock') - def foo(self, *args): - ... - - @synchronized('mylock') - def bar(self, *args): - ... - - This way only one of either foo or bar can be executing at a time. - """ - - def wrap(f): - @functools.wraps(f) - def inner(*args, **kwargs): - try: - with lock(name, lock_file_prefix, external, lock_path): - LOG.debug('Got semaphore / lock "%(function)s"', - {'function': f.__name__}) - return f(*args, **kwargs) - finally: - LOG.debug('Semaphore / lock released "%(function)s"', - {'function': f.__name__}) - return inner - return wrap - - -def synchronized_with_prefix(lock_file_prefix): - """Partial object generator for the synchronization decorator. - - Redefine @synchronized in each project like so:: - - (in nova/utils.py) - from nova.openstack.common import lockutils - - synchronized = lockutils.synchronized_with_prefix('nova-') - - - (in nova/foo.py) - from nova import utils - - @utils.synchronized('mylock') - def bar(self, *args): - ... - - The lock_file_prefix argument is used to provide lock files on disk with a - meaningful prefix. - """ - - return functools.partial(synchronized, lock_file_prefix=lock_file_prefix) - - -def main(argv): - """Create a dir for locks and pass it to command from arguments - - If you run this: - python -m openstack.common.lockutils python setup.py testr - - a temporary directory will be created for all your locks and passed to all - your tests in an environment variable. The temporary dir will be deleted - afterwards and the return value will be preserved. - """ - - lock_dir = tempfile.mkdtemp() - os.environ["MISTRAL_LOCK_PATH"] = lock_dir - try: - ret_val = subprocess.call(argv[1:]) - finally: - shutil.rmtree(lock_dir, ignore_errors=True) - return ret_val - - -if __name__ == '__main__': - sys.exit(main(sys.argv)) diff --git a/mistral/openstack/common/loopingcall.py b/mistral/openstack/common/loopingcall.py deleted file mode 100644 index 4abfaed02..000000000 --- a/mistral/openstack/common/loopingcall.py +++ /dev/null @@ -1,140 +0,0 @@ -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# Copyright 2011 Justin Santa Barbara -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import logging -import sys - -from eventlet import event -from eventlet import greenthread - -from mistral.openstack.common.gettextutils import _LE, _LW -from mistral.openstack.common import timeutils - -LOG = logging.getLogger(__name__) - - -class LoopingCallDone(Exception): - """Exception to break out and stop a LoopingCallBase. - - The poll-function passed to LoopingCallBase can raise this exception to - break out of the loop normally. This is somewhat analogous to - StopIteration. - - An optional return-value can be included as the argument to the exception; - this return-value will be returned by LoopingCallBase.wait() - - """ - - def __init__(self, retvalue=True): - """:param retvalue: Value that LoopingCallBase.wait() should return.""" - self.retvalue = retvalue - - -class LoopingCallBase(object): - def __init__(self, f=None, *args, **kw): - self.args = args - self.kw = kw - self.f = f - self._running = False - self.done = None - - def stop(self): - self._running = False - - def wait(self): - return self.done.wait() - - -class FixedIntervalLoopingCall(LoopingCallBase): - """A fixed interval looping call.""" - - def start(self, interval, initial_delay=None): - self._running = True - done = event.Event() - - def _inner(): - if initial_delay: - greenthread.sleep(initial_delay) - - try: - while self._running: - start = timeutils.utcnow() - self.f(*self.args, **self.kw) - end = timeutils.utcnow() - if not self._running: - break - delay = interval - timeutils.delta_seconds(start, end) - if delay <= 0: - LOG.warn(_LW('task run outlasted interval by %s sec') % - -delay) - greenthread.sleep(delay if delay > 0 else 0) - except LoopingCallDone as e: - self.stop() - done.send(e.retvalue) - except Exception: - LOG.exception(_LE('in fixed duration looping call')) - done.send_exception(*sys.exc_info()) - return - else: - done.send(True) - - self.done = done - - greenthread.spawn_n(_inner) - return self.done - - -class DynamicLoopingCall(LoopingCallBase): - """A looping call which sleeps until the next known event. - - The function called should return how long to sleep for before being - called again. - """ - - def start(self, initial_delay=None, periodic_interval_max=None): - self._running = True - done = event.Event() - - def _inner(): - if initial_delay: - greenthread.sleep(initial_delay) - - try: - while self._running: - idle = self.f(*self.args, **self.kw) - if not self._running: - break - - if periodic_interval_max is not None: - idle = min(idle, periodic_interval_max) - LOG.debug('Dynamic looping call sleeping for %.02f ' - 'seconds', idle) - greenthread.sleep(idle) - except LoopingCallDone as e: - self.stop() - done.send(e.retvalue) - except Exception: - LOG.exception(_LE('in dynamic looping call')) - done.send_exception(*sys.exc_info()) - return - else: - done.send(True) - - self.done = done - - greenthread.spawn(_inner) - return self.done diff --git a/mistral/openstack/common/periodic_task.py b/mistral/openstack/common/periodic_task.py deleted file mode 100644 index b4a720893..000000000 --- a/mistral/openstack/common/periodic_task.py +++ /dev/null @@ -1,183 +0,0 @@ -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import logging -import time - -from oslo_config import cfg -import six - -from mistral.openstack.common.gettextutils import _, _LE, _LI - - -periodic_opts = [ - cfg.BoolOpt('run_external_periodic_tasks', - default=True, - help='Some periodic tasks can be run in a separate process. ' - 'Should we run them here?'), -] - -CONF = cfg.CONF -CONF.register_opts(periodic_opts) - -LOG = logging.getLogger(__name__) - -DEFAULT_INTERVAL = 60.0 - - -class InvalidPeriodicTaskArg(Exception): - message = _("Unexpected argument for periodic task creation: %(arg)s.") - - -def periodic_task(*args, **kwargs): - """Decorator to indicate that a method is a periodic task. - - This decorator can be used in two ways: - - 1. Without arguments '@periodic_task', this will be run on the default - interval of 60 seconds. - - 2. With arguments: - @periodic_task(spacing=N [, run_immediately=[True|False]]) - this will be run on approximately every N seconds. If this number is - negative the periodic task will be disabled. If the run_immediately - argument is provided and has a value of 'True', the first run of the - task will be shortly after task scheduler starts. If - run_immediately is omitted or set to 'False', the first time the - task runs will be approximately N seconds after the task scheduler - starts. - """ - def decorator(f): - # Test for old style invocation - if 'ticks_between_runs' in kwargs: - raise InvalidPeriodicTaskArg(arg='ticks_between_runs') - - # Control if run at all - f._periodic_task = True - f._periodic_external_ok = kwargs.pop('external_process_ok', False) - if f._periodic_external_ok and not CONF.run_external_periodic_tasks: - f._periodic_enabled = False - else: - f._periodic_enabled = kwargs.pop('enabled', True) - - # Control frequency - f._periodic_spacing = kwargs.pop('spacing', 0) - f._periodic_immediate = kwargs.pop('run_immediately', False) - if f._periodic_immediate: - f._periodic_last_run = None - else: - f._periodic_last_run = time.time() - return f - - # NOTE(sirp): The `if` is necessary to allow the decorator to be used with - # and without parents. - # - # In the 'with-parents' case (with kwargs present), this function needs to - # return a decorator function since the interpreter will invoke it like: - # - # periodic_task(*args, **kwargs)(f) - # - # In the 'without-parents' case, the original function will be passed - # in as the first argument, like: - # - # periodic_task(f) - if kwargs: - return decorator - else: - return decorator(args[0]) - - -class _PeriodicTasksMeta(type): - def __init__(cls, names, bases, dict_): - """Metaclass that allows us to collect decorated periodic tasks.""" - super(_PeriodicTasksMeta, cls).__init__(names, bases, dict_) - - # NOTE(sirp): if the attribute is not present then we must be the base - # class, so, go ahead an initialize it. If the attribute is present, - # then we're a subclass so make a copy of it so we don't step on our - # parent's toes. 
- try: - cls._periodic_tasks = cls._periodic_tasks[:] - except AttributeError: - cls._periodic_tasks = [] - - try: - cls._periodic_spacing = cls._periodic_spacing.copy() - except AttributeError: - cls._periodic_spacing = {} - - for value in cls.__dict__.values(): - if getattr(value, '_periodic_task', False): - task = value - name = task.__name__ - - if task._periodic_spacing < 0: - LOG.info(_LI('Skipping periodic task %(task)s because ' - 'its interval is negative'), - {'task': name}) - continue - if not task._periodic_enabled: - LOG.info(_LI('Skipping periodic task %(task)s because ' - 'it is disabled'), - {'task': name}) - continue - - # A periodic spacing of zero indicates that this task should - # be run on the default interval to avoid running too - # frequently. - if task._periodic_spacing == 0: - task._periodic_spacing = DEFAULT_INTERVAL - - cls._periodic_tasks.append((name, task)) - cls._periodic_spacing[name] = task._periodic_spacing - - -@six.add_metaclass(_PeriodicTasksMeta) -class PeriodicTasks(object): - def __init__(self): - super(PeriodicTasks, self).__init__() - self._periodic_last_run = {} - for name, task in self._periodic_tasks: - self._periodic_last_run[name] = task._periodic_last_run - - def run_periodic_tasks(self, context, raise_on_error=False): - """Tasks to be run at a periodic interval.""" - idle_for = DEFAULT_INTERVAL - for task_name, task in self._periodic_tasks: - full_task_name = '.'.join([self.__class__.__name__, task_name]) - - spacing = self._periodic_spacing[task_name] - last_run = self._periodic_last_run[task_name] - - # If a periodic task is _nearly_ due, then we'll run it early - idle_for = min(idle_for, spacing) - if last_run is not None: - delta = last_run + spacing - time.time() - if delta > 0.2: - idle_for = min(idle_for, delta) - continue - - LOG.debug("Running periodic task %(full_task_name)s", - {"full_task_name": full_task_name}) - self._periodic_last_run[task_name] = time.time() - - try: - task(self, context) - except Exception as e: - if raise_on_error: - raise - LOG.exception(_LE("Error during %(full_task_name)s: %(e)s"), - {"full_task_name": full_task_name, "e": e}) - time.sleep(0) - - return idle_for diff --git a/mistral/openstack/common/strutils.py b/mistral/openstack/common/strutils.py deleted file mode 100644 index 98d0f3197..000000000 --- a/mistral/openstack/common/strutils.py +++ /dev/null @@ -1,239 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -System-level utilities and helper functions. 
-""" - -import math -import re -import sys -import unicodedata - -import six - -from mistral.openstack.common.gettextutils import _ - - -UNIT_PREFIX_EXPONENT = { - 'k': 1, - 'K': 1, - 'Ki': 1, - 'M': 2, - 'Mi': 2, - 'G': 3, - 'Gi': 3, - 'T': 4, - 'Ti': 4, -} -UNIT_SYSTEM_INFO = { - 'IEC': (1024, re.compile(r'(^[-+]?\d*\.?\d+)([KMGT]i?)?(b|bit|B)$')), - 'SI': (1000, re.compile(r'(^[-+]?\d*\.?\d+)([kMGT])?(b|bit|B)$')), -} - -TRUE_STRINGS = ('1', 't', 'true', 'on', 'y', 'yes') -FALSE_STRINGS = ('0', 'f', 'false', 'off', 'n', 'no') - -SLUGIFY_STRIP_RE = re.compile(r"[^\w\s-]") -SLUGIFY_HYPHENATE_RE = re.compile(r"[-\s]+") - - -def int_from_bool_as_string(subject): - """Interpret a string as a boolean and return either 1 or 0. - - Any string value in: - - ('True', 'true', 'On', 'on', '1') - - is interpreted as a boolean True. - - Useful for JSON-decoded stuff and config file parsing - """ - return bool_from_string(subject) and 1 or 0 - - -def bool_from_string(subject, strict=False, default=False): - """Interpret a string as a boolean. - - A case-insensitive match is performed such that strings matching 't', - 'true', 'on', 'y', 'yes', or '1' are considered True and, when - `strict=False`, anything else returns the value specified by 'default'. - - Useful for JSON-decoded stuff and config file parsing. - - If `strict=True`, unrecognized values, including None, will raise a - ValueError which is useful when parsing values passed in from an API call. - Strings yielding False are 'f', 'false', 'off', 'n', 'no', or '0'. - """ - if not isinstance(subject, six.string_types): - subject = six.text_type(subject) - - lowered = subject.strip().lower() - - if lowered in TRUE_STRINGS: - return True - elif lowered in FALSE_STRINGS: - return False - elif strict: - acceptable = ', '.join( - "'%s'" % s for s in sorted(TRUE_STRINGS + FALSE_STRINGS)) - msg = _("Unrecognized value '%(val)s', acceptable values are:" - " %(acceptable)s") % {'val': subject, - 'acceptable': acceptable} - raise ValueError(msg) - else: - return default - - -def safe_decode(text, incoming=None, errors='strict'): - """Decodes incoming text/bytes string using `incoming` if they're not - already unicode. - - :param incoming: Text's current encoding - :param errors: Errors handling policy. See here for valid - values http://docs.python.org/2/library/codecs.html - :returns: text or a unicode `incoming` encoded - representation of it. - :raises TypeError: If text is not an instance of str - """ - if not isinstance(text, (six.string_types, six.binary_type)): - raise TypeError("%s can't be decoded" % type(text)) - - if isinstance(text, six.text_type): - return text - - if not incoming: - incoming = (sys.stdin.encoding or - sys.getdefaultencoding()) - - try: - return text.decode(incoming, errors) - except UnicodeDecodeError: - # Note(flaper87) If we get here, it means that - # sys.stdin.encoding / sys.getdefaultencoding - # didn't return a suitable encoding to decode - # text. This happens mostly when global LANG - # var is not set correctly and there's no - # default encoding. In this case, most likely - # python will use ASCII or ANSI encoders as - # default encodings but they won't be capable - # of decoding non-ASCII characters. - # - # Also, UTF-8 is being used since it's an ASCII - # extension. - return text.decode('utf-8', errors) - - -def safe_encode(text, incoming=None, - encoding='utf-8', errors='strict'): - """Encodes incoming text/bytes string using `encoding`. 
- - If incoming is not specified, text is expected to be encoded with - current python's default encoding. (`sys.getdefaultencoding`) - - :param incoming: Text's current encoding - :param encoding: Expected encoding for text (Default UTF-8) - :param errors: Errors handling policy. See here for valid - values http://docs.python.org/2/library/codecs.html - :returns: text or a bytestring `encoding` encoded - representation of it. - :raises TypeError: If text is not an instance of str - """ - if not isinstance(text, (six.string_types, six.binary_type)): - raise TypeError("%s can't be encoded" % type(text)) - - if not incoming: - incoming = (sys.stdin.encoding or - sys.getdefaultencoding()) - - if isinstance(text, six.text_type): - return text.encode(encoding, errors) - elif text and encoding != incoming: - # Decode text before encoding it with `encoding` - text = safe_decode(text, incoming, errors) - return text.encode(encoding, errors) - else: - return text - - -def string_to_bytes(text, unit_system='IEC', return_int=False): - """Converts a string into an float representation of bytes. - - The units supported for IEC :: - - Kb(it), Kib(it), Mb(it), Mib(it), Gb(it), Gib(it), Tb(it), Tib(it) - KB, KiB, MB, MiB, GB, GiB, TB, TiB - - The units supported for SI :: - - kb(it), Mb(it), Gb(it), Tb(it) - kB, MB, GB, TB - - Note that the SI unit system does not support capital letter 'K' - - :param text: String input for bytes size conversion. - :param unit_system: Unit system for byte size conversion. - :param return_int: If True, returns integer representation of text - in bytes. (default: decimal) - :returns: Numerical representation of text in bytes. - :raises ValueError: If text has an invalid value. - - """ - try: - base, reg_ex = UNIT_SYSTEM_INFO[unit_system] - except KeyError: - msg = _('Invalid unit system: "%s"') % unit_system - raise ValueError(msg) - match = reg_ex.match(text) - if match: - magnitude = float(match.group(1)) - unit_prefix = match.group(2) - if match.group(3) in ['b', 'bit']: - magnitude /= 8 - else: - msg = _('Invalid string format: %s') % text - raise ValueError(msg) - if not unit_prefix: - res = magnitude - else: - res = magnitude * pow(base, UNIT_PREFIX_EXPONENT[unit_prefix]) - if return_int: - return int(math.ceil(res)) - return res - - -def to_slug(value, incoming=None, errors="strict"): - """Normalize string. - - Convert to lowercase, remove non-word characters, and convert spaces - to hyphens. - - Inspired by Django's `slugify` filter. - - :param value: Text to slugify - :param incoming: Text's current encoding - :param errors: Errors handling policy. See here for valid - values http://docs.python.org/2/library/codecs.html - :returns: slugified unicode representation of `value` - :raises TypeError: If text is not an instance of str - """ - value = safe_decode(value, incoming, errors) - # NOTE(aababilov): no need to use safe_(encode|decode) here: - # encodings are always "ascii", error handling is always "ignore" - # and types are always known (first: unicode; second: str) - value = unicodedata.normalize("NFKD", value).encode( - "ascii", "ignore").decode("ascii") - value = SLUGIFY_STRIP_RE.sub("", value).strip().lower() - return SLUGIFY_HYPHENATE_RE.sub("-", value) diff --git a/mistral/openstack/common/test.py b/mistral/openstack/common/test.py deleted file mode 100644 index 94fe23e5b..000000000 --- a/mistral/openstack/common/test.py +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. -# All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -############################################################################## -############################################################################## -## -## DO NOT MODIFY THIS FILE -## -## This file is being graduated to the oslotest library. Please make all -## changes there, and only backport critical fixes here. - dhellmann -## -############################################################################## -############################################################################## - -"""Common utilities used in testing""" - -import logging -import os -import tempfile - -import fixtures -import testtools - -_TRUE_VALUES = ('True', 'true', '1', 'yes') -_LOG_FORMAT = "%(levelname)8s [%(name)s] %(message)s" - - -class BaseTestCase(testtools.TestCase): - - def setUp(self): - super(BaseTestCase, self).setUp() - self._set_timeout() - self._fake_output() - self._fake_logs() - self.useFixture(fixtures.NestedTempfile()) - self.useFixture(fixtures.TempHomeDir()) - self.tempdirs = [] - - def _set_timeout(self): - test_timeout = os.environ.get('OS_TEST_TIMEOUT', 0) - try: - test_timeout = int(test_timeout) - except ValueError: - # If timeout value is invalid do not set a timeout. - test_timeout = 0 - if test_timeout > 0: - self.useFixture(fixtures.Timeout(test_timeout, gentle=True)) - - def _fake_output(self): - if os.environ.get('OS_STDOUT_CAPTURE') in _TRUE_VALUES: - stdout = self.useFixture(fixtures.StringStream('stdout')).stream - self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout)) - if os.environ.get('OS_STDERR_CAPTURE') in _TRUE_VALUES: - stderr = self.useFixture(fixtures.StringStream('stderr')).stream - self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr)) - - def _fake_logs(self): - if os.environ.get('OS_DEBUG') in _TRUE_VALUES: - level = logging.DEBUG - else: - level = logging.INFO - capture_logs = os.environ.get('OS_LOG_CAPTURE') in _TRUE_VALUES - if capture_logs: - self.useFixture( - fixtures.FakeLogger( - format=_LOG_FORMAT, - level=level, - nuke_handlers=capture_logs, - ) - ) - else: - logging.basicConfig(format=_LOG_FORMAT, level=level) - - def create_tempfiles(self, files, ext='.conf'): - tempfiles = [] - for (basename, contents) in files: - if not os.path.isabs(basename): - (fd, path) = tempfile.mkstemp(prefix=basename, suffix=ext) - else: - path = basename + ext - fd = os.open(path, os.O_CREAT | os.O_WRONLY) - tempfiles.append(path) - try: - os.write(fd, contents) - finally: - os.close(fd) - return tempfiles diff --git a/mistral/openstack/common/threadgroup.py b/mistral/openstack/common/threadgroup.py deleted file mode 100644 index edcabc8bd..000000000 --- a/mistral/openstack/common/threadgroup.py +++ /dev/null @@ -1,147 +0,0 @@ -# Copyright 2012 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -import logging -import threading - -import eventlet -from eventlet import greenpool - -from mistral.openstack.common import loopingcall - - -LOG = logging.getLogger(__name__) - - -def _thread_done(gt, *args, **kwargs): - """Callback function to be passed to GreenThread.link() when we spawn() - Calls the :class:`ThreadGroup` to notify if. - - """ - kwargs['group'].thread_done(kwargs['thread']) - - -class Thread(object): - """Wrapper around a greenthread, that holds a reference to the - :class:`ThreadGroup`. The Thread will notify the :class:`ThreadGroup` when - it has done so it can be removed from the threads list. - """ - def __init__(self, thread, group): - self.thread = thread - self.thread.link(_thread_done, group=group, thread=self) - - def stop(self): - self.thread.kill() - - def wait(self): - return self.thread.wait() - - def link(self, func, *args, **kwargs): - self.thread.link(func, *args, **kwargs) - - -class ThreadGroup(object): - """The point of the ThreadGroup class is to: - - * keep track of timers and greenthreads (making it easier to stop them - when need be). - * provide an easy API to add timers. - """ - def __init__(self, thread_pool_size=10): - self.pool = greenpool.GreenPool(thread_pool_size) - self.threads = [] - self.timers = [] - - def add_dynamic_timer(self, callback, initial_delay=None, - periodic_interval_max=None, *args, **kwargs): - timer = loopingcall.DynamicLoopingCall(callback, *args, **kwargs) - timer.start(initial_delay=initial_delay, - periodic_interval_max=periodic_interval_max) - self.timers.append(timer) - - def add_timer(self, interval, callback, initial_delay=None, - *args, **kwargs): - pulse = loopingcall.FixedIntervalLoopingCall(callback, *args, **kwargs) - pulse.start(interval=interval, - initial_delay=initial_delay) - self.timers.append(pulse) - - def add_thread(self, callback, *args, **kwargs): - gt = self.pool.spawn(callback, *args, **kwargs) - th = Thread(gt, self) - self.threads.append(th) - return th - - def thread_done(self, thread): - self.threads.remove(thread) - - def _stop_threads(self): - current = threading.current_thread() - - # Iterate over a copy of self.threads so thread_done doesn't - # modify the list while we're iterating - for x in self.threads[:]: - if x is current: - # don't kill the current thread. - continue - try: - x.stop() - except Exception as ex: - LOG.exception(six.text_type(ex)) - - def stop_timers(self): - for x in self.timers: - try: - x.stop() - except Exception as ex: - LOG.exception(ex) - self.timers = [] - - def stop(self, graceful=False): - """stop function has the option of graceful=True/False. - - * In case of graceful=True, wait for all threads to be finished. - Never kill threads. - * In case of graceful=False, kill threads immediately. 
- """ - self.stop_timers() - if graceful: - # In case of graceful=True, wait for all threads to be - # finished, never kill threads - self.wait() - else: - # In case of graceful=False(Default), kill threads - # immediately - self._stop_threads() - - def wait(self): - for x in self.timers: - try: - x.wait() - except eventlet.greenlet.GreenletExit: - pass - except Exception as ex: - LOG.exception(ex) - current = threading.current_thread() - - # Iterate over a copy of self.threads so thread_done doesn't - # modify the list while we're iterating - for x in self.threads[:]: - if x is current: - continue - try: - x.wait() - except eventlet.greenlet.GreenletExit: - pass - except Exception as ex: - LOG.exception(ex) diff --git a/mistral/openstack/common/timeutils.py b/mistral/openstack/common/timeutils.py deleted file mode 100644 index 52688a026..000000000 --- a/mistral/openstack/common/timeutils.py +++ /dev/null @@ -1,210 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Time related utilities and helper functions. -""" - -import calendar -import datetime -import time - -import iso8601 -import six - - -# ISO 8601 extended time format with microseconds -_ISO8601_TIME_FORMAT_SUBSECOND = '%Y-%m-%dT%H:%M:%S.%f' -_ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' -PERFECT_TIME_FORMAT = _ISO8601_TIME_FORMAT_SUBSECOND - - -def isotime(at=None, subsecond=False): - """Stringify time in ISO 8601 format.""" - if not at: - at = utcnow() - st = at.strftime(_ISO8601_TIME_FORMAT - if not subsecond - else _ISO8601_TIME_FORMAT_SUBSECOND) - tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC' - st += ('Z' if tz == 'UTC' else tz) - return st - - -def parse_isotime(timestr): - """Parse time from ISO 8601 format.""" - try: - return iso8601.parse_date(timestr) - except iso8601.ParseError as e: - raise ValueError(six.text_type(e)) - except TypeError as e: - raise ValueError(six.text_type(e)) - - -def strtime(at=None, fmt=PERFECT_TIME_FORMAT): - """Returns formatted utcnow.""" - if not at: - at = utcnow() - return at.strftime(fmt) - - -def parse_strtime(timestr, fmt=PERFECT_TIME_FORMAT): - """Turn a formatted time back into a datetime.""" - return datetime.datetime.strptime(timestr, fmt) - - -def normalize_time(timestamp): - """Normalize time in arbitrary timezone to UTC naive object.""" - offset = timestamp.utcoffset() - if offset is None: - return timestamp - return timestamp.replace(tzinfo=None) - offset - - -def is_older_than(before, seconds): - """Return True if before is older than seconds.""" - if isinstance(before, six.string_types): - before = parse_strtime(before).replace(tzinfo=None) - else: - before = before.replace(tzinfo=None) - - return utcnow() - before > datetime.timedelta(seconds=seconds) - - -def is_newer_than(after, seconds): - """Return True if after is newer than seconds.""" - if isinstance(after, six.string_types): - after = parse_strtime(after).replace(tzinfo=None) - else: - after = after.replace(tzinfo=None) - - return after - 
utcnow() > datetime.timedelta(seconds=seconds) - - -def utcnow_ts(): - """Timestamp version of our utcnow function.""" - if utcnow.override_time is None: - # NOTE(kgriffs): This is several times faster - # than going through calendar.timegm(...) - return int(time.time()) - - return calendar.timegm(utcnow().timetuple()) - - -def utcnow(): - """Overridable version of utils.utcnow.""" - if utcnow.override_time: - try: - return utcnow.override_time.pop(0) - except AttributeError: - return utcnow.override_time - return datetime.datetime.utcnow() - - -def iso8601_from_timestamp(timestamp): - """Returns a iso8601 formatted date from timestamp.""" - return isotime(datetime.datetime.utcfromtimestamp(timestamp)) - - -utcnow.override_time = None - - -def set_time_override(override_time=None): - """Overrides utils.utcnow. - - Make it return a constant time or a list thereof, one at a time. - - :param override_time: datetime instance or list thereof. If not - given, defaults to the current UTC time. - """ - utcnow.override_time = override_time or datetime.datetime.utcnow() - - -def advance_time_delta(timedelta): - """Advance overridden time using a datetime.timedelta.""" - assert(not utcnow.override_time is None) - try: - for dt in utcnow.override_time: - dt += timedelta - except TypeError: - utcnow.override_time += timedelta - - -def advance_time_seconds(seconds): - """Advance overridden time by seconds.""" - advance_time_delta(datetime.timedelta(0, seconds)) - - -def clear_time_override(): - """Remove the overridden time.""" - utcnow.override_time = None - - -def marshall_now(now=None): - """Make an rpc-safe datetime with microseconds. - - Note: tzinfo is stripped, but not required for relative times. - """ - if not now: - now = utcnow() - return dict(day=now.day, month=now.month, year=now.year, hour=now.hour, - minute=now.minute, second=now.second, - microsecond=now.microsecond) - - -def unmarshall_time(tyme): - """Unmarshall a datetime dict.""" - return datetime.datetime(day=tyme['day'], - month=tyme['month'], - year=tyme['year'], - hour=tyme['hour'], - minute=tyme['minute'], - second=tyme['second'], - microsecond=tyme['microsecond']) - - -def delta_seconds(before, after): - """Return the difference between two timing objects. - - Compute the difference in seconds between two date, time, or - datetime objects (as a float, to microsecond resolution). - """ - delta = after - before - return total_seconds(delta) - - -def total_seconds(delta): - """Return the total seconds of datetime.timedelta object. - - Compute total seconds of datetime.timedelta, datetime.timedelta - doesn't have method total_seconds in Python2.6, calculate it manually. - """ - try: - return delta.total_seconds() - except AttributeError: - return ((delta.days * 24 * 3600) + delta.seconds + - float(delta.microseconds) / (10 ** 6)) - - -def is_soon(dt, window): - """Determines if time is going to happen in the next window seconds. - - :param dt: the time - :param window: minimum seconds to remain to consider the time not soon - - :return: True if expiration is within the given duration - """ - soon = (utcnow() + datetime.timedelta(seconds=window)) - return normalize_time(dt) <= soon diff --git a/mistral/openstack/common/uuidutils.py b/mistral/openstack/common/uuidutils.py deleted file mode 100644 index 234b880c9..000000000 --- a/mistral/openstack/common/uuidutils.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright (c) 2012 Intel Corporation. -# All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -UUID related utilities and helper functions. -""" - -import uuid - - -def generate_uuid(): - return str(uuid.uuid4()) - - -def is_uuid_like(val): - """Returns validation of a value as a UUID. - - For our purposes, a UUID is a canonical form string: - aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa - - """ - try: - return str(uuid.UUID(val)) == val - except (TypeError, ValueError, AttributeError): - return False diff --git a/mistral/services/periodic.py b/mistral/services/periodic.py index 57ffc619d..4440f1587 100644 --- a/mistral/services/periodic.py +++ b/mistral/services/periodic.py @@ -14,18 +14,21 @@ # See the License for the specific language governing permissions and # limitations under the License. +from oslo_config import cfg from oslo_log import log as logging +from oslo_service import periodic_task +from oslo_service import threadgroup from mistral import context as auth_ctx from mistral.db.v2 import api as db_api_v2 from mistral.engine import rpc -from mistral.openstack.common import periodic_task -from mistral.openstack.common import threadgroup from mistral.services import security from mistral.services import triggers LOG = logging.getLogger(__name__) +CONF = cfg.CONF + class MistralPeriodicTasks(periodic_task.PeriodicTasks): @periodic_task.periodic_task(spacing=1, run_immediately=True) @@ -67,7 +70,7 @@ class MistralPeriodicTasks(periodic_task.PeriodicTasks): def setup(): tg = threadgroup.ThreadGroup() - pt = MistralPeriodicTasks() + pt = MistralPeriodicTasks(CONF) ctx = auth_ctx.MistralContext( user_id=None, diff --git a/mistral/services/scheduler.py b/mistral/services/scheduler.py index d4edcbaa1..8051cb96f 100644 --- a/mistral/services/scheduler.py +++ b/mistral/services/scheduler.py @@ -15,18 +15,21 @@ import copy import datetime +from oslo_config import cfg from oslo_log import log as logging +from oslo_service import periodic_task +from oslo_service import threadgroup +from oslo_utils import importutils from mistral import context from mistral.db.v2 import api as db_api from mistral import exceptions as exc -from mistral.openstack.common import importutils -from mistral.openstack.common import periodic_task -from mistral.openstack.common import threadgroup LOG = logging.getLogger(__name__) +CONF = cfg.CONF + # {scheduler_instance: thread_group} _schedulers = {} @@ -145,7 +148,7 @@ class CallScheduler(periodic_task.PeriodicTasks): def setup(): tg = threadgroup.ThreadGroup() - scheduler = CallScheduler() + scheduler = CallScheduler(CONF) tg.add_dynamic_timer( scheduler.run_periodic_tasks, diff --git a/mistral/tests/unit/api/test_auth.py b/mistral/tests/unit/api/test_auth.py index ca85badfe..01ecfbacb 100644 --- a/mistral/tests/unit/api/test_auth.py +++ b/mistral/tests/unit/api/test_auth.py @@ -16,10 +16,10 @@ import datetime import uuid from oslo_config import cfg +from oslo_utils import timeutils import pecan import pecan.testing -from mistral.openstack.common import timeutils from mistral.tests.unit.api import base diff --git 
a/mistral/tests/unit/api/v2/test_root.py b/mistral/tests/unit/api/v2/test_root.py index 00e2cd2bd..a6875fc2e 100644 --- a/mistral/tests/unit/api/v2/test_root.py +++ b/mistral/tests/unit/api/v2/test_root.py @@ -13,8 +13,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from oslo_serialization import jsonutils -from mistral.openstack.common import jsonutils from mistral.tests.unit.api import base diff --git a/mistral/utils/javascript.py b/mistral/utils/javascript.py index 67d98e4b7..2e8eb302a 100644 --- a/mistral/utils/javascript.py +++ b/mistral/utils/javascript.py @@ -15,8 +15,9 @@ import abc import json +from oslo_utils import importutils + from mistral import exceptions as exc -from mistral.openstack.common import importutils _PYV8 = importutils.try_import('PyV8') diff --git a/openstack-common.conf b/openstack-common.conf deleted file mode 100644 index ddb697d03..000000000 --- a/openstack-common.conf +++ /dev/null @@ -1,15 +0,0 @@ -[DEFAULT] - -# The list of modules to copy from oslo-incubator.git -module=jsonutils -module=lockutils -module=loopingcall -module=periodic_task -module=threadgroup -module=timeutils -module=importutils -module=strutils -module=uuidutils - -# The base module to hold the copy of openstack.common -base=mistral diff --git a/requirements.txt b/requirements.txt index 3b9a4e8e7..909eee36e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,34 +1,38 @@ +# The order of packages is significant, because pip processes them in the order +# of appearance. Changing the order has an impact on the overall integration +# process, which may cause wedges in the gate later. alembic>=0.7.2 -amqplib>=0.6.1 # This is not in global requirements (master branch) argparse Babel>=1.3 croniter>=0.3.4 # MIT License -eventlet>=0.17.3 +eventlet>=0.17.4 iso8601>=0.1.9 -jsonschema>=2.0.0,<3.0.0 +jsonschema!=2.5.0,<3.0.0,>=2.0.0 keystonemiddleware>=1.5.0 kombu>=3.0.7 mock>=1.0 networkx>=1.8 -oslo.config>=1.11.0 # Apache-2.0 -oslo.db>=1.7.0 # Apache-2.0 -oslo.messaging>=1.8.0 -oslo.utils>=1.4.0 # Apache-2.0 -oslo.log>=1.2.0 # Apache-2.0 +oslo.config>=1.11.0 # Apache-2.0 +oslo.db>=1.10.0 # Apache-2.0 +oslo.messaging!=1.12.0,>=1.8.0 # Apache-2.0 +oslo.utils>=1.6.0 # Apache-2.0 +oslo.log>=1.2.0 # Apache-2.0 +oslo.serialization>=1.4.0 # Apache-2.0 +oslo.service>=0.1.0 # Apache-2.0 paramiko>=1.13.0 -pbr>=0.6,!=0.7,<1.0 +pbr<2.0,>=0.11 pecan>=0.8.0 -posix_ipc -python-cinderclient>=1.1.0 -python-glanceclient>=0.15.0 +posix-ipc +python-cinderclient>=1.2.2 +python-glanceclient>=0.18.0 python-heatclient>=0.3.0 -python-keystoneclient>=1.1.0 -python-neutronclient>=2.3.11,<3 +python-keystoneclient>=1.6.0 +python-neutronclient<3,>=2.3.11 python-novaclient>=2.22.0 PyYAML>=3.1.0 -requests>=2.2.0,!=2.4.0 +requests>=2.5.2 six>=1.9.0 -SQLAlchemy>=0.9.7,<=0.9.99 -stevedore>=1.3.0 # Apache-2.0 -WSME>=0.6 +SQLAlchemy<1.1.0,>=0.9.7 +stevedore>=1.5.0 # Apache-2.0 +WSME>=0.7 yaql==0.2.6 # This is not in global requirements diff --git a/run_tests.sh b/run_tests.sh index 305c4e571..7fdf2b47c 100755 --- a/run_tests.sh +++ b/run_tests.sh @@ -167,7 +167,7 @@ function run_pep8 { } -TESTRTESTS="python -m mistral.openstack.common.lockutils python setup.py testr" +TESTRTESTS="python setup.py testr" if [ $never_venv -eq 0 ] then diff --git a/test-requirements.txt b/test-requirements.txt index 447e4eec4..71b0df691 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,16 +1,17 
@@ +# The order of packages is significant, because pip processes them in the order +# of appearance. Changing the order has an impact on the overall integration +# process, which may cause wedges in the gate later. coverage>=3.6 -docutils==0.9.1 fixtures>=0.3.14 -hacking>=0.10.0,<0.11 -lockfile>=0.8 +hacking<0.11,>=0.10.0 nose -oslosphinx>=2.5.0,<2.6.0 # Apache-2.0 -oslotest>=1.5.1,<1.6.0 # Apache-2.0 +oslosphinx>=2.5.0 # Apache-2.0 +oslotest>=1.5.1 # Apache-2.0 pyflakes==0.8.1 -pylint>=1.3.0 # GNU GPL v2 -sphinx>=1.1.2,!=1.2.0,!=1.3b1,<1.3 +pylint==1.4.1 # GNU GPL v2 +sphinx!=1.2.0,!=1.3b1,<1.3,>=1.1.2 sphinxcontrib-httpdomain sphinxcontrib-pecanwsme>=0.8 testrepository>=0.0.18 -testtools>=0.9.36,!=1.2.0 +testtools>=1.4.0 unittest2 diff --git a/tox.ini b/tox.ini index 6c50ed0b6..2e8c0f3ca 100644 --- a/tox.ini +++ b/tox.ini @@ -12,7 +12,7 @@ setenv = VIRTUAL_ENV={envdir} deps = -r{toxinidir}/requirements.txt -r{toxinidir}/test-requirements.txt commands = - python -m mistral.openstack.common.lockutils python setup.py test --slowest --testr-args='{posargs}' + python setup.py test --slowest --testr-args='{posargs}' whitelist_externals = rm [testenv:pep8]