From 185dd9305a0609f4ed6d8416e447aca35cb7d22b Mon Sep 17 00:00:00 2001 From: Steve McLellan Date: Wed, 28 May 2014 14:53:33 -0500 Subject: [PATCH] Add policy checks to API Adds ability for deploy-time auth checks to allow/disallow functionality, in line with other openstack projects. Includes update of code in openstack/common, which is why the patchset is so large. oslo-incubator changeset is May 27th caed79d8239679cb74476bb0d9e5011b4fcc39da. Implements blueprint policy-checks-in-api Change-Id: I67a431dcc74f0a77ed48b7a489136d5008773cea --- contrib/devstack/lib/murano | 2 + etc/murano/policy.json | 6 + murano/api/middleware/context.py | 5 +- murano/api/v1/catalog.py | 23 + murano/api/v1/deployments.py | 8 + murano/api/v1/environments.py | 10 +- murano/api/v1/instance_statistics.py | 7 + murano/cmd/api.py | 2 + murano/common/policy.py | 83 ++ murano/context.py | 10 +- murano/openstack/common/__init__.py | 15 + murano/openstack/common/config/generator.py | 16 +- murano/openstack/common/context.py | 2 +- .../common/db/sqlalchemy/migration.py | 18 +- .../openstack/common/db/sqlalchemy/models.py | 10 +- .../common/db/sqlalchemy/provision.py | 88 +- .../openstack/common/db/sqlalchemy/session.py | 113 ++- .../common/db/sqlalchemy/test_base.py | 30 +- .../common/db/sqlalchemy/test_migrations.py | 6 +- .../openstack/common/db/sqlalchemy/utils.py | 33 +- murano/openstack/common/eventlet_backdoor.py | 4 +- murano/openstack/common/excutils.py | 26 +- murano/openstack/common/fileutils.py | 2 +- murano/openstack/common/gettextutils.py | 238 ++--- murano/openstack/common/importutils.py | 4 +- murano/openstack/common/jsonutils.py | 22 +- murano/openstack/common/lockutils.py | 11 +- murano/openstack/common/log.py | 75 +- murano/openstack/common/loopingcall.py | 13 +- murano/openstack/common/policy.py | 898 ++++++++++++++++++ murano/openstack/common/service.py | 5 +- murano/openstack/common/sslutils.py | 3 - murano/openstack/common/strutils.py | 239 +++++ 
murano/openstack/common/systemd.py | 6 +- murano/openstack/common/threadgroup.py | 20 +- murano/tests/api/base.py | 28 +- murano/tests/api/v1/test_environments.py | 37 +- tools/config/check_uptodate.sh | 4 +- tools/config/generate_sample.sh | 20 +- tools/install_venv_common.py | 2 +- 40 files changed, 1812 insertions(+), 332 deletions(-) create mode 100644 etc/murano/policy.json create mode 100644 murano/common/policy.py create mode 100644 murano/openstack/common/policy.py create mode 100644 murano/openstack/common/strutils.py diff --git a/contrib/devstack/lib/murano b/contrib/devstack/lib/murano index 0c21e344..68506a3d 100644 --- a/contrib/devstack/lib/murano +++ b/contrib/devstack/lib/murano @@ -32,6 +32,7 @@ MURANO_PYTHONCLIENT_DIR=$DEST/python-muranoclient MURANO_DIR=$DEST/murano MURANO_CONF_DIR=${MURANO_CONF_DIR:-/etc/murano} MURANO_CONF_FILE=${MURANO_CONF_DIR}/murano.conf +MURANO_POLICY_FILE=${MURANO_CONF_DIR}/policy.json MURANO_DEBUG=${MURANO_DEBUG:-True} MURANO_SERVICE_HOST=${MURANO_SERVICE_HOST:-$SERVICE_HOST} @@ -134,6 +135,7 @@ function configure_murano { # Copy over Murano configuration file and configure common parameters. 
cp $MURANO_DIR/etc/murano/murano.conf.sample $MURANO_CONF_FILE cp $MURANO_DIR/etc/murano/murano-paste.ini $MURANO_CONF_DIR + cp $MURANO_DIR/etc/murano/policy.json $MURANO_POLICY_FILE iniset $MURANO_CONF_FILE DEFAULT debug $MURANO_DEBUG iniset $MURANO_CONF_FILE DEFAULT use_syslog $SYSLOG diff --git a/etc/murano/policy.json b/etc/murano/policy.json new file mode 100644 index 00000000..feb8cb62 --- /dev/null +++ b/etc/murano/policy.json @@ -0,0 +1,6 @@ +{ + "context_is_admin": "role:admin or is_admin:True", + + "default": "" +} + diff --git a/murano/api/middleware/context.py b/murano/api/middleware/context.py index 2763e16b..a2dd519d 100644 --- a/murano/api/middleware/context.py +++ b/murano/api/middleware/context.py @@ -41,13 +41,14 @@ class ContextMiddleware(wsgi.Middleware): :param req: wsgi request object that will be given the context object """ + roles = [r.strip() for r in req.headers.get('X-Roles').split(',')] kwargs = { 'user': req.headers.get('X-User-Id'), 'tenant': req.headers.get('X-Tenant-Id'), 'auth_token': req.headers.get('X-Auth-Token'), 'session': req.headers.get('X-Configuration-Session'), - 'is_admin': CONF.admin_role in [ - role.strip() for role in req.headers.get('X-Roles').split(',')] + 'is_admin': CONF.admin_role in roles, + 'roles': roles } req.context = murano.context.RequestContext(**kwargs) diff --git a/murano/api/v1/catalog.py b/murano/api/v1/catalog.py index e3fe0570..f35fcb42 100644 --- a/murano/api/v1/catalog.py +++ b/murano/api/v1/catalog.py @@ -23,6 +23,7 @@ from webob import exc import murano.api.v1 from murano.api.v1 import schemas +from murano.common import policy from murano.db.catalog import api as db_api from murano.openstack.common.db import exception as db_exc from murano.openstack.common import exception @@ -132,6 +133,8 @@ class Controller(object): "value":"New description" } { "op": "replace", "path": "/name", "value": "New name" } """ + policy.check("update_package", req.context, {'package_id': package_id}) + 
_check_content_type(req, 'application/murano-packages-json-patch') if not isinstance(body, list): msg = _('Request body must be a JSON array of operation objects.') @@ -142,6 +145,8 @@ class Controller(object): return package.to_dict() def get(self, req, package_id): + policy.check("get_package", req.context, {'package_id': package_id}) + package = db_api.package_get(package_id, req.context) return package.to_dict() @@ -163,6 +168,8 @@ class Controller(object): return value + policy.check("search_packages", req.context) + filters = _get_filters(req.GET.items()) limit = _validate_limit(filters.get('limit')) if limit is None: @@ -181,6 +188,8 @@ class Controller(object): Upload new file archive for the new package together with package metadata """ + policy.check("upload_package", req.context) + _check_content_type(req, 'multipart/form-data') file_obj, package_meta = _validate_body(body) if package_meta: @@ -224,21 +233,35 @@ class Controller(object): return package.to_dict() def get_ui(self, req, package_id): + target = {'package_id': package_id} + policy.check("get_package_ui", req.context, target) + package = db_api.package_get(package_id, req.context) return package.ui_definition def get_logo(self, req, package_id): + target = {'package_id': package_id} + policy.check("get_package_logo", req.context, target) + package = db_api.package_get(package_id, req.context) return package.logo def download(self, req, package_id): + target = {'package_id': package_id} + policy.check("download_package", req.context, target) + package = db_api.package_get(package_id, req.context) return package.archive def delete(self, req, package_id): + target = {'package_id': package_id} + policy.check("delete_package", req.context, target) + db_api.package_delete(package_id, req.context) def show_categories(self, req): + policy.check("show_categories", req.context) + categories = db_api.categories_list() return {'categories': [category.name for category in categories]} diff --git 
a/murano/api/v1/deployments.py b/murano/api/v1/deployments.py index 74774903..8f214772 100644 --- a/murano/api/v1/deployments.py +++ b/murano/api/v1/deployments.py @@ -15,6 +15,7 @@ from sqlalchemy import desc from webob import exc from murano.api.v1 import request_statistics +from murano.common import policy from murano.common import utils from murano.db import models from murano.db import session as db_session @@ -31,6 +32,9 @@ API_NAME = 'Deployments' class Controller(object): @request_statistics.stats_count(API_NAME, 'Index') def index(self, request, environment_id): + target = {"environment_id": environment_id} + policy.check("list_deployments", request.context, target) + unit = db_session.get_session() verify_and_get_env(unit, environment_id, request) query = unit.query(models.Deployment) \ @@ -43,6 +47,10 @@ class Controller(object): @request_statistics.stats_count(API_NAME, 'Statuses') def statuses(self, request, environment_id, deployment_id): + target = {"environment_id": environment_id, + "deployment_id": deployment_id} + policy.check("statuses_deployments", request.context, target) + unit = db_session.get_session() query = unit.query(models.Status) \ .filter_by(deployment_id=deployment_id) \ diff --git a/murano/api/v1/environments.py b/murano/api/v1/environments.py index 2a148095..f2b2f211 100644 --- a/murano/api/v1/environments.py +++ b/murano/api/v1/environments.py @@ -16,6 +16,7 @@ from sqlalchemy import desc from webob import exc from murano.api.v1 import request_statistics +from murano.common import policy from murano.common import utils from murano.db import models from murano.db.services import core_services @@ -33,10 +34,10 @@ API_NAME = 'Environments' class Controller(object): - @request_statistics.stats_count(API_NAME, 'Index') def index(self, request): LOG.debug('Environments:List') + policy.check('list_environments', request.context, {}) #Only environments from same tenant as user should be returned filters = {'tenant_id': 
request.context.tenant} @@ -48,6 +49,7 @@ class Controller(object): @request_statistics.stats_count(API_NAME, 'Create') def create(self, request, body): LOG.debug('Environments:Create '.format(body)) + policy.check('create_environment', request.context, {}) try: environment = envs.EnvironmentServices.create( @@ -62,6 +64,8 @@ class Controller(object): @request_statistics.stats_count(API_NAME, 'Show') def show(self, request, environment_id): LOG.debug('Environments:Show '.format(environment_id)) + target = {"environment_id": environment_id} + policy.check('show_environment', request.context, target) session = db_session.get_session() environment = session.query(models.Environment).get(environment_id) @@ -93,6 +97,8 @@ class Controller(object): def update(self, request, environment_id, body): LOG.debug('Environments:Update '.format(environment_id, body)) + target = {"environment_id": environment_id} + policy.check('update_environment', request.context, target) session = db_session.get_session() environment = session.query(models.Environment).get(environment_id) @@ -115,6 +121,8 @@ class Controller(object): @request_statistics.stats_count(API_NAME, 'Delete') def delete(self, request, environment_id): LOG.debug('Environments:Delete '.format(environment_id)) + target = {"environment_id": environment_id} + policy.check('delete_environment', request.context, target) unit = db_session.get_session() environment = unit.query(models.Environment).get(environment_id) diff --git a/murano/api/v1/instance_statistics.py b/murano/api/v1/instance_statistics.py index bf249e78..946d388c 100644 --- a/murano/api/v1/instance_statistics.py +++ b/murano/api/v1/instance_statistics.py @@ -13,6 +13,7 @@ # under the License. 
from murano.api.v1 import request_statistics +from murano.common import policy from murano.db.services import instances from murano.openstack.common.gettextutils import _ # noqa @@ -28,6 +29,8 @@ class Controller(object): @request_statistics.stats_count(API_NAME, 'GetAggregated') def get_aggregated(self, request, environment_id): LOG.debug('EnvironmentStatistics:GetAggregated') + target = {"environment_id": environment_id} + policy.check("get_aggregated_statistics", request.context, target) # TODO (stanlagun): Check that caller is authorized to access # tenant's statistics @@ -38,6 +41,8 @@ class Controller(object): @request_statistics.stats_count(API_NAME, 'GetForInstance') def get_for_instance(self, request, environment_id, instance_id): LOG.debug('EnvironmentStatistics:GetForInstance') + target = {"environment_id": environment_id, "instance_id": instance_id} + policy.check("get_instance_statistics", request.context, target) # TODO (stanlagun): Check that caller is authorized to access # tenant's statistics @@ -48,6 +53,8 @@ class Controller(object): @request_statistics.stats_count(API_NAME, 'GetForEnvironment') def get_for_environment(self, request, environment_id): LOG.debug('EnvironmentStatistics:GetForEnvironment') + target = {"environment_id": environment_id} + policy.check("get_statistics", request.context, target) # TODO (stanlagun): Check that caller is authorized to access # tenant's statistics diff --git a/murano/cmd/api.py b/murano/cmd/api.py index bb2fffc2..42e6e2a0 100644 --- a/murano/cmd/api.py +++ b/murano/cmd/api.py @@ -35,6 +35,7 @@ if os.path.exists(os.path.join(root, 'murano', '__init__.py')): from murano.api.v1 import request_statistics from murano.common import config +from murano.common import policy from murano.common import server from murano.common import statservice as stats from murano.openstack.common import log @@ -47,6 +48,7 @@ def main(): config.parse_args() log.setup('murano') request_statistics.init_stats() + policy.init() 
launcher = service.ServiceLauncher() diff --git a/murano/common/policy.py b/murano/common/policy.py new file mode 100644 index 00000000..1e40bd78 --- /dev/null +++ b/murano/common/policy.py @@ -0,0 +1,83 @@ +# Copyright (c) 2014 Hewlett-Packard Development Company, L.P. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# Based on designate/policy.py + +import murano.openstack.common.log as logging +from murano.openstack.common import policy +from oslo.config import cfg +from webob import exc as exceptions + +LOG = logging.getLogger(__name__) + +CONF = cfg.CONF + +_ENFORCER = None + + +def reset(): + global _ENFORCER + if _ENFORCER: + _ENFORCER.clear() + _ENFORCER = None + + +def set_rules(data, default_rule=None, overwrite=True): + default_rule = default_rule or cfg.CONF.policy_default_rule + if not _ENFORCER: + LOG.debug("Enforcer not present, recreating at rules stage.") + init() + + if default_rule: + _ENFORCER.default_rule = default_rule + + msg = "Loading rules %s, default: %s, overwrite: %s" + LOG.debug(msg, data, default_rule, overwrite) + + if isinstance(data, dict): + rules = dict((k, policy.parse_rule(v)) for k, v in data.items()) + rules = policy.Rules(rules, default_rule) + else: + rules = policy.Rules.load_json(data, default_rule) + + _ENFORCER.set_rules(rules, overwrite=overwrite) + + +def init(default_rule=None): + global _ENFORCER + if not _ENFORCER: + LOG.debug("Enforcer is not present, recreating.") + _ENFORCER = policy.Enforcer() + 
_ENFORCER.load_rules() + + +def check(rule, ctxt, target={}, do_raise=True, exc=exceptions.HTTPForbidden): + creds = ctxt.to_dict() + + try: + result = _ENFORCER.enforce(rule, target, creds, do_raise, exc) + except Exception: + result = False + raise + else: + return result + finally: + extra = {'policy': {'rule': rule, 'target': target}} + + if result: + LOG.audit("Policy check succeeded for rule '%s' on target %s", + rule, repr(target), extra=extra) + else: + LOG.audit("Policy check failed for rule '%s' on target: %s", + rule, repr(target), extra=extra) diff --git a/murano/context.py b/murano/context.py index 13e84198..cf600354 100644 --- a/murano/context.py +++ b/murano/context.py @@ -17,22 +17,28 @@ class RequestContext(object): """ Stores information about the security context under which the user accesses the system, as well as additional request information. + + TODO: (sjmc7) - extend openstack.common.context """ def __init__(self, auth_token=None, user=None, - tenant=None, session=None, is_admin=None): + tenant=None, session=None, is_admin=None, + roles=None): self.auth_token = auth_token self.user = user self.tenant = tenant self.session = session self.is_admin = is_admin + self.roles = roles or [] def to_dict(self): return { 'user': self.user, 'tenant': self.tenant, 'auth_token': self.auth_token, - 'session': self.session + 'session': self.session, + 'roles': self.roles, + 'is_admin': self.is_admin } @classmethod diff --git a/murano/openstack/common/__init__.py b/murano/openstack/common/__init__.py index 2a00f3bc..d1223eaf 100644 --- a/murano/openstack/common/__init__.py +++ b/murano/openstack/common/__init__.py @@ -1,2 +1,17 @@ +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + import six + + six.add_move(six.MovedModule('mox', 'mox', 'mox3.mox')) diff --git a/murano/openstack/common/config/generator.py b/murano/openstack/common/config/generator.py index 882bcebc..08244408 100644 --- a/murano/openstack/common/config/generator.py +++ b/murano/openstack/common/config/generator.py @@ -64,6 +64,10 @@ BASEDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), WORDWRAP_WIDTH = 60 +def raise_extension_exception(extmanager, ep, err): + raise + + def generate(argv): parser = argparse.ArgumentParser( description='generate sample configuration file', @@ -107,6 +111,7 @@ def generate(argv): 'oslo.config.opts', names=list(set(parsed_args.libraries)), invoke_on_load=False, + on_load_failure_callback=raise_extension_exception ) for ext in loader: for group, opts in ext.plugin(): @@ -218,6 +223,8 @@ def _get_my_ip(): def _sanitize_default(name, value): """Set up a reasonably sensible default for pybasedir, my_ip and host.""" + hostname = socket.gethostname() + fqdn = socket.getfqdn() if value.startswith(sys.prefix): # NOTE(jd) Don't use os.path.join, because it is likely to think the # second part is an absolute pathname and therefore drop the first @@ -229,8 +236,13 @@ def _sanitize_default(name, value): return value.replace(BASEDIR, '') elif value == _get_my_ip(): return '10.0.0.1' - elif value in (socket.gethostname(), socket.getfqdn()) and 'host' in name: - return 'murano' + elif value in (hostname, fqdn): + if 'host' in name: + return 'murano' + elif value.endswith(hostname): + return value.replace(hostname, 'murano') + elif 
value.endswith(fqdn): + return value.replace(fqdn, 'murano') elif value.strip() != value: return '"%s"' % value return value diff --git a/murano/openstack/common/context.py b/murano/openstack/common/context.py index 09019ee3..3eeb445e 100644 --- a/murano/openstack/common/context.py +++ b/murano/openstack/common/context.py @@ -25,7 +25,7 @@ import uuid def generate_request_id(): - return 'req-%s' % str(uuid.uuid4()) + return b'req-' + str(uuid.uuid4()).encode('ascii') class RequestContext(object): diff --git a/murano/openstack/common/db/sqlalchemy/migration.py b/murano/openstack/common/db/sqlalchemy/migration.py index e98db13b..e0f7a718 100644 --- a/murano/openstack/common/db/sqlalchemy/migration.py +++ b/murano/openstack/common/db/sqlalchemy/migration.py @@ -168,7 +168,7 @@ def patch_migrate(): sqlite.SQLiteConstraintGenerator) -def db_sync(engine, abs_path, version=None, init_version=0): +def db_sync(engine, abs_path, version=None, init_version=0, sanity_check=True): """Upgrade or downgrade a database. Function runs the upgrade() or downgrade() functions in change scripts. @@ -179,7 +179,9 @@ def db_sync(engine, abs_path, version=None, init_version=0): If None - database will update to the latest available version. 
:param init_version: Initial database version + :param sanity_check: Require schema sanity checking for all tables """ + if version is not None: try: version = int(version) @@ -189,7 +191,8 @@ def db_sync(engine, abs_path, version=None, init_version=0): current_version = db_version(engine, abs_path, init_version) repository = _find_migrate_repo(abs_path) - _db_schema_sanity_check(engine) + if sanity_check: + _db_schema_sanity_check(engine) if version is None or version > current_version: return versioning_api.upgrade(engine, repository, version) else: @@ -210,8 +213,15 @@ def _db_schema_sanity_check(engine): 'where TABLE_SCHEMA=%s and ' 'TABLE_COLLATION NOT LIKE "%%utf8%%"') - table_names = [res[0] for res in engine.execute(onlyutf8_sql, - engine.url.database)] + # NOTE(morganfainberg): exclude the sqlalchemy-migrate and alembic + # versioning tables from the tables we need to verify utf8 status on. + # Non-standard table names are not supported. + EXCLUDED_TABLES = ['migrate_version', 'alembic_version'] + + table_names = [res[0] for res in + engine.execute(onlyutf8_sql, engine.url.database) if + res[0].lower() not in EXCLUDED_TABLES] + if len(table_names) > 0: raise ValueError(_('Tables "%s" have non utf8 collation, ' 'please make sure all tables are CHARSET=utf8' diff --git a/murano/openstack/common/db/sqlalchemy/models.py b/murano/openstack/common/db/sqlalchemy/models.py index 2b54119c..e0c07e6c 100644 --- a/murano/openstack/common/db/sqlalchemy/models.py +++ b/murano/openstack/common/db/sqlalchemy/models.py @@ -29,7 +29,7 @@ from sqlalchemy.orm import object_mapper from murano.openstack.common import timeutils -class ModelBase(object): +class ModelBase(six.Iterator): """Base class for models.""" __table_initialized__ = False @@ -70,7 +70,7 @@ class ModelBase(object): return [] def __iter__(self): - columns = dict(object_mapper(self).columns).keys() + columns = list(dict(object_mapper(self).columns).keys()) # NOTE(russellb): Allow models to specify other keys 
that can be looked # up, beyond the actual db columns. An example would be the 'name' # property for an Instance. @@ -78,10 +78,14 @@ class ModelBase(object): self._i = iter(columns) return self - def next(self): + # In Python 3, __next__() has replaced next(). + def __next__(self): n = six.advance_iterator(self._i) return n, getattr(self, n) + def next(self): + return self.__next__() + def update(self, values): """Make the model object behave like a dict.""" for k, v in six.iteritems(values): diff --git a/murano/openstack/common/db/sqlalchemy/provision.py b/murano/openstack/common/db/sqlalchemy/provision.py index 8284534a..78c5db25 100644 --- a/murano/openstack/common/db/sqlalchemy/provision.py +++ b/murano/openstack/common/db/sqlalchemy/provision.py @@ -16,6 +16,7 @@ """Provision test environment for specific DB backends""" import argparse +import logging import os import random import string @@ -26,23 +27,12 @@ import sqlalchemy from murano.openstack.common.db import exception as exc -SQL_CONNECTION = os.getenv('OS_TEST_DBAPI_ADMIN_CONNECTION', 'sqlite://') +LOG = logging.getLogger(__name__) -def _gen_credentials(*names): - """Generate credentials.""" - auth_dict = {} - for name in names: - val = ''.join(random.choice(string.ascii_lowercase) - for i in moves.range(10)) - auth_dict[name] = val - return auth_dict - - -def _get_engine(uri=SQL_CONNECTION): +def get_engine(uri): """Engine creation - By default the uri is SQL_CONNECTION which is admin credentials. Call the function without arguments to get admin connection. Admin connection required to create temporary user and database for each particular test. 
Otherwise use existing connection to recreate connection @@ -62,50 +52,43 @@ def _execute_sql(engine, sql, driver): except sqlalchemy.exc.OperationalError: msg = ('%s does not match database admin ' 'credentials or database does not exist.') - raise exc.DBConnectionError(msg % SQL_CONNECTION) + LOG.exception(msg % engine.url) + raise exc.DBConnectionError(msg % engine.url) def create_database(engine): """Provide temporary user and database for each particular test.""" driver = engine.name - auth = _gen_credentials('database', 'user', 'passwd') - - sqls = { - 'mysql': [ - "drop database if exists %(database)s;", - "grant all on %(database)s.* to '%(user)s'@'localhost'" - " identified by '%(passwd)s';", - "create database %(database)s;", - ], - 'postgresql': [ - "drop database if exists %(database)s;", - "drop user if exists %(user)s;", - "create user %(user)s with password '%(passwd)s';", - "create database %(database)s owner %(user)s;", - ] + auth = { + 'database': ''.join(random.choice(string.ascii_lowercase) + for i in moves.range(10)), + 'user': engine.url.username, + 'passwd': engine.url.password, } + sqls = [ + "drop database if exists %(database)s;", + "create database %(database)s;" + ] + if driver == 'sqlite': return 'sqlite:////tmp/%s' % auth['database'] - - try: - sql_rows = sqls[driver] - except KeyError: + elif driver in ['mysql', 'postgresql']: + sql_query = map(lambda x: x % auth, sqls) + _execute_sql(engine, sql_query, driver) + else: raise ValueError('Unsupported RDBMS %s' % driver) - sql_query = map(lambda x: x % auth, sql_rows) - - _execute_sql(engine, sql_query, driver) params = auth.copy() params['backend'] = driver return "%(backend)s://%(user)s:%(passwd)s@localhost/%(database)s" % params -def drop_database(engine, current_uri): +def drop_database(admin_engine, current_uri): """Drop temporary database and user after each particular test.""" - engine = _get_engine(current_uri) - admin_engine = _get_engine() + + engine = get_engine(current_uri) 
driver = engine.name auth = {'database': engine.url.database, 'user': engine.url.username} @@ -114,26 +97,11 @@ def drop_database(engine, current_uri): os.remove(auth['database']) except OSError: pass - return - - sqls = { - 'mysql': [ - "drop database if exists %(database)s;", - "drop user '%(user)s'@'localhost';", - ], - 'postgresql': [ - "drop database if exists %(database)s;", - "drop user if exists %(user)s;", - ] - } - - try: - sql_rows = sqls[driver] - except KeyError: + elif driver in ['mysql', 'postgresql']: + sql = "drop database if exists %(database)s;" + _execute_sql(admin_engine, [sql % auth], driver) + else: raise ValueError('Unsupported RDBMS %s' % driver) - sql_query = map(lambda x: x % auth, sql_rows) - - _execute_sql(admin_engine, sql_query, driver) def main(): @@ -172,7 +140,9 @@ def main(): args = parser.parse_args() - engine = _get_engine() + connection_string = os.getenv('OS_TEST_DBAPI_ADMIN_CONNECTION', + 'sqlite://') + engine = get_engine(connection_string) which = args.which if which == "create": diff --git a/murano/openstack/common/db/sqlalchemy/session.py b/murano/openstack/common/db/sqlalchemy/session.py index 41360b41..170fb2cf 100644 --- a/murano/openstack/common/db/sqlalchemy/session.py +++ b/murano/openstack/common/db/sqlalchemy/session.py @@ -291,7 +291,7 @@ from sqlalchemy.pool import NullPool, StaticPool from sqlalchemy.sql.expression import literal_column from murano.openstack.common.db import exception -from murano.openstack.common.gettextutils import _LE, _LW, _LI +from murano.openstack.common.gettextutils import _LE, _LW from murano.openstack.common import timeutils @@ -305,7 +305,6 @@ class SqliteForeignKeysListener(PoolListener): so the foreign key constraints will be enabled here for every database connection """ - def connect(self, dbapi_con, con_record): dbapi_con.execute('pragma foreign_keys=ON') @@ -368,7 +367,7 @@ def _raise_if_duplicate_entry_error(integrity_error, engine_name): return [columns] return 
columns[len(uniqbase):].split("0")[1:] - if engine_name not in ["ibm_db_sa", "mysql", "sqlite", "postgresql"]: + if engine_name not in ("ibm_db_sa", "mysql", "sqlite", "postgresql"): return # FIXME(johannes): The usage of the .message attribute has been @@ -417,7 +416,7 @@ def _raise_if_deadlock_error(operational_error, engine_name): re = _DEADLOCK_RE_DB.get(engine_name) if re is None: return - # FIXME(johannes): The usage of the .message attribute has been + # FIXME(johannes): The usage of the .message attribute has been # deprecated since Python 2.6. However, the exceptions raised by # SQLAlchemy can differ when using unicode() and accessing .message. # An audit across all three supported engines will be necessary to @@ -460,7 +459,6 @@ def _wrap_db_error(f): except Exception as e: LOG.exception(_LE('DB exception wrapped.')) raise exception.DBError(e) - return _wrap @@ -475,7 +473,6 @@ def _add_regexp_listener(dbapi_con, con_record): def regexp(expr, item): reg = re.compile(expr) return reg.search(six.text_type(item)) is not None - dbapi_con.create_function('regexp', 2, regexp) @@ -492,7 +489,7 @@ def _thread_yield(dbapi_con, con_record): def _ping_listener(engine, dbapi_conn, connection_rec, connection_proxy): - """Ensures that MySQL and DB2 connections are alive. + """Ensures that MySQL, PostgreSQL or DB2 connections are alive. 
Borrowed from: http://groups.google.com/group/sqlalchemy/msg/a4ce563d802c929f @@ -508,13 +505,20 @@ def _ping_listener(engine, dbapi_conn, connection_rec, connection_proxy): if engine.dialect.is_disconnect(ex, dbapi_conn, cursor): msg = _LW('Database server has gone away: %s') % ex LOG.warning(msg) + + # if the database server has gone away, all connections in the pool + # have become invalid and we can safely close all of them here, + # rather than waste time on checking of every single connection + engine.dispose() + + # this will be handled by SQLAlchemy and will force it to create + # a new connection and retry the original action raise sqla_exc.DisconnectionError(msg) else: raise -def _set_session_sql_mode(dbapi_con, connection_rec, - connection_proxy, sql_mode=None): +def _set_session_sql_mode(dbapi_con, connection_rec, sql_mode=None): """Set the sql_mode session variable. MySQL supports several server modes. The default is None, but sessions @@ -523,30 +527,54 @@ def _set_session_sql_mode(dbapi_con, connection_rec, Note: passing in '' (empty string) for sql_mode clears the SQL mode for the session, overriding a potentially set - server default. Passing in None (the default) makes this - a no-op, meaning if a server-side SQL mode is set, it still applies. + server default. """ - cursor = dbapi_con.cursor() - if sql_mode is not None: - cursor.execute("SET SESSION sql_mode = %s", [sql_mode]) - # Check against the real effective SQL mode. Even when unset by + cursor = dbapi_con.cursor() + cursor.execute("SET SESSION sql_mode = %s", [sql_mode]) + + +def _mysql_get_effective_sql_mode(engine): + """Returns the effective SQL mode for connections from the engine pool. + + Returns ``None`` if the mode isn't available, otherwise returns the mode. + + """ + # Get the real effective SQL mode. 
Even when unset by # our own config, the server may still be operating in a specific - # SQL mode as set by the server configuration - cursor.execute("SHOW VARIABLES LIKE 'sql_mode'") - row = cursor.fetchone() + # SQL mode as set by the server configuration. + # Also note that the checkout listener will be called on execute to + # set the mode if it's registered. + row = engine.execute("SHOW VARIABLES LIKE 'sql_mode'").fetchone() if row is None: + return + return row[1] + + +def _mysql_check_effective_sql_mode(engine): + """Logs a message based on the effective SQL mode for MySQL connections.""" + realmode = _mysql_get_effective_sql_mode(engine) + + if realmode is None: LOG.warning(_LW('Unable to detect effective SQL mode')) return - realmode = row[1] - LOG.info(_LI('MySQL server mode set to %s') % realmode) + + LOG.debug('MySQL server mode set to %s', realmode) # 'TRADITIONAL' mode enables several other modes, so # we need a substring match here if not ('TRADITIONAL' in realmode.upper() or 'STRICT_ALL_TABLES' in realmode.upper()): LOG.warning(_LW("MySQL SQL mode is '%s', " - "consider enabling TRADITIONAL or STRICT_ALL_TABLES") - % realmode) + "consider enabling TRADITIONAL or STRICT_ALL_TABLES"), + realmode) + + +def _mysql_set_mode_callback(engine, sql_mode): + if sql_mode is not None: + mode_callback = functools.partial(_set_session_sql_mode, + sql_mode=sql_mode) + sqlalchemy.event.listen(engine, 'connect', mode_callback) + _mysql_check_effective_sql_mode(engine) def _is_db_connection_error(args): @@ -617,14 +645,12 @@ def create_engine(sql_connection, sqlite_fk=False, mysql_sql_mode=None, sqlalchemy.event.listen(engine, 'checkin', _thread_yield) - if engine.name in ['mysql', 'ibm_db_sa']: + if engine.name in ('ibm_db_sa', 'mysql', 'postgresql'): ping_callback = functools.partial(_ping_listener, engine) sqlalchemy.event.listen(engine, 'checkout', ping_callback) if engine.name == 'mysql': if mysql_sql_mode: - mode_callback = 
functools.partial(_set_session_sql_mode, - sql_mode=mysql_sql_mode) - sqlalchemy.event.listen(engine, 'checkout', mode_callback) + _mysql_set_mode_callback(engine, mysql_sql_mode) elif 'sqlite' in connection_dict.drivername: if not sqlite_synchronous: sqlalchemy.event.listen(engine, 'connect', @@ -661,7 +687,6 @@ def create_engine(sql_connection, sqlite_fk=False, mysql_sql_mode=None, class Query(sqlalchemy.orm.query.Query): """Subclass of sqlalchemy.query with soft_delete() method.""" - def soft_delete(self, synchronize_session='evaluate'): return self.update({'deleted': literal_column('id'), 'updated_at': literal_column('updated_at'), @@ -671,7 +696,6 @@ class Query(sqlalchemy.orm.query.Query): class Session(sqlalchemy.orm.session.Session): """Custom Session class to avoid SqlAlchemy Session monkey patching.""" - @_wrap_db_error def query(self, *args, **kwargs): return super(Session, self).query(*args, **kwargs) @@ -716,10 +740,10 @@ def _patch_mysqldb_with_stacktrace_comments(): continue if filename.endswith('exception.py') and method == '_wrap': continue - # db/api is just a wrapper around db/sqlalchemy/api + # db/api is just a wrapper around db/sqlalchemy/api if filename.endswith('db/api.py'): continue - # only trace inside murano + # only trace inside murano index = filename.rfind('murano') if index == -1: continue @@ -738,18 +762,16 @@ def _patch_mysqldb_with_stacktrace_comments(): class EngineFacade(object): - """A helper class for removing of global engine instances from - murano.db. + """A helper class for removing of global engine instances from murano.db. - As a library, murano.db can't decide where to store/when to create - engine and sessionmaker instances, so this must be left for a target - application. + As a library, murano.db can't decide where to store/when to create engine + and sessionmaker instances, so this must be left for a target application. 
- On the other hand, in order to simplify the adoption of murano.db - changes, we'll provide a helper class, which creates engine and - sessionmaker on its instantiation and provides get_engine()/get_session() - methods that are compatible with corresponding utility functions that - currently exist in target projects, e.g. in Nova. + On the other hand, in order to simplify the adoption of murano.db changes, + we'll provide a helper class, which creates engine and sessionmaker + on its instantiation and provides get_engine()/get_session() methods + that are compatible with corresponding utility functions that currently + exist in target projects, e.g. in Nova. engine/sessionmaker instances will still be global (and they are meant to be global), but they will be stored in the app context, rather that in the @@ -759,11 +781,12 @@ class EngineFacade(object): integrate engine/sessionmaker instances into your apps any way you like (e.g. one might want to bind a session to a request context). Two important things to remember: - 1. An Engine instance is effectively a pool of DB connections, so it's - meant to be shared (and it's thread-safe). - 2. A Session instance is not meant to be shared and represents a DB - transactional context (i.e. it's not thread-safe). sessionmaker is - a factory of sessions. + + 1. An Engine instance is effectively a pool of DB connections, so it's + meant to be shared (and it's thread-safe). + 2. A Session instance is not meant to be shared and represents a DB + transactional context (i.e. it's not thread-safe). sessionmaker is + a factory of sessions. 
""" diff --git a/murano/openstack/common/db/sqlalchemy/test_base.py b/murano/openstack/common/db/sqlalchemy/test_base.py index 95b97e8a..d37f3cf4 100644 --- a/murano/openstack/common/db/sqlalchemy/test_base.py +++ b/murano/openstack/common/db/sqlalchemy/test_base.py @@ -18,11 +18,12 @@ import functools import os import fixtures +from oslotest import base as test_base import six +from murano.openstack.common.db.sqlalchemy import provision from murano.openstack.common.db.sqlalchemy import session from murano.openstack.common.db.sqlalchemy import utils -from murano.openstack.common import test class DbFixture(fixtures.Fixture): @@ -42,15 +43,17 @@ class DbFixture(fixtures.Fixture): self.test = test + def cleanUp(self): + self.test.engine.dispose() + def setUp(self): super(DbFixture, self).setUp() self.test.engine = session.create_engine(self._get_uri()) self.test.sessionmaker = session.get_maker(self.test.engine) - self.addCleanup(self.test.engine.dispose) -class DbTestCase(test.BaseTestCase): +class DbTestCase(test_base.BaseTestCase): """Base class for testing of DB code. Using `DbFixture`. 
Intended to be the main database test case to use all @@ -102,11 +105,24 @@ class OpportunisticFixture(DbFixture): DRIVER = abc.abstractproperty(lambda: None) DBNAME = PASSWORD = USERNAME = 'openstack_citest' + def setUp(self): + self._provisioning_engine = provision.get_engine( + utils.get_connect_string(backend=self.DRIVER, + user=self.USERNAME, + passwd=self.PASSWORD, + database=self.DBNAME) + ) + self._uri = provision.create_database(self._provisioning_engine) + + super(OpportunisticFixture, self).setUp() + + def cleanUp(self): + super(OpportunisticFixture, self).cleanUp() + + provision.drop_database(self._provisioning_engine, self._uri) + def _get_uri(self): - return utils.get_connect_string(backend=self.DRIVER, - user=self.USERNAME, - passwd=self.PASSWORD, - database=self.DBNAME) + return self._uri @six.add_metaclass(abc.ABCMeta) diff --git a/murano/openstack/common/db/sqlalchemy/test_migrations.py b/murano/openstack/common/db/sqlalchemy/test_migrations.py index 8fd9a658..9a883a34 100644 --- a/murano/openstack/common/db/sqlalchemy/test_migrations.py +++ b/murano/openstack/common/db/sqlalchemy/test_migrations.py @@ -20,6 +20,7 @@ import os import subprocess import lockfile +from oslotest import base as test_base from six import moves from six.moves.urllib import parse import sqlalchemy @@ -27,7 +28,6 @@ import sqlalchemy.exc from murano.openstack.common.db.sqlalchemy import utils from murano.openstack.common.gettextutils import _LE -from murano.openstack.common import test LOG = logging.getLogger(__name__) @@ -57,7 +57,7 @@ def _set_db_lock(lock_path=None, lock_prefix=None): @functools.wraps(f) def wrapper(*args, **kwargs): try: - path = lock_path or os.environ.get("MURANOAPI_LOCK_PATH") + path = lock_path or os.environ.get("MURANO_LOCK_PATH") lock = lockfile.FileLock(os.path.join(path, lock_prefix)) with lock: LOG.debug('Got lock "%s"' % f.__name__) @@ -68,7 +68,7 @@ def _set_db_lock(lock_path=None, lock_prefix=None): return decorator -class 
BaseMigrationTestCase(test.BaseTestCase): +class BaseMigrationTestCase(test_base.BaseTestCase): """Base class fort testing of migration utils.""" def __init__(self, *args, **kwargs): diff --git a/murano/openstack/common/db/sqlalchemy/utils.py b/murano/openstack/common/db/sqlalchemy/utils.py index b4f31a4a..56d92f8b 100644 --- a/murano/openstack/common/db/sqlalchemy/utils.py +++ b/murano/openstack/common/db/sqlalchemy/utils.py @@ -19,7 +19,6 @@ import logging import re -from migrate.changeset import UniqueConstraint import sqlalchemy from sqlalchemy import Boolean from sqlalchemy import CheckConstraint @@ -33,7 +32,6 @@ from sqlalchemy import MetaData from sqlalchemy import or_ from sqlalchemy.sql.expression import literal_column from sqlalchemy.sql.expression import UpdateBase -from sqlalchemy.sql import select from sqlalchemy import String from sqlalchemy import Table from sqlalchemy.types import NullType @@ -218,6 +216,9 @@ def model_query(context, model, session, args=None, project_only=False, :type read_deleted: bool Usage: + + ..code:: python + result = (utils.model_query(context, models.Instance, session=session) .filter_by(uuid=instance_uuid) .all()) @@ -226,7 +227,8 @@ def model_query(context, model, session, args=None, project_only=False, context, Node, session=session, args=(func.count(Node.id), func.sum(Node.ram)) - ).filter_by(project_id=project_id) + ).filter_by(project_id=project_id) + """ if not read_deleted: @@ -252,6 +254,14 @@ def get_table(engine, name): Needed because the models don't work for us in migrations as models will be far out of sync with the current data. + + .. warning:: + + Do not use this method when creating ForeignKeys in database migrations + because sqlalchemy needs the same MetaData object to hold information + about the parent table and the reference table in the ForeignKey. This + method uses a unique MetaData object per table object so it won't work + with ForeignKey creation. 
""" metadata = MetaData() metadata.bind = engine @@ -298,6 +308,10 @@ def drop_unique_constraint(migrate_engine, table_name, uc_name, *columns, **col_name_col_instance): """Drop unique constraint from table. + DEPRECATED: this function is deprecated and will be removed from murano.db + in a few releases. Please use UniqueConstraint.drop() method directly for + sqlalchemy-migrate migration scripts. + This method drops UC from table and works for mysql, postgresql and sqlite. In mysql and postgresql we are able to use "alter table" construction. Sqlalchemy doesn't support some sqlite column types and replaces their @@ -314,6 +328,8 @@ def drop_unique_constraint(migrate_engine, table_name, uc_name, *columns, types by sqlite. For example BigInteger. """ + from migrate.changeset import UniqueConstraint + meta = MetaData() meta.bind = migrate_engine t = Table(table_name, meta, autoload=True) @@ -353,9 +369,9 @@ def drop_old_duplicate_entries_from_table(migrate_engine, table_name, columns_for_select = [func.max(table.c.id)] columns_for_select.extend(columns_for_group_by) - duplicated_rows_select = select(columns_for_select, - group_by=columns_for_group_by, - having=func.count(table.c.id) > 1) + duplicated_rows_select = sqlalchemy.sql.select( + columns_for_select, group_by=columns_for_group_by, + having=func.count(table.c.id) > 1) for row in migrate_engine.execute(duplicated_rows_select): # NOTE(boris-42): Do not remove row that has the biggest ID. 
@@ -365,7 +381,8 @@ def drop_old_duplicate_entries_from_table(migrate_engine, table_name, for name in uc_column_names: delete_condition &= table.c[name] == row[name] - rows_to_delete_select = select([table.c.id]).where(delete_condition) + rows_to_delete_select = sqlalchemy.sql.select( + [table.c.id]).where(delete_condition) for row in migrate_engine.execute(rows_to_delete_select).fetchall(): LOG.info(_LI("Deleting duplicated row with id: %(id)s from table: " "%(table)s") % dict(id=row[0], table=table_name)) @@ -476,7 +493,7 @@ def _change_deleted_column_type_to_boolean_sqlite(migrate_engine, table_name, else: c_select.append(table.c.deleted == table.c.id) - ins = InsertFromSelect(new_table, select(c_select)) + ins = InsertFromSelect(new_table, sqlalchemy.sql.select(c_select)) migrate_engine.execute(ins) table.drop() diff --git a/murano/openstack/common/eventlet_backdoor.py b/murano/openstack/common/eventlet_backdoor.py index 77e4ebf5..842f9d44 100644 --- a/murano/openstack/common/eventlet_backdoor.py +++ b/murano/openstack/common/eventlet_backdoor.py @@ -41,7 +41,6 @@ help_for_backdoor_port = ( "chosen port is displayed in the service's log file.") eventlet_backdoor_opts = [ cfg.StrOpt('backdoor_port', - default=None, help="Enable eventlet backdoor. 
%s" % help_for_backdoor_port) ] @@ -102,7 +101,8 @@ def _listen(host, start_port, end_port, listen_func): try: return listen_func((host, try_port)) except socket.error as exc: - if exc.errno != errno.EADDRINUSE or try_port >= end_port: + if (exc.errno != errno.EADDRINUSE or + try_port >= end_port): raise try_port += 1 diff --git a/murano/openstack/common/excutils.py b/murano/openstack/common/excutils.py index cc65e0c0..7d996e07 100644 --- a/murano/openstack/common/excutils.py +++ b/murano/openstack/common/excutils.py @@ -49,9 +49,22 @@ class save_and_reraise_exception(object): decide_if_need_reraise() if not should_be_reraised: ctxt.reraise = False + + If another exception occurs and reraise flag is False, + the saved exception will not be logged. + + If the caller wants to raise new exception during exception handling + he/she sets reraise to False initially with an ability to set it back to + True if needed:: + + except Exception: + with save_and_reraise_exception(reraise=False) as ctxt: + [if statements to determine whether to raise a new exception] + # Not raising a new exception, so reraise + ctxt.reraise = True """ - def __init__(self): - self.reraise = True + def __init__(self, reraise=True): + self.reraise = reraise def __enter__(self): self.type_, self.value, self.tb, = sys.exc_info() @@ -59,10 +72,11 @@ class save_and_reraise_exception(object): def __exit__(self, exc_type, exc_val, exc_tb): if exc_type is not None: - logging.error(_LE('Original exception being dropped: %s'), - traceback.format_exception(self.type_, - self.value, - self.tb)) + if self.reraise: + logging.error(_LE('Original exception being dropped: %s'), + traceback.format_exception(self.type_, + self.value, + self.tb)) return False if self.reraise: six.reraise(self.type_, self.value, self.tb) diff --git a/murano/openstack/common/fileutils.py b/murano/openstack/common/fileutils.py index c25fcf80..c128e1b5 100644 --- a/murano/openstack/common/fileutils.py +++ 
b/murano/openstack/common/fileutils.py @@ -105,7 +105,7 @@ def file_open(*args, **kwargs): be able to provide a stub module that doesn't alter system state at all (for unit tests) """ - return file(*args, **kwargs) + return open(*args, **kwargs) def write_to_tempfile(content, path=None, suffix='', prefix='tmp'): diff --git a/murano/openstack/common/gettextutils.py b/murano/openstack/common/gettextutils.py index f8af7435..2a8ab1bd 100644 --- a/murano/openstack/common/gettextutils.py +++ b/murano/openstack/common/gettextutils.py @@ -28,29 +28,117 @@ import gettext import locale from logging import handlers import os -import re from babel import localedata import six -_localedir = os.environ.get('murano'.upper() + '_LOCALEDIR') -_t = gettext.translation('murano', localedir=_localedir, fallback=True) - -# We use separate translation catalogs for each log level, so set up a -# mapping between the log level name and the translator. The domain -# for the log level is project_name + "-log-" + log_level so messages -# for each level end up in their own catalog. -_t_log_levels = dict( - (level, gettext.translation('murano' + '-log-' + level, - localedir=_localedir, - fallback=True)) - for level in ['info', 'warning', 'error', 'critical'] -) - _AVAILABLE_LANGUAGES = {} + +# FIXME(dhellmann): Remove this when moving to oslo.i18n. USE_LAZY = False +class TranslatorFactory(object): + """Create translator functions + """ + + def __init__(self, domain, lazy=False, localedir=None): + """Establish a set of translation functions for the domain. + + :param domain: Name of translation domain, + specifying a message catalog. + :type domain: str + :param lazy: Delays translation until a message is emitted. + Defaults to False. + :type lazy: Boolean + :param localedir: Directory with translation catalogs. 
+ :type localedir: str + """ + self.domain = domain + self.lazy = lazy + if localedir is None: + localedir = os.environ.get(domain.upper() + '_LOCALEDIR') + self.localedir = localedir + + def _make_translation_func(self, domain=None): + """Return a new translation function ready for use. + + Takes into account whether or not lazy translation is being + done. + + The domain can be specified to override the default from the + factory, but the localedir from the factory is always used + because we assume the log-level translation catalogs are + installed in the same directory as the main application + catalog. + + """ + if domain is None: + domain = self.domain + if self.lazy: + return functools.partial(Message, domain=domain) + t = gettext.translation( + domain, + localedir=self.localedir, + fallback=True, + ) + if six.PY3: + return t.gettext + return t.ugettext + + @property + def primary(self): + "The default translation function." + return self._make_translation_func() + + def _make_log_translation_func(self, level): + return self._make_translation_func(self.domain + '-log-' + level) + + @property + def log_info(self): + "Translate info-level log messages." + return self._make_log_translation_func('info') + + @property + def log_warning(self): + "Translate warning-level log messages." + return self._make_log_translation_func('warning') + + @property + def log_error(self): + "Translate error-level log messages." + return self._make_log_translation_func('error') + + @property + def log_critical(self): + "Translate critical-level log messages." + return self._make_log_translation_func('critical') + + +# NOTE(dhellmann): When this module moves out of the incubator into +# oslo.i18n, these global variables can be moved to an integration +# module within each application. + +# Create the global translation functions. 
+_translators = TranslatorFactory('murano') + +# The primary translation function using the well-known name "_" +_ = _translators.primary + +# Translators for log levels. +# +# The abbreviated names are meant to reflect the usual use of a short +# name like '_'. The "L" is for "log" and the other letter comes from +# the level. +_LI = _translators.log_info +_LW = _translators.log_warning +_LE = _translators.log_error +_LC = _translators.log_critical + +# NOTE(dhellmann): End of globals that will move to the application's +# integration module. + + def enable_lazy(): """Convenience function for configuring _() to use lazy gettext @@ -59,41 +147,18 @@ def enable_lazy(): your project is importing _ directly instead of using the gettextutils.install() way of importing the _ function. """ - global USE_LAZY + # FIXME(dhellmann): This function will be removed in oslo.i18n, + # because the TranslatorFactory makes it superfluous. + global _, _LI, _LW, _LE, _LC, USE_LAZY + tf = TranslatorFactory('murano', lazy=True) + _ = tf.primary + _LI = tf.log_info + _LW = tf.log_warning + _LE = tf.log_error + _LC = tf.log_critical USE_LAZY = True -def _(msg): - if USE_LAZY: - return Message(msg, domain='murano') - else: - if six.PY3: - return _t.gettext(msg) - return _t.ugettext(msg) - - -def _log_translation(msg, level): - """Build a single translation of a log message - """ - if USE_LAZY: - return Message(msg, domain='murano' + '-log-' + level) - else: - translator = _t_log_levels[level] - if six.PY3: - return translator.gettext(msg) - return translator.ugettext(msg) - -# Translators for log levels. -# -# The abbreviated names are meant to reflect the usual use of a short -# name like '_'. The "L" is for "log" and the other letter comes from -# the level. 
-_LI = functools.partial(_log_translation, level='info') -_LW = functools.partial(_log_translation, level='warning') -_LE = functools.partial(_log_translation, level='error') -_LC = functools.partial(_log_translation, level='critical') - - def install(domain, lazy=False): """Install a _() function using the given translation domain. @@ -113,26 +178,9 @@ def install(domain, lazy=False): any available locale. """ if lazy: - # NOTE(mrodden): Lazy gettext functionality. - # - # The following introduces a deferred way to do translations on - # messages in OpenStack. We override the standard _() function - # and % (format string) operation to build Message objects that can - # later be translated when we have more information. - def _lazy_gettext(msg): - """Create and return a Message object. - - Lazy gettext function for a given domain, it is a factory method - for a project/module to get a lazy gettext function for its own - translation domain (i.e. nova, glance, cinder, etc.) - - Message encapsulates a string so that we can translate - it later when needed. - """ - return Message(msg, domain=domain) - from six import moves - moves.builtins.__dict__['_'] = _lazy_gettext + tf = TranslatorFactory(domain, lazy=True) + moves.builtins.__dict__['_'] = tf.primary else: localedir = '%s_LOCALEDIR' % domain.upper() if six.PY3: @@ -248,47 +296,22 @@ class Message(six.text_type): if other is None: params = (other,) elif isinstance(other, dict): - params = self._trim_dictionary_parameters(other) + # Merge the dictionaries + # Copy each item in case one does not support deep copy. 
+ params = {} + if isinstance(self.params, dict): + for key, val in self.params.items(): + params[key] = self._copy_param(val) + for key, val in other.items(): + params[key] = self._copy_param(val) else: params = self._copy_param(other) return params - def _trim_dictionary_parameters(self, dict_param): - """Return a dict that only has matching entries in the msgid.""" - # NOTE(luisg): Here we trim down the dictionary passed as parameters - # to avoid carrying a lot of unnecessary weight around in the message - # object, for example if someone passes in Message() % locals() but - # only some params are used, and additionally we prevent errors for - # non-deepcopyable objects by unicoding() them. - - # Look for %(param) keys in msgid; - # Skip %% and deal with the case where % is first character on the line - keys = re.findall('(?:[^%]|^)?%\((\w*)\)[a-z]', self.msgid) - - # If we don't find any %(param) keys but have a %s - if not keys and re.findall('(?:[^%]|^)%[a-z]', self.msgid): - # Apparently the full dictionary is the parameter - params = self._copy_param(dict_param) - else: - params = {} - # Save our existing parameters as defaults to protect - # ourselves from losing values if we are called through an - # (erroneous) chain that builds a valid Message with - # arguments, and then does something like "msg % kwds" - # where kwds is an empty dictionary. 
- src = {} - if isinstance(self.params, dict): - src.update(self.params) - src.update(dict_param) - for key in keys: - params[key] = self._copy_param(src[key]) - - return params - def _copy_param(self, param): try: return copy.deepcopy(param) - except TypeError: + except Exception: # Fallback to casting to unicode this will handle the # python code-like objects that can't be deep-copied return six.text_type(param) @@ -300,13 +323,14 @@ class Message(six.text_type): def __radd__(self, other): return self.__add__(other) - def __str__(self): - # NOTE(luisg): Logging in python 2.6 tries to str() log records, - # and it expects specifically a UnicodeError in order to proceed. - msg = _('Message objects do not support str() because they may ' - 'contain non-ascii characters. ' - 'Please use unicode() or translate() instead.') - raise UnicodeError(msg) + if six.PY2: + def __str__(self): + # NOTE(luisg): Logging in python 2.6 tries to str() log records, + # and it expects specifically a UnicodeError in order to proceed. + msg = _('Message objects do not support str() because they may ' + 'contain non-ascii characters. 
' + 'Please use unicode() or translate() instead.') + raise UnicodeError(msg) def get_available_languages(domain): diff --git a/murano/openstack/common/importutils.py b/murano/openstack/common/importutils.py index 66048d77..adc77946 100644 --- a/murano/openstack/common/importutils.py +++ b/murano/openstack/common/importutils.py @@ -24,10 +24,10 @@ import traceback def import_class(import_str): """Returns a class from a string including module and class.""" mod_str, _sep, class_str = import_str.rpartition('.') + __import__(mod_str) try: - __import__(mod_str) return getattr(sys.modules[mod_str], class_str) - except (ValueError, AttributeError): + except AttributeError: raise ImportError('Class %s cannot be found (%s)' % (class_str, traceback.format_exception(*sys.exc_info()))) diff --git a/murano/openstack/common/jsonutils.py b/murano/openstack/common/jsonutils.py index 1f0e4695..80fb8252 100644 --- a/murano/openstack/common/jsonutils.py +++ b/murano/openstack/common/jsonutils.py @@ -31,17 +31,29 @@ This module provides a few things: ''' +import codecs import datetime import functools import inspect import itertools -import json +import sys + +if sys.version_info < (2, 7): + # On Python <= 2.6, json module is not C boosted, so try to use + # simplejson module if available + try: + import simplejson as json + except ImportError: + import json +else: + import json import six import six.moves.xmlrpc_client as xmlrpclib from murano.openstack.common import gettextutils from murano.openstack.common import importutils +from murano.openstack.common import strutils from murano.openstack.common import timeutils netaddr = importutils.try_import("netaddr") @@ -156,12 +168,12 @@ def dumps(value, default=to_primitive, **kwargs): return json.dumps(value, default=default, **kwargs) -def loads(s): - return json.loads(s) +def loads(s, encoding='utf-8'): + return json.loads(strutils.safe_decode(s, encoding)) -def load(s): - return json.load(s) +def load(fp, encoding='utf-8'): + return 
json.load(codecs.getreader(encoding)(fp)) try: diff --git a/murano/openstack/common/lockutils.py b/murano/openstack/common/lockutils.py index b363a197..4b4b78bd 100644 --- a/murano/openstack/common/lockutils.py +++ b/murano/openstack/common/lockutils.py @@ -38,10 +38,10 @@ LOG = logging.getLogger(__name__) util_opts = [ cfg.BoolOpt('disable_process_locking', default=False, - help='Whether to disable inter-process locks'), + help='Enables or disables inter-process locks.'), cfg.StrOpt('lock_path', - default=os.environ.get("MURANOAPI_LOCK_PATH"), - help=('Directory to use for lock files.')) + default=os.environ.get("MURANO_LOCK_PATH"), + help='Directory to use for lock files.') ] @@ -276,7 +276,7 @@ def lock(name, lock_file_prefix=None, external=False, lock_path=None): :param external: The external keyword argument denotes whether this lock should work across multiple processes. This means that if two different - workers both run a a method decorated with @synchronized('mylock', + workers both run a method decorated with @synchronized('mylock', external=True), only one of them will execute at a time. 
""" int_lock = internal_lock(name) @@ -287,6 +287,7 @@ def lock(name, lock_file_prefix=None, external=False, lock_path=None): yield ext_lock else: yield int_lock + LOG.debug('Released semaphore "%(lock)s"', {'lock': name}) def synchronized(name, lock_file_prefix=None, external=False, lock_path=None): @@ -365,7 +366,7 @@ def main(argv): """ lock_dir = tempfile.mkdtemp() - os.environ["MURANOAPI_LOCK_PATH"] = lock_dir + os.environ["MURANO_LOCK_PATH"] = lock_dir try: ret_val = subprocess.call(argv[1:]) finally: diff --git a/murano/openstack/common/log.py b/murano/openstack/common/log.py index 41f4c4be..f2a39a3a 100644 --- a/murano/openstack/common/log.py +++ b/murano/openstack/common/log.py @@ -59,7 +59,10 @@ _SANITIZE_PATTERNS = [] _FORMAT_PATTERNS = [r'(%(key)s\s*[=]\s*[\"\']).*?([\"\'])', r'(<%(key)s>).*?()', r'([\"\']%(key)s[\"\']\s*:\s*[\"\']).*?([\"\'])', - r'([\'"].*?%(key)s[\'"]\s*:\s*u?[\'"]).*?([\'"])'] + r'([\'"].*?%(key)s[\'"]\s*:\s*u?[\'"]).*?([\'"])', + r'([\'"].*?%(key)s[\'"]\s*,\s*\'--?[A-z]+\'\s*,\s*u?[\'"])' + '.*?([\'"])', + r'(%(key)s\s*--?[A-z]+\s*).*?([\s])'] for key in _SANITIZE_KEYS: for pattern in _FORMAT_PATTERNS: @@ -84,14 +87,11 @@ logging_cli_opts = [ cfg.StrOpt('log-config-append', metavar='PATH', deprecated_name='log-config', - help='The name of logging configuration file. It does not ' - 'disable existing loggers, but just appends specified ' - 'logging configuration to any other existing logging ' - 'options. Please see the Python logging module ' - 'documentation for details on logging configuration ' - 'files.'), + help='The name of a logging configuration file. This file ' + 'is appended to any existing logging configuration ' + 'files. For details about logging configuration files, ' + 'see the Python logging module documentation.'), cfg.StrOpt('log-format', - default=None, metavar='FORMAT', help='DEPRECATED. 
' 'A logging.Formatter log message format string which may ' @@ -103,7 +103,7 @@ logging_cli_opts = [ default=_DEFAULT_LOG_DATE_FORMAT, metavar='DATE_FORMAT', help='Format string for %%(asctime)s in log records. ' - 'Default: %(default)s'), + 'Default: %(default)s .'), cfg.StrOpt('log-file', metavar='PATH', deprecated_name='logfile', @@ -112,30 +112,30 @@ logging_cli_opts = [ cfg.StrOpt('log-dir', deprecated_name='logdir', help='(Optional) The base directory used for relative ' - '--log-file paths'), + '--log-file paths.'), cfg.BoolOpt('use-syslog', default=False, help='Use syslog for logging. ' 'Existing syslog format is DEPRECATED during I, ' - 'and then will be changed in J to honor RFC5424'), + 'and will chang in J to honor RFC5424.'), cfg.BoolOpt('use-syslog-rfc-format', # TODO(bogdando) remove or use True after existing # syslog format deprecation in J default=False, - help='(Optional) Use syslog rfc5424 format for logging. ' - 'If enabled, will add APP-NAME (RFC5424) before the ' - 'MSG part of the syslog message. The old format ' - 'without APP-NAME is deprecated in I, ' + help='(Optional) Enables or disables syslog rfc5424 format ' + 'for logging. If enabled, prefixes the MSG part of the ' + 'syslog message with APP-NAME (RFC5424). 
The ' + 'format without the APP-NAME is deprecated in I, ' 'and will be removed in J.'), cfg.StrOpt('syslog-log-facility', default='LOG_USER', - help='Syslog facility to receive log lines') + help='Syslog facility to receive log lines.') ] generic_log_opts = [ cfg.BoolOpt('use_stderr', default=True, - help='Log output to standard error') + help='Log output to standard error.') ] log_opts = [ @@ -143,18 +143,18 @@ log_opts = [ default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s ' '%(name)s [%(request_id)s %(user_identity)s] ' '%(instance)s%(message)s', - help='Format string to use for log messages with context'), + help='Format string to use for log messages with context.'), cfg.StrOpt('logging_default_format_string', default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s ' '%(name)s [-] %(instance)s%(message)s', - help='Format string to use for log messages without context'), + help='Format string to use for log messages without context.'), cfg.StrOpt('logging_debug_format_suffix', default='%(funcName)s %(pathname)s:%(lineno)d', - help='Data to append to log format when level is DEBUG'), + help='Data to append to log format when level is DEBUG.'), cfg.StrOpt('logging_exception_prefix', default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s ' '%(instance)s', - help='Prefix each line of exception output with this format'), + help='Prefix each line of exception output with this format.'), cfg.ListOpt('default_log_levels', default=[ 'amqp=WARN', @@ -163,32 +163,29 @@ log_opts = [ 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', - 'keystone=INFO', - 'eventlet.wsgi.server=WARN', - 'qpid.messaging=INFO', - 'keystoneclient=INFO', + 'oslo.messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN' ], - help='List of logger=LEVEL pairs'), + help='List of logger=LEVEL pairs.'), cfg.BoolOpt('publish_errors', default=False, - help='Publish error events'), + help='Enables or disables publication of error events.'), 
cfg.BoolOpt('fatal_deprecations', default=False, - help='Make deprecations fatal'), + help='Enables or disables fatal status of deprecations.'), # NOTE(mikal): there are two options here because sometimes we are handed # a full instance (and could include more information), and other times we # are just handed a UUID for the instance. cfg.StrOpt('instance_format', default='[instance: %(uuid)s] ', - help='If an instance is passed with the log message, format ' - 'it like this'), + help='The format for an instance that is passed with the log ' + 'message. '), cfg.StrOpt('instance_uuid_format', default='[instance: %(uuid)s] ', - help='If an instance UUID is passed with the log message, ' - 'format it like this'), + help='The format for an instance UUID that is passed with the ' + 'log message. '), ] CONF = cfg.CONF @@ -454,7 +451,7 @@ def _load_log_config(log_config_append): logging.config.fileConfig(log_config_append, disable_existing_loggers=False) except moves.configparser.Error as exc: - raise LogConfigError(log_config_append, str(exc)) + raise LogConfigError(log_config_append, six.text_type(exc)) def setup(product_name, version='unknown'): @@ -574,9 +571,15 @@ def _setup_logging_from_conf(project, version): for pair in CONF.default_log_levels: mod, _sep, level_name = pair.partition('=') - level = logging.getLevelName(level_name) logger = logging.getLogger(mod) - logger.setLevel(level) + # NOTE(AAzza) in python2.6 Logger.setLevel doesn't convert string name + # to integer code. 
+ if sys.version_info < (2, 7): + level = logging.getLevelName(level_name) + logger.setLevel(level) + else: + logger.setLevel(level_name) + _loggers = {} @@ -660,7 +663,7 @@ class ContextFormatter(logging.Formatter): # NOTE(sdague): default the fancier formatting params # to an empty string so we don't throw an exception if # they get used - for key in ('instance', 'color'): + for key in ('instance', 'color', 'user_identity'): if key not in record.__dict__: record.__dict__[key] = '' diff --git a/murano/openstack/common/loopingcall.py b/murano/openstack/common/loopingcall.py index 8e862e1f..c462504c 100644 --- a/murano/openstack/common/loopingcall.py +++ b/murano/openstack/common/loopingcall.py @@ -28,19 +28,19 @@ LOG = logging.getLogger(__name__) class LoopingCallDone(Exception): - """Exception to break out and stop a LoopingCall. + """Exception to break out and stop a LoopingCallBase. - The poll-function passed to LoopingCall can raise this exception to + The poll-function passed to LoopingCallBase can raise this exception to break out of the loop normally. This is somewhat analogous to StopIteration. An optional return-value can be included as the argument to the exception; - this return-value will be returned by LoopingCall.wait() + this return-value will be returned by LoopingCallBase.wait() """ def __init__(self, retvalue=True): - """:param retvalue: Value that LoopingCall.wait() should return.""" + """:param retvalue: Value that LoopingCallBase.wait() should return.""" self.retvalue = retvalue @@ -98,11 +98,6 @@ class FixedIntervalLoopingCall(LoopingCallBase): return self.done -# TODO(mikal): this class name is deprecated in Havana and should be removed -# in the I release -LoopingCall = FixedIntervalLoopingCall - - class DynamicLoopingCall(LoopingCallBase): """A looping call which sleeps until the next known event. 
diff --git a/murano/openstack/common/policy.py b/murano/openstack/common/policy.py new file mode 100644 index 00000000..fe34c470 --- /dev/null +++ b/murano/openstack/common/policy.py @@ -0,0 +1,898 @@ +# Copyright (c) 2012 OpenStack Foundation. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +""" +Common Policy Engine Implementation + +Policies can be expressed in one of two forms: A list of lists, or a +string written in the new policy language. + +In the list-of-lists representation, each check inside the innermost +list is combined as with an "and" conjunction--for that check to pass, +all the specified checks must pass. These innermost lists are then +combined as with an "or" conjunction. This is the original way of +expressing policies, but there now exists a new way: the policy +language. + +In the policy language, each check is specified the same way as in the +list-of-lists representation: a simple "a:b" pair that is matched to +the correct code to perform that check. However, conjunction +operators are available, allowing for more expressiveness in crafting +policies. 
+ +As an example, take the following rule, expressed in the list-of-lists +representation:: + + [["role:admin"], ["project_id:%(project_id)s", "role:projectadmin"]] + +In the policy language, this becomes:: + + role:admin or (project_id:%(project_id)s and role:projectadmin) + +The policy language also has the "not" operator, allowing a richer +policy rule:: + + project_id:%(project_id)s and not role:dunce + +It is possible to perform policy checks on the following user +attributes (obtained through the token): user_id, domain_id or +project_id:: + + domain_id: + +Attributes sent along with API calls can be used by the policy engine +(on the right side of the expression), by using the following syntax:: + + :user.id + +Contextual attributes of objects identified by their IDs are loaded +from the database. They are also available to the policy engine and +can be checked through the `target` keyword:: + + :target.role.name + +All these attributes (related to users, API calls, and context) can be +checked against each other or against constants, be it literals (True, +) or strings. + +Finally, two special policy checks should be mentioned; the policy +check "@" will always accept an access, and the policy check "!" will +always reject an access. (Note that if a rule is either the empty +list ("[]") or the empty string, this is equivalent to the "@" policy +check.) Of these, the "!" policy check is probably the most useful, +as it allows particular rules to be explicitly disabled. 
+""" + +import abc +import ast +import re + +from oslo.config import cfg +import six +import six.moves.urllib.parse as urlparse +import six.moves.urllib.request as urlrequest + +from murano.openstack.common import fileutils +from murano.openstack.common.gettextutils import _, _LE +from murano.openstack.common import jsonutils +from murano.openstack.common import log as logging + + +policy_opts = [ + cfg.StrOpt('policy_file', + default='policy.json', + help=_('The JSON file that defines policies.')), + cfg.StrOpt('policy_default_rule', + default='default', + help=_('Default rule. Enforced when a requested rule is not ' + 'found.')), +] + +CONF = cfg.CONF +CONF.register_opts(policy_opts) + +LOG = logging.getLogger(__name__) + +_checks = {} + + +class PolicyNotAuthorized(Exception): + + def __init__(self, rule): + msg = _("Policy doesn't allow %s to be performed.") % rule + super(PolicyNotAuthorized, self).__init__(msg) + + +class Rules(dict): + """A store for rules. Handles the default_rule setting directly.""" + + @classmethod + def load_json(cls, data, default_rule=None): + """Allow loading of JSON rule data.""" + + # Suck in the JSON data and parse the rules + rules = dict((k, parse_rule(v)) for k, v in + jsonutils.loads(data).items()) + + return cls(rules, default_rule) + + def __init__(self, rules=None, default_rule=None): + """Initialize the Rules store.""" + + super(Rules, self).__init__(rules or {}) + self.default_rule = default_rule + + def __missing__(self, key): + """Implements the default rule handling.""" + + if isinstance(self.default_rule, dict): + raise KeyError(key) + + # If the default rule isn't actually defined, do something + # reasonably intelligent + if not self.default_rule: + raise KeyError(key) + + if isinstance(self.default_rule, BaseCheck): + return self.default_rule + + # We need to check this or we can get infinite recursion + if self.default_rule not in self: + raise KeyError(key) + + elif isinstance(self.default_rule, 
six.string_types): + return self[self.default_rule] + + def __str__(self): + """Dumps a string representation of the rules.""" + + # Start by building the canonical strings for the rules + out_rules = {} + for key, value in self.items(): + # Use empty string for singleton TrueCheck instances + if isinstance(value, TrueCheck): + out_rules[key] = '' + else: + out_rules[key] = str(value) + + # Dump a pretty-printed JSON representation + return jsonutils.dumps(out_rules, indent=4) + + +class Enforcer(object): + """Responsible for loading and enforcing rules. + + :param policy_file: Custom policy file to use, if none is + specified, `CONF.policy_file` will be + used. + :param rules: Default dictionary / Rules to use. It will be + considered just in the first instantiation. If + `load_rules(True)`, `clear()` or `set_rules(True)` + is called this will be overwritten. + :param default_rule: Default rule to use, CONF.default_rule will + be used if none is specified. + :param use_conf: Whether to load rules from cache or config file. + """ + + def __init__(self, policy_file=None, rules=None, + default_rule=None, use_conf=True): + self.rules = Rules(rules, default_rule) + self.default_rule = default_rule or CONF.policy_default_rule + + self.policy_path = None + self.policy_file = policy_file or CONF.policy_file + self.use_conf = use_conf + + def set_rules(self, rules, overwrite=True, use_conf=False): + """Create a new Rules object based on the provided dict of rules. + + :param rules: New rules to use. It should be an instance of dict. + :param overwrite: Whether to overwrite current rules or update them + with the new rules. + :param use_conf: Whether to reload rules from cache or config file. 
+ """ + + if not isinstance(rules, dict): + raise TypeError(_("Rules must be an instance of dict or Rules, " + "got %s instead") % type(rules)) + self.use_conf = use_conf + if overwrite: + self.rules = Rules(rules, self.default_rule) + else: + self.rules.update(rules) + + def clear(self): + """Clears Enforcer rules, policy's cache and policy's path.""" + self.set_rules({}) + self.default_rule = None + self.policy_path = None + + def load_rules(self, force_reload=False): + """Loads policy_path's rules. + + Policy file is cached and will be reloaded if modified. + + :param force_reload: Whether to overwrite current rules. + """ + + if force_reload: + self.use_conf = force_reload + + if self.use_conf: + if not self.policy_path: + self.policy_path = self._get_policy_path() + + reloaded, data = fileutils.read_cached_file( + self.policy_path, force_reload=force_reload) + if reloaded or not self.rules: + rules = Rules.load_json(data, self.default_rule) + self.set_rules(rules) + LOG.debug("Rules successfully reloaded") + + def _get_policy_path(self): + """Locate the policy json data file. + + :param policy_file: Custom policy file to locate. + + :returns: The policy path + + :raises: ConfigFilesNotFoundError if the file couldn't + be located. + """ + policy_file = CONF.find_file(self.policy_file) + + if policy_file: + return policy_file + + raise cfg.ConfigFilesNotFoundError((self.policy_file,)) + + def enforce(self, rule, target, creds, do_raise=False, + exc=None, *args, **kwargs): + """Checks authorization of a rule against the target and credentials. + + :param rule: A string or BaseCheck instance specifying the rule + to evaluate. + :param target: As much information about the object being operated + on as possible, as a dictionary. + :param creds: As much information about the user performing the + action as possible, as a dictionary. + :param do_raise: Whether to raise an exception or not if check + fails. 
+ :param exc: Class of the exception to raise if the check fails. + Any remaining arguments passed to check() (both + positional and keyword arguments) will be passed to + the exception class. If not specified, PolicyNotAuthorized + will be used. + + :return: Returns False if the policy does not allow the action and + exc is not provided; otherwise, returns a value that + evaluates to True. Note: for rules using the "case" + expression, this True value will be the specified string + from the expression. + """ + + # NOTE(flaper87): Not logging target or creds to avoid + # potential security issues. + LOG.debug("Rule %s will be now enforced" % rule) + + self.load_rules() + + # Allow the rule to be a Check tree + if isinstance(rule, BaseCheck): + result = rule(target, creds, self) + elif not self.rules: + # No rules to reference means we're going to fail closed + result = False + else: + try: + # Evaluate the rule + result = self.rules[rule](target, creds, self) + except KeyError: + LOG.debug("Rule [%s] doesn't exist" % rule) + # If the rule doesn't exist, fail closed + result = False + + # If it is False, raise the exception if requested + if do_raise and not result: + if exc: + raise exc(*args, **kwargs) + + raise PolicyNotAuthorized(rule) + + return result + + +@six.add_metaclass(abc.ABCMeta) +class BaseCheck(object): + """Abstract base class for Check classes.""" + + @abc.abstractmethod + def __str__(self): + """String representation of the Check tree rooted at this node.""" + + pass + + @abc.abstractmethod + def __call__(self, target, cred, enforcer): + """Triggers if instance of the class is called. + + Performs the check. Returns False to reject the access or a + true value (not necessary True) to accept the access. + """ + + pass + + +class FalseCheck(BaseCheck): + """A policy check that always returns False (disallow).""" + + def __str__(self): + """Return a string representation of this check.""" + + return "!" 
+ + def __call__(self, target, cred, enforcer): + """Check the policy.""" + + return False + + +class TrueCheck(BaseCheck): + """A policy check that always returns True (allow).""" + + def __str__(self): + """Return a string representation of this check.""" + + return "@" + + def __call__(self, target, cred, enforcer): + """Check the policy.""" + + return True + + +class Check(BaseCheck): + """A base class to allow for user-defined policy checks.""" + + def __init__(self, kind, match): + """Initiates Check instance. + + :param kind: The kind of the check, i.e., the field before the + ':'. + :param match: The match of the check, i.e., the field after + the ':'. + """ + + self.kind = kind + self.match = match + + def __str__(self): + """Return a string representation of this check.""" + + return "%s:%s" % (self.kind, self.match) + + +class NotCheck(BaseCheck): + """Implements the "not" logical operator. + + A policy check that inverts the result of another policy check. + """ + + def __init__(self, rule): + """Initialize the 'not' check. + + :param rule: The rule to negate. Must be a Check. + """ + + self.rule = rule + + def __str__(self): + """Return a string representation of this check.""" + + return "not %s" % self.rule + + def __call__(self, target, cred, enforcer): + """Check the policy. + + Returns the logical inverse of the wrapped check. + """ + + return not self.rule(target, cred, enforcer) + + +class AndCheck(BaseCheck): + """Implements the "and" logical operator. + + A policy check that requires that a list of other checks all return True. + """ + + def __init__(self, rules): + """Initialize the 'and' check. + + :param rules: A list of rules that will be tested. + """ + + self.rules = rules + + def __str__(self): + """Return a string representation of this check.""" + + return "(%s)" % ' and '.join(str(r) for r in self.rules) + + def __call__(self, target, cred, enforcer): + """Check the policy. + + Requires that all rules accept in order to return True. 
+ """ + + for rule in self.rules: + if not rule(target, cred, enforcer): + return False + + return True + + def add_check(self, rule): + """Adds rule to be tested. + + Allows addition of another rule to the list of rules that will + be tested. Returns the AndCheck object for convenience. + """ + + self.rules.append(rule) + return self + + +class OrCheck(BaseCheck): + """Implements the "or" operator. + + A policy check that requires that at least one of a list of other + checks returns True. + """ + + def __init__(self, rules): + """Initialize the 'or' check. + + :param rules: A list of rules that will be tested. + """ + + self.rules = rules + + def __str__(self): + """Return a string representation of this check.""" + + return "(%s)" % ' or '.join(str(r) for r in self.rules) + + def __call__(self, target, cred, enforcer): + """Check the policy. + + Requires that at least one rule accept in order to return True. + """ + + for rule in self.rules: + if rule(target, cred, enforcer): + return True + return False + + def add_check(self, rule): + """Adds rule to be tested. + + Allows addition of another rule to the list of rules that will + be tested. Returns the OrCheck object for convenience. 
+ """ + + self.rules.append(rule) + return self + + +def _parse_check(rule): + """Parse a single base check rule into an appropriate Check object.""" + + # Handle the special checks + if rule == '!': + return FalseCheck() + elif rule == '@': + return TrueCheck() + + try: + kind, match = rule.split(':', 1) + except Exception: + LOG.exception(_LE("Failed to understand rule %s") % rule) + # If the rule is invalid, we'll fail closed + return FalseCheck() + + # Find what implements the check + if kind in _checks: + return _checks[kind](kind, match) + elif None in _checks: + return _checks[None](kind, match) + else: + LOG.error(_LE("No handler for matches of kind %s") % kind) + return FalseCheck() + + +def _parse_list_rule(rule): + """Translates the old list-of-lists syntax into a tree of Check objects. + + Provided for backwards compatibility. + """ + + # Empty rule defaults to True + if not rule: + return TrueCheck() + + # Outer list is joined by "or"; inner list by "and" + or_list = [] + for inner_rule in rule: + # Elide empty inner lists + if not inner_rule: + continue + + # Handle bare strings + if isinstance(inner_rule, six.string_types): + inner_rule = [inner_rule] + + # Parse the inner rules into Check objects + and_list = [_parse_check(r) for r in inner_rule] + + # Append the appropriate check to the or_list + if len(and_list) == 1: + or_list.append(and_list[0]) + else: + or_list.append(AndCheck(and_list)) + + # If we have only one check, omit the "or" + if not or_list: + return FalseCheck() + elif len(or_list) == 1: + return or_list[0] + + return OrCheck(or_list) + + +# Used for tokenizing the policy language +_tokenize_re = re.compile(r'\s+') + + +def _parse_tokenize(rule): + """Tokenizer for the policy language. + + Most of the single-character tokens are specified in the + _tokenize_re; however, parentheses need to be handled specially, + because they can appear inside a check string. 
Thankfully, those + parentheses that appear inside a check string can never occur at + the very beginning or end ("%(variable)s" is the correct syntax). + """ + + for tok in _tokenize_re.split(rule): + # Skip empty tokens + if not tok or tok.isspace(): + continue + + # Handle leading parens on the token + clean = tok.lstrip('(') + for i in range(len(tok) - len(clean)): + yield '(', '(' + + # If it was only parentheses, continue + if not clean: + continue + else: + tok = clean + + # Handle trailing parens on the token + clean = tok.rstrip(')') + trail = len(tok) - len(clean) + + # Yield the cleaned token + lowered = clean.lower() + if lowered in ('and', 'or', 'not'): + # Special tokens + yield lowered, clean + elif clean: + # Not a special token, but not composed solely of ')' + if len(tok) >= 2 and ((tok[0], tok[-1]) in + [('"', '"'), ("'", "'")]): + # It's a quoted string + yield 'string', tok[1:-1] + else: + yield 'check', _parse_check(clean) + + # Yield the trailing parens + for i in range(trail): + yield ')', ')' + + +class ParseStateMeta(type): + """Metaclass for the ParseState class. + + Facilitates identifying reduction methods. + """ + + def __new__(mcs, name, bases, cls_dict): + """Create the class. + + Injects the 'reducers' list, a list of tuples matching token sequences + to the names of the corresponding reduction methods. + """ + + reducers = [] + + for key, value in cls_dict.items(): + if not hasattr(value, 'reducers'): + continue + for reduction in value.reducers: + reducers.append((reduction, key)) + + cls_dict['reducers'] = reducers + + return super(ParseStateMeta, mcs).__new__(mcs, name, bases, cls_dict) + + +def reducer(*tokens): + """Decorator for reduction methods. + + Arguments are a sequence of tokens, in order, which should trigger running + this reduction method. 
+ """ + + def decorator(func): + # Make sure we have a list of reducer sequences + if not hasattr(func, 'reducers'): + func.reducers = [] + + # Add the tokens to the list of reducer sequences + func.reducers.append(list(tokens)) + + return func + + return decorator + + +@six.add_metaclass(ParseStateMeta) +class ParseState(object): + """Implement the core of parsing the policy language. + + Uses a greedy reduction algorithm to reduce a sequence of tokens into + a single terminal, the value of which will be the root of the Check tree. + + Note: error reporting is rather lacking. The best we can get with + this parser formulation is an overall "parse failed" error. + Fortunately, the policy language is simple enough that this + shouldn't be that big a problem. + """ + + def __init__(self): + """Initialize the ParseState.""" + + self.tokens = [] + self.values = [] + + def reduce(self): + """Perform a greedy reduction of the token stream. + + If a reducer method matches, it will be executed, then the + reduce() method will be called recursively to search for any more + possible reductions. + """ + + for reduction, methname in self.reducers: + if (len(self.tokens) >= len(reduction) and + self.tokens[-len(reduction):] == reduction): + # Get the reduction method + meth = getattr(self, methname) + + # Reduce the token stream + results = meth(*self.values[-len(reduction):]) + + # Update the tokens and values + self.tokens[-len(reduction):] = [r[0] for r in results] + self.values[-len(reduction):] = [r[1] for r in results] + + # Check for any more reductions + return self.reduce() + + def shift(self, tok, value): + """Adds one more token to the state. Calls reduce().""" + + self.tokens.append(tok) + self.values.append(value) + + # Do a greedy reduce... + self.reduce() + + @property + def result(self): + """Obtain the final result of the parse. + + Raises ValueError if the parse failed to reduce to a single result. 
+ """ + + if len(self.values) != 1: + raise ValueError("Could not parse rule") + return self.values[0] + + @reducer('(', 'check', ')') + @reducer('(', 'and_expr', ')') + @reducer('(', 'or_expr', ')') + def _wrap_check(self, _p1, check, _p2): + """Turn parenthesized expressions into a 'check' token.""" + + return [('check', check)] + + @reducer('check', 'and', 'check') + def _make_and_expr(self, check1, _and, check2): + """Create an 'and_expr'. + + Join two checks by the 'and' operator. + """ + + return [('and_expr', AndCheck([check1, check2]))] + + @reducer('and_expr', 'and', 'check') + def _extend_and_expr(self, and_expr, _and, check): + """Extend an 'and_expr' by adding one more check.""" + + return [('and_expr', and_expr.add_check(check))] + + @reducer('check', 'or', 'check') + def _make_or_expr(self, check1, _or, check2): + """Create an 'or_expr'. + + Join two checks by the 'or' operator. + """ + + return [('or_expr', OrCheck([check1, check2]))] + + @reducer('or_expr', 'or', 'check') + def _extend_or_expr(self, or_expr, _or, check): + """Extend an 'or_expr' by adding one more check.""" + + return [('or_expr', or_expr.add_check(check))] + + @reducer('not', 'check') + def _make_not_expr(self, _not, check): + """Invert the result of another check.""" + + return [('check', NotCheck(check))] + + +def _parse_text_rule(rule): + """Parses policy to the tree. + + Translates a policy written in the policy language into a tree of + Check objects. 
+ """ + + # Empty rule means always accept + if not rule: + return TrueCheck() + + # Parse the token stream + state = ParseState() + for tok, value in _parse_tokenize(rule): + state.shift(tok, value) + + try: + return state.result + except ValueError: + # Couldn't parse the rule + LOG.exception(_LE("Failed to understand rule %r") % rule) + + # Fail closed + return FalseCheck() + + +def parse_rule(rule): + """Parses a policy rule into a tree of Check objects.""" + + # If the rule is a string, it's in the policy language + if isinstance(rule, six.string_types): + return _parse_text_rule(rule) + return _parse_list_rule(rule) + + +def register(name, func=None): + """Register a function or Check class as a policy check. + + :param name: Gives the name of the check type, e.g., 'rule', + 'role', etc. If name is None, a default check type + will be registered. + :param func: If given, provides the function or class to register. + If not given, returns a function taking one argument + to specify the function or class to register, + allowing use as a decorator. + """ + + # Perform the actual decoration by registering the function or + # class. Returns the function or class for compliance with the + # decorator interface. 
+ def decorator(func): + _checks[name] = func + return func + + # If the function or class is given, do the registration + if func: + return decorator(func) + + return decorator + + +@register("rule") +class RuleCheck(Check): + def __call__(self, target, creds, enforcer): + """Recursively checks credentials based on the defined rules.""" + + try: + return enforcer.rules[self.match](target, creds, enforcer) + except KeyError: + # We don't have any matching rule; fail closed + return False + + +@register("role") +class RoleCheck(Check): + def __call__(self, target, creds, enforcer): + """Check that there is a matching role in the cred dict.""" + + return self.match.lower() in [x.lower() for x in creds['roles']] + + +@register('http') +class HttpCheck(Check): + def __call__(self, target, creds, enforcer): + """Check http: rules by calling to a remote server. + + This example implementation simply verifies that the response + is exactly 'True'. + """ + + url = ('http:' + self.match) % target + data = {'target': jsonutils.dumps(target), + 'credentials': jsonutils.dumps(creds)} + post_data = urlparse.urlencode(data) + f = urlrequest.urlopen(url, post_data) + return f.read() == "True" + + +@register(None) +class GenericCheck(Check): + def __call__(self, target, creds, enforcer): + """Check an individual match. 
+ + Matches look like: + + tenant:%(tenant_id)s + role:compute:admin + True:%(user.enabled)s + 'Member':%(role.name)s + """ + + # TODO(termie): do dict inspection via dot syntax + try: + match = self.match % target + except KeyError: + # While doing GenericCheck if key not + # present in Target return false + return False + + try: + # Try to interpret self.kind as a literal + leftval = ast.literal_eval(self.kind) + except ValueError: + try: + leftval = creds[self.kind] + except KeyError: + return False + return match == six.text_type(leftval) diff --git a/murano/openstack/common/service.py b/murano/openstack/common/service.py index f4e80061..36b97815 100644 --- a/murano/openstack/common/service.py +++ b/murano/openstack/common/service.py @@ -190,6 +190,7 @@ class ServiceLauncher(Launcher): return status, signo def wait(self, ready_callback=None): + systemd.notify_once() while True: self.handle_signal() status, signo = self._wait_for_exit_or_signal(ready_callback) @@ -267,7 +268,7 @@ class ProcessLauncher(object): launcher.wait() except SignalExit as exc: signame = _signo_to_signame(exc.signo) - LOG.info(_LI('Caught %s, exiting'), signame) + LOG.info(_LI('Child caught %s, exiting'), signame) status = exc.code signo = exc.signo except SystemExit as exc: @@ -382,6 +383,7 @@ class ProcessLauncher(object): def wait(self): """Loop waiting on children to die and respawning as necessary.""" + systemd.notify_once() LOG.debug('Full set of CONF:') CONF.log_opt_values(LOG, std_logging.DEBUG) @@ -488,7 +490,6 @@ class Services(object): """ service.start() - systemd.notify_once() done.wait() diff --git a/murano/openstack/common/sslutils.py b/murano/openstack/common/sslutils.py index 022eb4ff..57862251 100644 --- a/murano/openstack/common/sslutils.py +++ b/murano/openstack/common/sslutils.py @@ -22,15 +22,12 @@ from murano.openstack.common.gettextutils import _ ssl_opts = [ cfg.StrOpt('ca_file', - default=None, help="CA certificate file to use to verify " "connecting clients."), 
cfg.StrOpt('cert_file', - default=None, help="Certificate file to use when starting " "the server securely."), cfg.StrOpt('key_file', - default=None, help="Private key file to use when starting " "the server securely."), ] diff --git a/murano/openstack/common/strutils.py b/murano/openstack/common/strutils.py new file mode 100644 index 00000000..8a90f7fd --- /dev/null +++ b/murano/openstack/common/strutils.py @@ -0,0 +1,239 @@ +# Copyright 2011 OpenStack Foundation. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +""" +System-level utilities and helper functions. +""" + +import math +import re +import sys +import unicodedata + +import six + +from murano.openstack.common.gettextutils import _ + + +UNIT_PREFIX_EXPONENT = { + 'k': 1, + 'K': 1, + 'Ki': 1, + 'M': 2, + 'Mi': 2, + 'G': 3, + 'Gi': 3, + 'T': 4, + 'Ti': 4, +} +UNIT_SYSTEM_INFO = { + 'IEC': (1024, re.compile(r'(^[-+]?\d*\.?\d+)([KMGT]i?)?(b|bit|B)$')), + 'SI': (1000, re.compile(r'(^[-+]?\d*\.?\d+)([kMGT])?(b|bit|B)$')), +} + +TRUE_STRINGS = ('1', 't', 'true', 'on', 'y', 'yes') +FALSE_STRINGS = ('0', 'f', 'false', 'off', 'n', 'no') + +SLUGIFY_STRIP_RE = re.compile(r"[^\w\s-]") +SLUGIFY_HYPHENATE_RE = re.compile(r"[-\s]+") + + +def int_from_bool_as_string(subject): + """Interpret a string as a boolean and return either 1 or 0. + + Any string value in: + + ('True', 'true', 'On', 'on', '1') + + is interpreted as a boolean True. 
+ + Useful for JSON-decoded stuff and config file parsing + """ + return bool_from_string(subject) and 1 or 0 + + +def bool_from_string(subject, strict=False, default=False): + """Interpret a string as a boolean. + + A case-insensitive match is performed such that strings matching 't', + 'true', 'on', 'y', 'yes', or '1' are considered True and, when + `strict=False`, anything else returns the value specified by 'default'. + + Useful for JSON-decoded stuff and config file parsing. + + If `strict=True`, unrecognized values, including None, will raise a + ValueError which is useful when parsing values passed in from an API call. + Strings yielding False are 'f', 'false', 'off', 'n', 'no', or '0'. + """ + if not isinstance(subject, six.string_types): + subject = six.text_type(subject) + + lowered = subject.strip().lower() + + if lowered in TRUE_STRINGS: + return True + elif lowered in FALSE_STRINGS: + return False + elif strict: + acceptable = ', '.join( + "'%s'" % s for s in sorted(TRUE_STRINGS + FALSE_STRINGS)) + msg = _("Unrecognized value '%(val)s', acceptable values are:" + " %(acceptable)s") % {'val': subject, + 'acceptable': acceptable} + raise ValueError(msg) + else: + return default + + +def safe_decode(text, incoming=None, errors='strict'): + """Decodes incoming text/bytes string using `incoming` if they're not + already unicode. + + :param incoming: Text's current encoding + :param errors: Errors handling policy. See here for valid + values http://docs.python.org/2/library/codecs.html + :returns: text or a unicode `incoming` encoded + representation of it. 
+ :raises TypeError: If text is not an instance of str + """ + if not isinstance(text, (six.string_types, six.binary_type)): + raise TypeError("%s can't be decoded" % type(text)) + + if isinstance(text, six.text_type): + return text + + if not incoming: + incoming = (sys.stdin.encoding or + sys.getdefaultencoding()) + + try: + return text.decode(incoming, errors) + except UnicodeDecodeError: + # Note(flaper87) If we get here, it means that + # sys.stdin.encoding / sys.getdefaultencoding + # didn't return a suitable encoding to decode + # text. This happens mostly when global LANG + # var is not set correctly and there's no + # default encoding. In this case, most likely + # python will use ASCII or ANSI encoders as + # default encodings but they won't be capable + # of decoding non-ASCII characters. + # + # Also, UTF-8 is being used since it's an ASCII + # extension. + return text.decode('utf-8', errors) + + +def safe_encode(text, incoming=None, + encoding='utf-8', errors='strict'): + """Encodes incoming text/bytes string using `encoding`. + + If incoming is not specified, text is expected to be encoded with + current python's default encoding. (`sys.getdefaultencoding`) + + :param incoming: Text's current encoding + :param encoding: Expected encoding for text (Default UTF-8) + :param errors: Errors handling policy. See here for valid + values http://docs.python.org/2/library/codecs.html + :returns: text or a bytestring `encoding` encoded + representation of it. 
+ :raises TypeError: If text is not an instance of str or bytes
+ """
+ if not isinstance(text, (six.string_types, six.binary_type)):
+ raise TypeError("%s can't be encoded" % type(text))
+
+ if not incoming:
+ incoming = (sys.stdin.encoding or
+ sys.getdefaultencoding())
+
+ if isinstance(text, six.text_type):
+ return text.encode(encoding, errors)
+ elif text and encoding != incoming:
+ # Decode text before encoding it with `encoding`
+ text = safe_decode(text, incoming, errors)
+ return text.encode(encoding, errors)
+ else:
+ return text
+
+
+def string_to_bytes(text, unit_system='IEC', return_int=False):
+ """Converts a string into a float representation of bytes.
+
+ The units supported for IEC ::
+
+ Kb(it), Kib(it), Mb(it), Mib(it), Gb(it), Gib(it), Tb(it), Tib(it)
+ KB, KiB, MB, MiB, GB, GiB, TB, TiB
+
+ The units supported for SI ::
+
+ kb(it), Mb(it), Gb(it), Tb(it)
+ kB, MB, GB, TB
+
+ Note that the SI unit system does not support capital letter 'K'
+
+ :param text: String input for bytes size conversion.
+ :param unit_system: Unit system for byte size conversion.
+ :param return_int: If True, returns integer representation of text
+ in bytes. (default: decimal)
+ :returns: Numerical representation of text in bytes.
+ :raises ValueError: If text has an invalid value.
+
+ """
+ try:
+ base, reg_ex = UNIT_SYSTEM_INFO[unit_system]
+ except KeyError:
+ msg = _('Invalid unit system: "%s"') % unit_system
+ raise ValueError(msg)
+ match = reg_ex.match(text)
+ if match:
+ magnitude = float(match.group(1))
+ unit_prefix = match.group(2)
+ if match.group(3) in ['b', 'bit']:
+ magnitude /= 8
+ else:
+ msg = _('Invalid string format: %s') % text
+ raise ValueError(msg)
+ if not unit_prefix:
+ res = magnitude
+ else:
+ res = magnitude * pow(base, UNIT_PREFIX_EXPONENT[unit_prefix])
+ if return_int:
+ return int(math.ceil(res))
+ return res
+
+
+def to_slug(value, incoming=None, errors="strict"):
+ """Normalize string. 
+ + Convert to lowercase, remove non-word characters, and convert spaces + to hyphens. + + Inspired by Django's `slugify` filter. + + :param value: Text to slugify + :param incoming: Text's current encoding + :param errors: Errors handling policy. See here for valid + values http://docs.python.org/2/library/codecs.html + :returns: slugified unicode representation of `value` + :raises TypeError: If text is not an instance of str + """ + value = safe_decode(value, incoming, errors) + # NOTE(aababilov): no need to use safe_(encode|decode) here: + # encodings are always "ascii", error handling is always "ignore" + # and types are always known (first: unicode; second: str) + value = unicodedata.normalize("NFKD", value).encode( + "ascii", "ignore").decode("ascii") + value = SLUGIFY_STRIP_RE.sub("", value).strip().lower() + return SLUGIFY_HYPHENATE_RE.sub("-", value) diff --git a/murano/openstack/common/systemd.py b/murano/openstack/common/systemd.py index ad124a42..9a2c6609 100644 --- a/murano/openstack/common/systemd.py +++ b/murano/openstack/common/systemd.py @@ -50,14 +50,16 @@ def _sd_notify(unset_env, msg): def notify(): """Send notification to Systemd that service is ready. + For details see - http://www.freedesktop.org/software/systemd/man/sd_notify.html + http://www.freedesktop.org/software/systemd/man/sd_notify.html """ _sd_notify(False, 'READY=1') def notify_once(): """Send notification once to Systemd that service is ready. + Systemd sets NOTIFY_SOCKET environment variable with the name of the socket listening for notifications from services. 
This method removes the NOTIFY_SOCKET environment variable to ensure @@ -75,7 +77,7 @@ def onready(notify_socket, timeout): :type timeout: float :returns: 0 service ready 1 service not ready - 2 timeout occured + 2 timeout occurred """ sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) sock.settimeout(timeout) diff --git a/murano/openstack/common/threadgroup.py b/murano/openstack/common/threadgroup.py index 2cff1e3c..60d591fd 100644 --- a/murano/openstack/common/threadgroup.py +++ b/murano/openstack/common/threadgroup.py @@ -85,7 +85,7 @@ class ThreadGroup(object): def thread_done(self, thread): self.threads.remove(thread) - def stop(self): + def _stop_threads(self): current = threading.current_thread() # Iterate over a copy of self.threads so thread_done doesn't @@ -99,6 +99,7 @@ class ThreadGroup(object): except Exception as ex: LOG.exception(ex) + def stop_timers(self): for x in self.timers: try: x.stop() @@ -106,6 +107,23 @@ class ThreadGroup(object): LOG.exception(ex) self.timers = [] + def stop(self, graceful=False): + """stop function has the option of graceful=True/False. + + * In case of graceful=True, wait for all threads to be finished. + Never kill threads. + * In case of graceful=False, kill threads immediately. 
+ """ + self.stop_timers() + if graceful: + # In case of graceful=True, wait for all threads to be + # finished, never kill threads + self.wait() + else: + # In case of graceful=False(Default), kill threads + # immediately + self._stop_threads() + def wait(self): for x in self.timers: try: diff --git a/murano/tests/api/base.py b/murano/tests/api/base.py index 3982dd2b..81029a6f 100644 --- a/murano/tests/api/base.py +++ b/murano/tests/api/base.py @@ -108,7 +108,7 @@ class ControllerTest(object): #cfg.CONF.set_default('host', 'server.test') self.api_version = '1.0' self.tenant = 'test_tenant' - self.mock_enforce = None + self.mock_policy_check = None request_statistics.init_stats() @@ -161,6 +161,30 @@ class ControllerTest(object): def _put(self, path, data, content_type='application/json'): return self._data_request(path, data, content_type, method='PUT') + def _mock_policy_setup(self, mocker, action, allowed=True, + target=None, expected_request_count=1): + if self.mock_policy_check is not None: + # Test existing policy check record + self._check_policy() + self.mock_policy_check.reset_mock() + + self.mock_policy_check = mocker + self.policy_action = action + self.mock_policy_check.return_value = allowed + self.policy_target = target + self.expected_request_count = expected_request_count + + def _check_policy(self): + """Assert policy checks called as expected""" + if self.mock_policy_check: + # Check that policy enforcement got called as expected + self.mock_policy_check.assert_called_with( + self.policy_action, + self.context, + self.policy_target or {}) + self.assertEqual(self.expected_request_count, + len(self.mock_policy_check.call_args_list)) + def tearDown(self): - # TODO(sjmc7): Add policy check once it's implemented + self._check_policy() super(ControllerTest, self).tearDown() diff --git a/murano/tests/api/v1/test_environments.py b/murano/tests/api/v1/test_environments.py index 18bd005d..b1a34391 100644 --- a/murano/tests/api/v1/test_environments.py +++ 
b/murano/tests/api/v1/test_environments.py @@ -14,15 +14,18 @@ # limitations under the License. import json +import mock from webob import exc from murano.api.v1 import environments +from murano.common import policy from murano.db import models from murano.openstack.common import timeutils import murano.tests.api.base as test_base import murano.tests.utils as test_utils +@mock.patch.object(policy, 'check') class TestEnvironmentApi(test_base.ControllerTest, test_base.MuranoTestCase): RPC_IMPORT = 'murano.db.services.environments.rpc' @@ -30,14 +33,18 @@ class TestEnvironmentApi(test_base.ControllerTest, test_base.MuranoTestCase): super(TestEnvironmentApi, self).setUp() self.controller = environments.Controller() - def test_list_empty_environments(self): + def test_list_empty_environments(self, mock_policy_check): """Check that with no environments an empty list is returned""" + self._mock_policy_setup(mock_policy_check, 'list_environments') + req = self._get('/environments') result = self.controller.index(req) self.assertEqual({'environments': []}, result) - def test_create_environment(self): + def test_create_environment(self, mock_policy_check): """Create an environment, test environment.show()""" + self._mock_policy_setup(mock_policy_check, 'create_environment') + fake_now = timeutils.utcnow() timeutils.utcnow.override_time = fake_now @@ -59,6 +66,9 @@ class TestEnvironmentApi(test_base.ControllerTest, test_base.MuranoTestCase): expected['status'] = 'ready' + # Reset the policy expectation + self._mock_policy_setup(mock_policy_check, 'list_environments') + req = self._get('/environments') result = self.controller.index(req) @@ -66,20 +76,30 @@ class TestEnvironmentApi(test_base.ControllerTest, test_base.MuranoTestCase): expected['services'] = [] + # Reset the policy expectation + self._mock_policy_setup(mock_policy_check, 'show_environment', + target={'environment_id': uuids[-1]}) + req = self._get('/environments/%s' % uuids[-1]) result = 
self.controller.show(req, uuids[-1]) self.assertEqual(expected, result) self.assertEqual(3, mock_uuid.call_count) - def test_missing_environment(self): + def test_missing_environment(self, mock_policy_check): """Check that a missing environment results in an HTTPNotFound""" + self._mock_policy_setup(mock_policy_check, 'show_environment', + target={'environment_id': 'no-such-id'}) + req = self._get('/environments/no-such-id') self.assertRaises(exc.HTTPNotFound, self.controller.show, req, 'no-such-id') - def test_update_environment(self): + def test_update_environment(self, mock_policy_check): """Check that environment rename works""" + self._mock_policy_setup(mock_policy_check, 'update_environment', + target={'environment_id': '12345'}) + fake_now = timeutils.utcnow() timeutils.utcnow.override_time = fake_now @@ -116,13 +136,18 @@ class TestEnvironmentApi(test_base.ControllerTest, test_base.MuranoTestCase): req = self._post('/environments/12345', json.dumps(body)) result = self.controller.update(req, '12345', body) - req = self._get('/environments/%s' % '12345') + self._mock_policy_setup(mock_policy_check, 'show_environment', + target={'environment_id': '12345'}) + req = self._get('/environments/12345') result = self.controller.show(req, '12345') self.assertEqual(expected, result) - def test_delete_environment(self): + def test_delete_environment(self, mock_policy_check): """Test that environment deletion results in the correct rpc call""" + self._mock_policy_setup(mock_policy_check, 'delete_environment', + target={'environment_id': '12345'}) + fake_now = timeutils.utcnow() expected = dict( id='12345', diff --git a/tools/config/check_uptodate.sh b/tools/config/check_uptodate.sh index 70e4f151..bcda89fd 100755 --- a/tools/config/check_uptodate.sh +++ b/tools/config/check_uptodate.sh @@ -1,7 +1,7 @@ #!/usr/bin/env bash PROJECT_NAME=${PROJECT_NAME:-murano} -CFGFILE_NAME=murano.conf.sample +CFGFILE_NAME=${PROJECT_NAME}.conf.sample if [ -e 
etc/${PROJECT_NAME}/${CFGFILE_NAME} ]; then CFGFILE=etc/${PROJECT_NAME}/${CFGFILE_NAME} @@ -15,7 +15,7 @@ fi TEMPDIR=`mktemp -d /tmp/${PROJECT_NAME}.XXXXXX` trap "rm -rf $TEMPDIR" EXIT -tools/config/generate_sample.sh -b ./ -p murano -o ${TEMPDIR} +tools/config/generate_sample.sh -b ./ -p ${PROJECT_NAME} -o ${TEMPDIR} if ! diff -u ${TEMPDIR}/${CFGFILE_NAME} ${CFGFILE} then diff --git a/tools/config/generate_sample.sh b/tools/config/generate_sample.sh index 52cf52e0..4348edff 100755 --- a/tools/config/generate_sample.sh +++ b/tools/config/generate_sample.sh @@ -1,5 +1,15 @@ #!/usr/bin/env bash +# Generate sample configuration for your project. +# +# Aside from the command line flags, it also respects a config file which +# should be named oslo.config.generator.rc and be placed in the same directory. +# +# You can then export the following variables: +# MURANO_CONFIG_GENERATOR_EXTRA_MODULES: list of modules to interrogate for options. +# MURANO_CONFIG_GENERATOR_EXTRA_LIBRARIES: list of libraries to discover. +# MURANO_CONFIG_GENERATOR_EXCLUDED_FILES: list of files to remove from automatic listing. + print_hint() { echo "Try \`${0##*/} --help' for more information." >&2 } @@ -65,7 +75,7 @@ then BASEDIR=$(cd "$BASEDIR" && pwd) fi -PACKAGENAME=${PACKAGENAME:-${BASEDIR##*/}} +PACKAGENAME=${PACKAGENAME:-$(python setup.py --name)} TARGETDIR=$BASEDIR/$PACKAGENAME if ! 
[ -d $TARGETDIR ] then @@ -95,11 +105,15 @@ then source "$RC_FILE" fi -for mod in ${MURANOAPI_CONFIG_GENERATOR_EXTRA_MODULES}; do +for filename in ${MURANO_CONFIG_GENERATOR_EXCLUDED_FILES}; do + FILES="${FILES[@]/$filename/}" +done + +for mod in ${MURANO_CONFIG_GENERATOR_EXTRA_MODULES}; do MODULES="$MODULES -m $mod" done -for lib in ${MURANOAPI_CONFIG_GENERATOR_EXTRA_LIBRARIES}; do +for lib in ${MURANO_CONFIG_GENERATOR_EXTRA_LIBRARIES}; do LIBRARIES="$LIBRARIES -l $lib" done diff --git a/tools/install_venv_common.py b/tools/install_venv_common.py index 46822e32..e279159a 100644 --- a/tools/install_venv_common.py +++ b/tools/install_venv_common.py @@ -125,7 +125,7 @@ class InstallVenv(object): parser.add_option('-n', '--no-site-packages', action='store_true', help="Do not inherit packages from global Python " - "install") + "install.") return parser.parse_args(argv[1:])[0]