Update glance.openstack.common.policy and clean up

1. Sync glance.openstack.common.policy up to the latest version from
   oslo-incubator.
2. Remove the now-unused modules that only the policy module depended on,
   and drop the pinned gettextutils usage there; glance.i18n is used
   instead:
   * jsonutils
   * strutils

DocImpact
Closes-bug: #1288178
Closes-bug: #1387973
Partial-bug: #1381870
Change-Id: I84511ab1ee600e618985448dfbfbdc26cb130370
Signed-off-by: Zhi Yan Liu <zhiyanl@cn.ibm.com>

This commit is contained in:
parent f82574fa68
commit bdc4951d29
@ -289,6 +289,21 @@ scrubber_datadir = /var/lib/glance/scrubber
# Base directory that the Image Cache uses
image_cache_dir = /var/lib/glance/image-cache/

# =============== Policy Options ==================================

# The JSON file that defines policies.
#policy_file = policy.json

# Default rule. Enforced when a requested rule is not found.
#policy_default_rule = default

# Directories where policy configuration files are stored.
# They can be relative to any directory in the search path
# defined by the config_dir option, or absolute paths.
# The file defined by policy_file must exist for these
# directories to be searched.
#policy_dirs = policy.d

# =============== Database Options =================================

[database]

@ -198,3 +198,18 @@ s3_store_create_bucket_on_put = False
# -- if used -- Swift or S3 credentials
# Should be set to a random string of length 16, 24 or 32 bytes
# metadata_encryption_key = <16, 24 or 32 char registry metadata key>

# =============== Policy Options ==============================

# The JSON file that defines policies.
#policy_file = policy.json

# Default rule. Enforced when a requested rule is not found.
#policy_default_rule = default

# Directories where policy configuration files are stored.
# They can be relative to any directory in the search path
# defined by the config_dir option, or absolute paths.
# The file defined by policy_file must exist for these
# directories to be searched.
#policy_dirs = policy.d

@ -122,6 +122,21 @@ qpid_protocol = tcp
qpid_tcp_nodelay = True


# =============== Policy Options ==============================

# The JSON file that defines policies.
#policy_file = policy.json

# Default rule. Enforced when a requested rule is not found.
#policy_default_rule = default

# Directories where policy configuration files are stored.
# They can be relative to any directory in the search path
# defined by the config_dir option, or absolute paths.
# The file defined by policy_file must exist for these
# directories to be searched.
#policy_dirs = policy.d

# ================= Database Options ==========================

[database]

@ -59,6 +59,21 @@ registry_port = 9191
# Should be set to a random string of length 16, 24 or 32 bytes
#metadata_encryption_key = <16, 24 or 32 char registry metadata key>

# =============== Policy Options ==============================

# The JSON file that defines policies.
#policy_file = policy.json

# Default rule. Enforced when a requested rule is not found.
#policy_default_rule = default

# Directories where policy configuration files are stored.
# They can be relative to any directory in the search path
# defined by the config_dir option, or absolute paths.
# The file defined by policy_file must exist for these
# directories to be searched.
#policy_dirs = policy.d

# ================= Database Options ==========================

[database]
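The policy_dirs behaviour referenced in the comments above comes from the synced oslo-incubator module further down in this diff: each existing directory is scanned one level deep, dotfiles are skipped, files are read in sorted order, and their rules are merged over the ones already loaded from policy_file. A minimal stdlib-only sketch of that merge order (the directory and file names are illustrative, not part of this change):

    import json
    import os

    def collect_policy_overrides(policy_dir):
        # Mirror Enforcer._walk_through_policy_directory: top-level files
        # only, sorted by name, hidden files ignored; later files win.
        merged = {}
        for name in sorted(next(os.walk(policy_dir))[2]):
            if name.startswith('.'):
                continue
            with open(os.path.join(policy_dir, name)) as f:
                merged.update(json.load(f))
        return merged

    # Example: collect_policy_overrides('/etc/glance/policy.d')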
124
glance/api/policy.py
Normal file → Executable file
@ -17,10 +17,8 @@
"""Policy Engine For Glance"""

import copy
import os.path

from oslo.config import cfg
from oslo.serialization import jsonutils

from glance.common import exception
import glance.domain.proxy
@ -28,21 +26,9 @@ from glance import i18n
import glance.openstack.common.log as logging
from glance.openstack.common import policy


LOG = logging.getLogger(__name__)
_ = i18n._
_LI = i18n._LI
_LW = i18n._LW

policy_opts = [
    cfg.StrOpt('policy_file', default='policy.json',
               help=_('The location of the policy file.')),
    cfg.StrOpt('policy_default_rule', default='default',
               help=_('The default policy to use.')),
]

CONF = cfg.CONF
CONF.register_opts(policy_opts)


DEFAULT_RULES = {
    'context_is_admin': policy.RoleCheck('role', 'admin'),
@ -50,89 +36,24 @@ DEFAULT_RULES = {
    'manage_image_cache': policy.RoleCheck('role', 'admin'),
}

_ = i18n._
_LI = i18n._LI
_LW = i18n._LW

class Enforcer(object):

class Enforcer(policy.Enforcer):
    """Responsible for loading and enforcing rules"""

    def __init__(self):
        self.default_rule = CONF.policy_default_rule
        self.policy_path = self._find_policy_file()
        self.policy_file_mtime = None
        self.policy_file_contents = None
        self.load_rules()

    def set_rules(self, rules):
        """Create a new Rules object based on the provided dict of rules"""
        rules_obj = policy.Rules(rules, self.default_rule)
        policy.set_rules(rules_obj)
        if CONF.find_file(CONF.policy_file):
            kwargs = dict(rules=None, use_conf=True)
        else:
            kwargs = dict(rules=DEFAULT_RULES, use_conf=False)
        super(Enforcer, self).__init__(overwrite=False, **kwargs)

    def add_rules(self, rules):
        """Add new rules to the Rules object"""
        if policy._rules:
            rules_obj = policy.Rules(rules)
            policy._rules.update(rules_obj)
        else:
            self.set_rules(rules)

    def load_rules(self):
        """Set the rules found in the json file on disk"""
        if self.policy_path:
            rules = self._read_policy_file()
            rule_type = ""
        else:
            rules = DEFAULT_RULES
            rule_type = "default "

        text_rules = dict((k, str(v)) for k, v in rules.items())
        msg = ('Loaded %(rule_type)spolicy rules: %(text_rules)s' %
               {'rule_type': rule_type, 'text_rules': text_rules})
        LOG.debug(msg)

        self.set_rules(rules)

    @staticmethod
    def _find_policy_file():
        """Locate the policy json data file"""
        policy_file = CONF.find_file(CONF.policy_file)
        if policy_file:
            return policy_file
        else:
            LOG.warn(_LW('Unable to find policy file'))
            return None

    def _read_policy_file(self):
        """Read contents of the policy file

        This re-caches policy data if the file has been changed.
        """
        mtime = os.path.getmtime(self.policy_path)
        if not self.policy_file_contents or mtime != self.policy_file_mtime:
            LOG.info(_LI("Loading policy from %s") % self.policy_path)
            with open(self.policy_path) as fap:
                raw_contents = fap.read()
                rules_dict = jsonutils.loads(raw_contents)
                self.policy_file_contents = dict(
                    (k, policy.parse_rule(v))
                    for k, v in rules_dict.items())
            self.policy_file_mtime = mtime
        return self.policy_file_contents

    def _check(self, context, rule, target, *args, **kwargs):
        """Verifies that the action is valid on the target in this context.

        :param context: Glance request context
        :param rule: String representing the action to be checked
        :param object: Dictionary representing the object of the action.
        :raises: `glance.common.exception.Forbidden`
        :returns: A non-False value if access is allowed.
        """
        credentials = {
            'roles': context.roles,
            'user': context.user,
            'tenant': context.tenant,
        }

        return policy.check(rule, target, credentials, *args, **kwargs)
        self.set_rules(rules, overwrite=False, use_conf=self.use_conf)

    def enforce(self, context, action, target):
        """Verifies that the action is valid on the target in this context.
@ -143,8 +64,15 @@ class Enforcer(object):
        :raises: `glance.common.exception.Forbidden`
        :returns: A non-False value if access is allowed.
        """
        return self._check(context, action, target,
                           exception.Forbidden, action=action)
        credentials = {
            'roles': context.roles,
            'user': context.user,
            'tenant': context.tenant,
        }
        return super(Enforcer, self).enforce(action, target, credentials,
                                             do_raise=True,
                                             exc=exception.Forbidden,
                                             action=action)

    def check(self, context, action, target):
        """Verifies that the action is valid on the target in this context.
@ -154,7 +82,12 @@ class Enforcer(object):
        :param target: Dictionary representing the object of the action.
        :returns: A non-False value if access is allowed.
        """
        return self._check(context, action, target)
        credentials = {
            'roles': context.roles,
            'user': context.user,
            'tenant': context.tenant,
        }
        return super(Enforcer, self).enforce(action, target, credentials)

    def check_is_admin(self, context):
        """Check if the given context is associated with an admin role,
@ -163,8 +96,7 @@ class Enforcer(object):
        :param context: Glance request context
        :returns: A non-False value if context role is admin.
        """
        target = context.to_dict()
        return self.check(context, 'context_is_admin', target)
        return self.check(context, 'context_is_admin', context.to_dict())


class ImageRepoProxy(glance.domain.proxy.Repo):
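Taken together, the hunks above replace glance's hand-rolled rule loading with the oslo-incubator Enforcer. A minimal sketch of how the reworked class behaves from a caller's point of view; it is illustrative only and not part of this change (the fake context carries just the three attributes the credentials dict reads, and it assumes the post-sync glance tree and oslo.config are importable):

    import collections

    from oslo.config import cfg
    from glance.api import policy
    from glance.common import exception

    cfg.CONF([], project='glance')  # parse an empty config so CONF.find_file() works

    # Stand-in for glance.context.RequestContext with only the attributes
    # enforce()/check() read when building the credentials dict.
    FakeContext = collections.namedtuple('FakeContext', 'roles user tenant')
    ctx = FakeContext(roles=['member'], user='u1', tenant='t1')

    enforcer = policy.Enforcer()  # falls back to DEFAULT_RULES without a policy.json

    # check() returns a truthy/falsy result; enforce() raises Forbidden instead.
    if not enforcer.check(ctx, 'manage_image_cache', {}):
        print('denied')           # 'member' is not 'admin' under DEFAULT_RULES

    try:
        enforcer.enforce(ctx, 'manage_image_cache', {})
    except exception.Forbidden:
        print('Forbidden raised, as the API middleware expects')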
@ -1,186 +0,0 @@
|
||||
# Copyright 2010 United States Government as represented by the
|
||||
# Administrator of the National Aeronautics and Space Administration.
|
||||
# Copyright 2011 Justin Santa Barbara
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
'''
|
||||
JSON related utilities.
|
||||
|
||||
This module provides a few things:
|
||||
|
||||
1) A handy function for getting an object down to something that can be
|
||||
JSON serialized. See to_primitive().
|
||||
|
||||
2) Wrappers around loads() and dumps(). The dumps() wrapper will
|
||||
automatically use to_primitive() for you if needed.
|
||||
|
||||
3) This sets up anyjson to use the loads() and dumps() wrappers if anyjson
|
||||
is available.
|
||||
'''
|
||||
|
||||
|
||||
import codecs
|
||||
import datetime
|
||||
import functools
|
||||
import inspect
|
||||
import itertools
|
||||
import sys
|
||||
|
||||
if sys.version_info < (2, 7):
|
||||
# On Python <= 2.6, json module is not C boosted, so try to use
|
||||
# simplejson module if available
|
||||
try:
|
||||
import simplejson as json
|
||||
except ImportError:
|
||||
import json
|
||||
else:
|
||||
import json
|
||||
|
||||
import six
|
||||
import six.moves.xmlrpc_client as xmlrpclib
|
||||
|
||||
from glance.openstack.common import gettextutils
|
||||
from glance.openstack.common import importutils
|
||||
from glance.openstack.common import strutils
|
||||
from glance.openstack.common import timeutils
|
||||
|
||||
netaddr = importutils.try_import("netaddr")
|
||||
|
||||
_nasty_type_tests = [inspect.ismodule, inspect.isclass, inspect.ismethod,
|
||||
inspect.isfunction, inspect.isgeneratorfunction,
|
||||
inspect.isgenerator, inspect.istraceback, inspect.isframe,
|
||||
inspect.iscode, inspect.isbuiltin, inspect.isroutine,
|
||||
inspect.isabstract]
|
||||
|
||||
_simple_types = (six.string_types + six.integer_types
|
||||
+ (type(None), bool, float))
|
||||
|
||||
|
||||
def to_primitive(value, convert_instances=False, convert_datetime=True,
|
||||
level=0, max_depth=3):
|
||||
"""Convert a complex object into primitives.
|
||||
|
||||
Handy for JSON serialization. We can optionally handle instances,
|
||||
but since this is a recursive function, we could have cyclical
|
||||
data structures.
|
||||
|
||||
To handle cyclical data structures we could track the actual objects
|
||||
visited in a set, but not all objects are hashable. Instead we just
|
||||
track the depth of the object inspections and don't go too deep.
|
||||
|
||||
Therefore, convert_instances=True is lossy ... be aware.
|
||||
|
||||
"""
|
||||
# handle obvious types first - order of basic types determined by running
|
||||
# full tests on nova project, resulting in the following counts:
|
||||
# 572754 <type 'NoneType'>
|
||||
# 460353 <type 'int'>
|
||||
# 379632 <type 'unicode'>
|
||||
# 274610 <type 'str'>
|
||||
# 199918 <type 'dict'>
|
||||
# 114200 <type 'datetime.datetime'>
|
||||
# 51817 <type 'bool'>
|
||||
# 26164 <type 'list'>
|
||||
# 6491 <type 'float'>
|
||||
# 283 <type 'tuple'>
|
||||
# 19 <type 'long'>
|
||||
if isinstance(value, _simple_types):
|
||||
return value
|
||||
|
||||
if isinstance(value, datetime.datetime):
|
||||
if convert_datetime:
|
||||
return timeutils.strtime(value)
|
||||
else:
|
||||
return value
|
||||
|
||||
# value of itertools.count doesn't get caught by nasty_type_tests
|
||||
# and results in infinite loop when list(value) is called.
|
||||
if type(value) == itertools.count:
|
||||
return six.text_type(value)
|
||||
|
||||
# FIXME(vish): Workaround for LP bug 852095. Without this workaround,
|
||||
# tests that raise an exception in a mocked method that
|
||||
# has a @wrap_exception with a notifier will fail. If
|
||||
# we up the dependency to 0.5.4 (when it is released) we
|
||||
# can remove this workaround.
|
||||
if getattr(value, '__module__', None) == 'mox':
|
||||
return 'mock'
|
||||
|
||||
if level > max_depth:
|
||||
return '?'
|
||||
|
||||
# The try block may not be necessary after the class check above,
|
||||
# but just in case ...
|
||||
try:
|
||||
recursive = functools.partial(to_primitive,
|
||||
convert_instances=convert_instances,
|
||||
convert_datetime=convert_datetime,
|
||||
level=level,
|
||||
max_depth=max_depth)
|
||||
if isinstance(value, dict):
|
||||
return dict((k, recursive(v)) for k, v in six.iteritems(value))
|
||||
elif isinstance(value, (list, tuple)):
|
||||
return [recursive(lv) for lv in value]
|
||||
|
||||
# It's not clear why xmlrpclib created their own DateTime type, but
|
||||
# for our purposes, make it a datetime type which is explicitly
|
||||
# handled
|
||||
if isinstance(value, xmlrpclib.DateTime):
|
||||
value = datetime.datetime(*tuple(value.timetuple())[:6])
|
||||
|
||||
if convert_datetime and isinstance(value, datetime.datetime):
|
||||
return timeutils.strtime(value)
|
||||
elif isinstance(value, gettextutils.Message):
|
||||
return value.data
|
||||
elif hasattr(value, 'iteritems'):
|
||||
return recursive(dict(value.iteritems()), level=level + 1)
|
||||
elif hasattr(value, '__iter__'):
|
||||
return recursive(list(value))
|
||||
elif convert_instances and hasattr(value, '__dict__'):
|
||||
# Likely an instance of something. Watch for cycles.
|
||||
# Ignore class member vars.
|
||||
return recursive(value.__dict__, level=level + 1)
|
||||
elif netaddr and isinstance(value, netaddr.IPAddress):
|
||||
return six.text_type(value)
|
||||
else:
|
||||
if any(test(value) for test in _nasty_type_tests):
|
||||
return six.text_type(value)
|
||||
return value
|
||||
except TypeError:
|
||||
# Class objects are tricky since they may define something like
|
||||
# __iter__ defined but it isn't callable as list().
|
||||
return six.text_type(value)
|
||||
|
||||
|
||||
def dumps(value, default=to_primitive, **kwargs):
|
||||
return json.dumps(value, default=default, **kwargs)
|
||||
|
||||
|
||||
def loads(s, encoding='utf-8', **kwargs):
|
||||
return json.loads(strutils.safe_decode(s, encoding), **kwargs)
|
||||
|
||||
|
||||
def load(fp, encoding='utf-8', **kwargs):
|
||||
return json.load(codecs.getreader(encoding)(fp), **kwargs)
|
||||
|
||||
|
||||
try:
|
||||
import anyjson
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
anyjson._modules.append((__name__, 'dumps', TypeError,
|
||||
'loads', ValueError, 'load'))
|
||||
anyjson.force_implementation(__name__)
|
@ -1,5 +1,3 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright (c) 2012 OpenStack Foundation.
# All Rights Reserved.
#
@ -48,6 +46,27 @@ policy rule::

    project_id:%(project_id)s and not role:dunce

It is possible to perform policy checks on the following user
attributes (obtained through the token): user_id, domain_id or
project_id::

    domain_id:<some_value>

Attributes sent along with API calls can be used by the policy engine
(on the right side of the expression), by using the following syntax::

    <some_value>:user.id

Contextual attributes of objects identified by their IDs are loaded
from the database. They are also available to the policy engine and
can be checked through the `target` keyword::

    <some_value>:target.role.name

All these attributes (related to users, API calls, and context) can be
checked against each other or against constants, be it literals (True,
<a_number>) or strings.

Finally, two special policy checks should be mentioned; the policy
check "@" will always accept an access, and the policy check "!" will
always reject an access. (Note that if a rule is either the empty
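The policy language summarised in this docstring can be exercised directly against the synced module. A small illustrative sketch, not part of the diff (the rule name, credentials and target values are made up; it assumes the post-sync glance tree is importable):

    from oslo.config import cfg
    from glance.openstack.common import policy

    cfg.CONF([], project='glance')   # make CONF usable without a config file

    enforcer = policy.Enforcer(use_conf=False)   # never read policy.json
    enforcer.set_rules(policy.Rules.load_json(
        '{"publicize_image": "role:admin or project_id:%(project_id)s"}'))

    creds = {'roles': ['member'], 'project_id': 'p1', 'user_id': 'u1'}

    # True: the project_id credential matches the target's project_id.
    print(enforcer.enforce('publicize_image', {'project_id': 'p1'}, creds))
    # False: neither check passes; with do_raise left False nothing is raised.
    print(enforcer.enforce('publicize_image', {'project_id': 'p2'}, creds))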
@ -58,34 +77,65 @@ as it allows particular rules to be explicitly disabled.
|
||||
|
||||
import abc
|
||||
import ast
|
||||
import copy
|
||||
import os
|
||||
import re
|
||||
import urllib
|
||||
|
||||
from oslo.config import cfg
|
||||
from oslo.serialization import jsonutils
|
||||
import six
|
||||
import urllib2
|
||||
import six.moves.urllib.parse as urlparse
|
||||
import six.moves.urllib.request as urlrequest
|
||||
|
||||
from glance.openstack.common.gettextutils import _
|
||||
from glance.openstack.common import jsonutils
|
||||
from glance.openstack.common import fileutils
|
||||
from glance.openstack.common._i18n import _, _LE, _LW
|
||||
from glance.openstack.common import log as logging
|
||||
|
||||
|
||||
policy_opts = [
|
||||
cfg.StrOpt('policy_file',
|
||||
default='policy.json',
|
||||
help=_('The JSON file that defines policies.')),
|
||||
cfg.StrOpt('policy_default_rule',
|
||||
default='default',
|
||||
help=_('Default rule. Enforced when a requested rule is not '
|
||||
'found.')),
|
||||
cfg.MultiStrOpt('policy_dirs',
|
||||
default=['policy.d'],
|
||||
help=_('Directories where policy configuration files are '
|
||||
'stored. They can be relative to any directory '
|
||||
'in the search path defined by the config_dir '
|
||||
'option, or absolute paths. The file defined by '
|
||||
'policy_file must exist for these directories to '
|
||||
'be searched.')),
|
||||
]
|
||||
|
||||
CONF = cfg.CONF
|
||||
CONF.register_opts(policy_opts)
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
_rules = None
|
||||
_checks = {}
|
||||
|
||||
|
||||
def list_opts():
|
||||
"""Entry point for oslo-config-generator."""
|
||||
return [(None, copy.deepcopy(policy_opts))]
|
||||
|
||||
|
||||
class PolicyNotAuthorized(Exception):
|
||||
|
||||
def __init__(self, rule):
|
||||
msg = _("Policy doesn't allow %s to be performed.") % rule
|
||||
super(PolicyNotAuthorized, self).__init__(msg)
|
||||
|
||||
|
||||
class Rules(dict):
|
||||
"""
|
||||
A store for rules. Handles the default_rule setting directly.
|
||||
"""
|
||||
"""A store for rules. Handles the default_rule setting directly."""
|
||||
|
||||
@classmethod
|
||||
def load_json(cls, data, default_rule=None):
|
||||
"""
|
||||
Allow loading of JSON rule data.
|
||||
"""
|
||||
"""Allow loading of JSON rule data."""
|
||||
|
||||
# Suck in the JSON data and parse the rules
|
||||
rules = dict((k, parse_rule(v)) for k, v in
|
||||
@ -102,12 +152,23 @@ class Rules(dict):
|
||||
def __missing__(self, key):
|
||||
"""Implements the default rule handling."""
|
||||
|
||||
# If the default rule isn't actually defined, do something
|
||||
# reasonably intelligent
|
||||
if not self.default_rule or self.default_rule not in self:
|
||||
if isinstance(self.default_rule, dict):
|
||||
raise KeyError(key)
|
||||
|
||||
return self[self.default_rule]
|
||||
# If the default rule isn't actually defined, do something
|
||||
# reasonably intelligent
|
||||
if not self.default_rule:
|
||||
raise KeyError(key)
|
||||
|
||||
if isinstance(self.default_rule, BaseCheck):
|
||||
return self.default_rule
|
||||
|
||||
# We need to check this or we can get infinite recursion
|
||||
if self.default_rule not in self:
|
||||
raise KeyError(key)
|
||||
|
||||
elif isinstance(self.default_rule, six.string_types):
|
||||
return self[self.default_rule]
|
||||
|
||||
def __str__(self):
|
||||
"""Dumps a string representation of the rules."""
|
||||
@ -125,87 +186,188 @@ class Rules(dict):
|
||||
return jsonutils.dumps(out_rules, indent=4)
|
||||
|
||||
|
||||
# Really have to figure out a way to deprecate this
|
||||
def set_rules(rules):
|
||||
"""Set the rules in use for policy checks."""
|
||||
class Enforcer(object):
|
||||
"""Responsible for loading and enforcing rules.
|
||||
|
||||
global _rules
|
||||
|
||||
_rules = rules
|
||||
|
||||
|
||||
# Ditto
|
||||
def reset():
|
||||
"""Clear the rules used for policy checks."""
|
||||
|
||||
global _rules
|
||||
|
||||
_rules = None
|
||||
|
||||
|
||||
def check(rule, target, creds, exc=None, *args, **kwargs):
|
||||
"""
|
||||
Checks authorization of a rule against the target and credentials.
|
||||
|
||||
:param rule: The rule to evaluate.
|
||||
:param target: As much information about the object being operated
|
||||
on as possible, as a dictionary.
|
||||
:param creds: As much information about the user performing the
|
||||
action as possible, as a dictionary.
|
||||
:param exc: Class of the exception to raise if the check fails.
|
||||
Any remaining arguments passed to check() (both
|
||||
positional and keyword arguments) will be passed to
|
||||
the exception class. If exc is not provided, returns
|
||||
False.
|
||||
|
||||
:return: Returns False if the policy does not allow the action and
|
||||
exc is not provided; otherwise, returns a value that
|
||||
evaluates to True. Note: for rules using the "case"
|
||||
expression, this True value will be the specified string
|
||||
from the expression.
|
||||
:param policy_file: Custom policy file to use, if none is
|
||||
specified, `CONF.policy_file` will be
|
||||
used.
|
||||
:param rules: Default dictionary / Rules to use. It will be
|
||||
considered just in the first instantiation. If
|
||||
`load_rules(True)`, `clear()` or `set_rules(True)`
|
||||
is called this will be overwritten.
|
||||
:param default_rule: Default rule to use, CONF.default_rule will
|
||||
be used if none is specified.
|
||||
:param use_conf: Whether to load rules from cache or config file.
|
||||
:param overwrite: Whether to overwrite existing rules when reload rules
|
||||
from config file.
|
||||
"""
|
||||
|
||||
# Allow the rule to be a Check tree
|
||||
if isinstance(rule, BaseCheck):
|
||||
result = rule(target, creds)
|
||||
elif not _rules:
|
||||
# No rules to reference means we're going to fail closed
|
||||
result = False
|
||||
else:
|
||||
try:
|
||||
# Evaluate the rule
|
||||
result = _rules[rule](target, creds)
|
||||
except KeyError:
|
||||
# If the rule doesn't exist, fail closed
|
||||
def __init__(self, policy_file=None, rules=None,
|
||||
default_rule=None, use_conf=True, overwrite=True):
|
||||
self.default_rule = default_rule or CONF.policy_default_rule
|
||||
self.rules = Rules(rules, self.default_rule)
|
||||
|
||||
self.policy_path = None
|
||||
self.policy_file = policy_file or CONF.policy_file
|
||||
self.use_conf = use_conf
|
||||
self.overwrite = overwrite
|
||||
|
||||
def set_rules(self, rules, overwrite=True, use_conf=False):
|
||||
"""Create a new Rules object based on the provided dict of rules.
|
||||
|
||||
:param rules: New rules to use. It should be an instance of dict.
|
||||
:param overwrite: Whether to overwrite current rules or update them
|
||||
with the new rules.
|
||||
:param use_conf: Whether to reload rules from cache or config file.
|
||||
"""
|
||||
|
||||
if not isinstance(rules, dict):
|
||||
raise TypeError(_("Rules must be an instance of dict or Rules, "
|
||||
"got %s instead") % type(rules))
|
||||
self.use_conf = use_conf
|
||||
if overwrite:
|
||||
self.rules = Rules(rules, self.default_rule)
|
||||
else:
|
||||
self.rules.update(rules)
|
||||
|
||||
def clear(self):
|
||||
"""Clears Enforcer rules, policy's cache and policy's path."""
|
||||
self.set_rules({})
|
||||
fileutils.delete_cached_file(self.policy_path)
|
||||
self.default_rule = None
|
||||
self.policy_path = None
|
||||
|
||||
def load_rules(self, force_reload=False):
|
||||
"""Loads policy_path's rules.
|
||||
|
||||
Policy file is cached and will be reloaded if modified.
|
||||
|
||||
:param force_reload: Whether to reload rules from config file.
|
||||
"""
|
||||
|
||||
if force_reload:
|
||||
self.use_conf = force_reload
|
||||
|
||||
if self.use_conf:
|
||||
if not self.policy_path:
|
||||
self.policy_path = self._get_policy_path(self.policy_file)
|
||||
|
||||
self._load_policy_file(self.policy_path, force_reload,
|
||||
overwrite=self.overwrite)
|
||||
for path in CONF.policy_dirs:
|
||||
try:
|
||||
path = self._get_policy_path(path)
|
||||
except cfg.ConfigFilesNotFoundError:
|
||||
LOG.warn(_LW("Can not find policy directory: %s"), path)
|
||||
continue
|
||||
self._walk_through_policy_directory(path,
|
||||
self._load_policy_file,
|
||||
force_reload, False)
|
||||
|
||||
@staticmethod
|
||||
def _walk_through_policy_directory(path, func, *args):
|
||||
# We do not iterate over sub-directories.
|
||||
policy_files = next(os.walk(path))[2]
|
||||
policy_files.sort()
|
||||
for policy_file in [p for p in policy_files if not p.startswith('.')]:
|
||||
func(os.path.join(path, policy_file), *args)
|
||||
|
||||
def _load_policy_file(self, path, force_reload, overwrite=True):
|
||||
reloaded, data = fileutils.read_cached_file(
|
||||
path, force_reload=force_reload)
|
||||
if reloaded or not self.rules or not overwrite:
|
||||
rules = Rules.load_json(data, self.default_rule)
|
||||
self.set_rules(rules, overwrite=overwrite, use_conf=True)
|
||||
LOG.debug("Rules successfully reloaded")
|
||||
|
||||
def _get_policy_path(self, path):
|
||||
"""Locate the policy json data file/path.
|
||||
|
||||
:param path: It's value can be a full path or related path. When
|
||||
full path specified, this function just returns the full
|
||||
path. When related path specified, this function will
|
||||
search configuration directories to find one that exists.
|
||||
|
||||
:returns: The policy path
|
||||
|
||||
:raises: ConfigFilesNotFoundError if the file/path couldn't
|
||||
be located.
|
||||
"""
|
||||
policy_path = CONF.find_file(path)
|
||||
|
||||
if policy_path:
|
||||
return policy_path
|
||||
|
||||
raise cfg.ConfigFilesNotFoundError((path,))
|
||||
|
||||
def enforce(self, rule, target, creds, do_raise=False,
|
||||
exc=None, *args, **kwargs):
|
||||
"""Checks authorization of a rule against the target and credentials.
|
||||
|
||||
:param rule: A string or BaseCheck instance specifying the rule
|
||||
to evaluate.
|
||||
:param target: As much information about the object being operated
|
||||
on as possible, as a dictionary.
|
||||
:param creds: As much information about the user performing the
|
||||
action as possible, as a dictionary.
|
||||
:param do_raise: Whether to raise an exception or not if check
|
||||
fails.
|
||||
:param exc: Class of the exception to raise if the check fails.
|
||||
Any remaining arguments passed to enforce() (both
|
||||
positional and keyword arguments) will be passed to
|
||||
the exception class. If not specified, PolicyNotAuthorized
|
||||
will be used.
|
||||
|
||||
:return: Returns False if the policy does not allow the action and
|
||||
exc is not provided; otherwise, returns a value that
|
||||
evaluates to True. Note: for rules using the "case"
|
||||
expression, this True value will be the specified string
|
||||
from the expression.
|
||||
"""
|
||||
|
||||
self.load_rules()
|
||||
|
||||
# Allow the rule to be a Check tree
|
||||
if isinstance(rule, BaseCheck):
|
||||
result = rule(target, creds, self)
|
||||
elif not self.rules:
|
||||
# No rules to reference means we're going to fail closed
|
||||
result = False
|
||||
else:
|
||||
try:
|
||||
# Evaluate the rule
|
||||
result = self.rules[rule](target, creds, self)
|
||||
except KeyError:
|
||||
LOG.debug("Rule [%s] doesn't exist" % rule)
|
||||
# If the rule doesn't exist, fail closed
|
||||
result = False
|
||||
|
||||
# If it is False, raise the exception if requested
|
||||
if exc and result is False:
|
||||
raise exc(*args, **kwargs)
|
||||
# If it is False, raise the exception if requested
|
||||
if do_raise and not result:
|
||||
if exc:
|
||||
raise exc(*args, **kwargs)
|
||||
|
||||
return result
|
||||
raise PolicyNotAuthorized(rule)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class BaseCheck(object):
|
||||
"""
|
||||
Abstract base class for Check classes.
|
||||
"""
|
||||
|
||||
__metaclass__ = abc.ABCMeta
|
||||
"""Abstract base class for Check classes."""
|
||||
|
||||
@abc.abstractmethod
|
||||
def __str__(self):
|
||||
"""
|
||||
Retrieve a string representation of the Check tree rooted at
|
||||
this node.
|
||||
"""
|
||||
"""String representation of the Check tree rooted at this node."""
|
||||
|
||||
pass
|
||||
|
||||
@abc.abstractmethod
|
||||
def __call__(self, target, cred):
|
||||
"""
|
||||
Perform the check. Returns False to reject the access or a
|
||||
def __call__(self, target, cred, enforcer):
|
||||
"""Triggers if instance of the class is called.
|
||||
|
||||
Performs the check. Returns False to reject the access or a
|
||||
true value (not necessary True) to accept the access.
|
||||
"""
|
||||
|
||||
@ -213,44 +375,39 @@ class BaseCheck(object):
|
||||
|
||||
|
||||
class FalseCheck(BaseCheck):
|
||||
"""
|
||||
A policy check that always returns False (disallow).
|
||||
"""
|
||||
"""A policy check that always returns False (disallow)."""
|
||||
|
||||
def __str__(self):
|
||||
"""Return a string representation of this check."""
|
||||
|
||||
return "!"
|
||||
|
||||
def __call__(self, target, cred):
|
||||
def __call__(self, target, cred, enforcer):
|
||||
"""Check the policy."""
|
||||
|
||||
return False
|
||||
|
||||
|
||||
class TrueCheck(BaseCheck):
|
||||
"""
|
||||
A policy check that always returns True (allow).
|
||||
"""
|
||||
"""A policy check that always returns True (allow)."""
|
||||
|
||||
def __str__(self):
|
||||
"""Return a string representation of this check."""
|
||||
|
||||
return "@"
|
||||
|
||||
def __call__(self, target, cred):
|
||||
def __call__(self, target, cred, enforcer):
|
||||
"""Check the policy."""
|
||||
|
||||
return True
|
||||
|
||||
|
||||
class Check(BaseCheck):
|
||||
"""
|
||||
A base class to allow for user-defined policy checks.
|
||||
"""
|
||||
"""A base class to allow for user-defined policy checks."""
|
||||
|
||||
def __init__(self, kind, match):
|
||||
"""
|
||||
"""Initiates Check instance.
|
||||
|
||||
:param kind: The kind of the check, i.e., the field before the
|
||||
':'.
|
||||
:param match: The match of the check, i.e., the field after
|
||||
@ -267,14 +424,13 @@ class Check(BaseCheck):
|
||||
|
||||
|
||||
class NotCheck(BaseCheck):
|
||||
"""
|
||||
"""Implements the "not" logical operator.
|
||||
|
||||
A policy check that inverts the result of another policy check.
|
||||
Implements the "not" operator.
|
||||
"""
|
||||
|
||||
def __init__(self, rule):
|
||||
"""
|
||||
Initialize the 'not' check.
|
||||
"""Initialize the 'not' check.
|
||||
|
||||
:param rule: The rule to negate. Must be a Check.
|
||||
"""
|
||||
@ -286,24 +442,23 @@ class NotCheck(BaseCheck):
|
||||
|
||||
return "not %s" % self.rule
|
||||
|
||||
def __call__(self, target, cred):
|
||||
"""
|
||||
Check the policy. Returns the logical inverse of the wrapped
|
||||
check.
|
||||
def __call__(self, target, cred, enforcer):
|
||||
"""Check the policy.
|
||||
|
||||
Returns the logical inverse of the wrapped check.
|
||||
"""
|
||||
|
||||
return not self.rule(target, cred)
|
||||
return not self.rule(target, cred, enforcer)
|
||||
|
||||
|
||||
class AndCheck(BaseCheck):
|
||||
"""
|
||||
A policy check that requires that a list of other checks all
|
||||
return True. Implements the "and" operator.
|
||||
"""Implements the "and" logical operator.
|
||||
|
||||
A policy check that requires that a list of other checks all return True.
|
||||
"""
|
||||
|
||||
def __init__(self, rules):
|
||||
"""
|
||||
Initialize the 'and' check.
|
||||
"""Initialize the 'and' check.
|
||||
|
||||
:param rules: A list of rules that will be tested.
|
||||
"""
|
||||
@ -315,20 +470,21 @@ class AndCheck(BaseCheck):
|
||||
|
||||
return "(%s)" % ' and '.join(str(r) for r in self.rules)
|
||||
|
||||
def __call__(self, target, cred):
|
||||
"""
|
||||
Check the policy. Requires that all rules accept in order to
|
||||
return True.
|
||||
def __call__(self, target, cred, enforcer):
|
||||
"""Check the policy.
|
||||
|
||||
Requires that all rules accept in order to return True.
|
||||
"""
|
||||
|
||||
for rule in self.rules:
|
||||
if not rule(target, cred):
|
||||
if not rule(target, cred, enforcer):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def add_check(self, rule):
|
||||
"""
|
||||
"""Adds rule to be tested.
|
||||
|
||||
Allows addition of another rule to the list of rules that will
|
||||
be tested. Returns the AndCheck object for convenience.
|
||||
"""
|
||||
@ -338,14 +494,14 @@ class AndCheck(BaseCheck):
|
||||
|
||||
|
||||
class OrCheck(BaseCheck):
|
||||
"""
|
||||
"""Implements the "or" operator.
|
||||
|
||||
A policy check that requires that at least one of a list of other
|
||||
checks returns True. Implements the "or" operator.
|
||||
checks returns True.
|
||||
"""
|
||||
|
||||
def __init__(self, rules):
|
||||
"""
|
||||
Initialize the 'or' check.
|
||||
"""Initialize the 'or' check.
|
||||
|
||||
:param rules: A list of rules that will be tested.
|
||||
"""
|
||||
@ -357,20 +513,20 @@ class OrCheck(BaseCheck):
|
||||
|
||||
return "(%s)" % ' or '.join(str(r) for r in self.rules)
|
||||
|
||||
def __call__(self, target, cred):
|
||||
"""
|
||||
Check the policy. Requires that at least one rule accept in
|
||||
order to return True.
|
||||
def __call__(self, target, cred, enforcer):
|
||||
"""Check the policy.
|
||||
|
||||
Requires that at least one rule accept in order to return True.
|
||||
"""
|
||||
|
||||
for rule in self.rules:
|
||||
if rule(target, cred):
|
||||
if rule(target, cred, enforcer):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def add_check(self, rule):
|
||||
"""
|
||||
"""Adds rule to be tested.
|
||||
|
||||
Allows addition of another rule to the list of rules that will
|
||||
be tested. Returns the OrCheck object for convenience.
|
||||
"""
|
||||
@ -380,9 +536,7 @@ class OrCheck(BaseCheck):
|
||||
|
||||
|
||||
def _parse_check(rule):
|
||||
"""
|
||||
Parse a single base check rule into an appropriate Check object.
|
||||
"""
|
||||
"""Parse a single base check rule into an appropriate Check object."""
|
||||
|
||||
# Handle the special checks
|
||||
if rule == '!':
|
||||
@ -393,7 +547,7 @@ def _parse_check(rule):
|
||||
try:
|
||||
kind, match = rule.split(':', 1)
|
||||
except Exception:
|
||||
LOG.exception(_("Failed to understand rule %(rule)s") % locals())
|
||||
LOG.exception(_LE("Failed to understand rule %s") % rule)
|
||||
# If the rule is invalid, we'll fail closed
|
||||
return FalseCheck()
|
||||
|
||||
@ -403,14 +557,14 @@ def _parse_check(rule):
|
||||
elif None in _checks:
|
||||
return _checks[None](kind, match)
|
||||
else:
|
||||
LOG.error(_("No handler for matches of kind %s") % kind)
|
||||
LOG.error(_LE("No handler for matches of kind %s") % kind)
|
||||
return FalseCheck()
|
||||
|
||||
|
||||
def _parse_list_rule(rule):
|
||||
"""
|
||||
Provided for backwards compatibility. Translates the old
|
||||
list-of-lists syntax into a tree of Check objects.
|
||||
"""Translates the old list-of-lists syntax into a tree of Check objects.
|
||||
|
||||
Provided for backwards compatibility.
|
||||
"""
|
||||
|
||||
# Empty rule defaults to True
|
||||
@ -425,7 +579,7 @@ def _parse_list_rule(rule):
|
||||
continue
|
||||
|
||||
# Handle bare strings
|
||||
if isinstance(inner_rule, basestring):
|
||||
if isinstance(inner_rule, six.string_types):
|
||||
inner_rule = [inner_rule]
|
||||
|
||||
# Parse the inner rules into Check objects
|
||||
@ -451,8 +605,7 @@ _tokenize_re = re.compile(r'\s+')
|
||||
|
||||
|
||||
def _parse_tokenize(rule):
|
||||
"""
|
||||
Tokenizer for the policy language.
|
||||
"""Tokenizer for the policy language.
|
||||
|
||||
Most of the single-character tokens are specified in the
|
||||
_tokenize_re; however, parentheses need to be handled specially,
|
||||
@ -501,16 +654,16 @@ def _parse_tokenize(rule):
|
||||
|
||||
|
||||
class ParseStateMeta(type):
|
||||
"""
|
||||
Metaclass for the ParseState class. Facilitates identifying
|
||||
reduction methods.
|
||||
"""Metaclass for the ParseState class.
|
||||
|
||||
Facilitates identifying reduction methods.
|
||||
"""
|
||||
|
||||
def __new__(mcs, name, bases, cls_dict):
|
||||
"""
|
||||
Create the class. Injects the 'reducers' list, a list of
|
||||
tuples matching token sequences to the names of the
|
||||
corresponding reduction methods.
|
||||
"""Create the class.
|
||||
|
||||
Injects the 'reducers' list, a list of tuples matching token sequences
|
||||
to the names of the corresponding reduction methods.
|
||||
"""
|
||||
|
||||
reducers = []
|
||||
@ -527,10 +680,10 @@ class ParseStateMeta(type):
|
||||
|
||||
|
||||
def reducer(*tokens):
|
||||
"""
|
||||
Decorator for reduction methods. Arguments are a sequence of
|
||||
tokens, in order, which should trigger running this reduction
|
||||
method.
|
||||
"""Decorator for reduction methods.
|
||||
|
||||
Arguments are a sequence of tokens, in order, which should trigger running
|
||||
this reduction method.
|
||||
"""
|
||||
|
||||
def decorator(func):
|
||||
@ -546,11 +699,12 @@ def reducer(*tokens):
|
||||
return decorator
|
||||
|
||||
|
||||
@six.add_metaclass(ParseStateMeta)
|
||||
class ParseState(object):
|
||||
"""
|
||||
Implement the core of parsing the policy language. Uses a greedy
|
||||
reduction algorithm to reduce a sequence of tokens into a single
|
||||
terminal, the value of which will be the root of the Check tree.
|
||||
"""Implement the core of parsing the policy language.
|
||||
|
||||
Uses a greedy reduction algorithm to reduce a sequence of tokens into
|
||||
a single terminal, the value of which will be the root of the Check tree.
|
||||
|
||||
Note: error reporting is rather lacking. The best we can get with
|
||||
this parser formulation is an overall "parse failed" error.
|
||||
@ -558,8 +712,6 @@ class ParseState(object):
|
||||
shouldn't be that big a problem.
|
||||
"""
|
||||
|
||||
__metaclass__ = ParseStateMeta
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize the ParseState."""
|
||||
|
||||
@ -567,11 +719,11 @@ class ParseState(object):
|
||||
self.values = []
|
||||
|
||||
def reduce(self):
|
||||
"""
|
||||
Perform a greedy reduction of the token stream. If a reducer
|
||||
method matches, it will be executed, then the reduce() method
|
||||
will be called recursively to search for any more possible
|
||||
reductions.
|
||||
"""Perform a greedy reduction of the token stream.
|
||||
|
||||
If a reducer method matches, it will be executed, then the
|
||||
reduce() method will be called recursively to search for any more
|
||||
possible reductions.
|
||||
"""
|
||||
|
||||
for reduction, methname in self.reducers:
|
||||
@ -601,9 +753,9 @@ class ParseState(object):
|
||||
|
||||
@property
|
||||
def result(self):
|
||||
"""
|
||||
Obtain the final result of the parse. Raises ValueError if
|
||||
the parse failed to reduce to a single result.
|
||||
"""Obtain the final result of the parse.
|
||||
|
||||
Raises ValueError if the parse failed to reduce to a single result.
|
||||
"""
|
||||
|
||||
if len(self.values) != 1:
|
||||
@ -620,35 +772,31 @@ class ParseState(object):
|
||||
|
||||
@reducer('check', 'and', 'check')
|
||||
def _make_and_expr(self, check1, _and, check2):
|
||||
"""
|
||||
Create an 'and_expr' from two checks joined by the 'and'
|
||||
operator.
|
||||
"""Create an 'and_expr'.
|
||||
|
||||
Join two checks by the 'and' operator.
|
||||
"""
|
||||
|
||||
return [('and_expr', AndCheck([check1, check2]))]
|
||||
|
||||
@reducer('and_expr', 'and', 'check')
|
||||
def _extend_and_expr(self, and_expr, _and, check):
|
||||
"""
|
||||
Extend an 'and_expr' by adding one more check.
|
||||
"""
|
||||
"""Extend an 'and_expr' by adding one more check."""
|
||||
|
||||
return [('and_expr', and_expr.add_check(check))]
|
||||
|
||||
@reducer('check', 'or', 'check')
|
||||
def _make_or_expr(self, check1, _or, check2):
|
||||
"""
|
||||
Create an 'or_expr' from two checks joined by the 'or'
|
||||
operator.
|
||||
"""Create an 'or_expr'.
|
||||
|
||||
Join two checks by the 'or' operator.
|
||||
"""
|
||||
|
||||
return [('or_expr', OrCheck([check1, check2]))]
|
||||
|
||||
@reducer('or_expr', 'or', 'check')
|
||||
def _extend_or_expr(self, or_expr, _or, check):
|
||||
"""
|
||||
Extend an 'or_expr' by adding one more check.
|
||||
"""
|
||||
"""Extend an 'or_expr' by adding one more check."""
|
||||
|
||||
return [('or_expr', or_expr.add_check(check))]
|
||||
|
||||
@ -660,7 +808,8 @@ class ParseState(object):
|
||||
|
||||
|
||||
def _parse_text_rule(rule):
|
||||
"""
|
||||
"""Parses policy to the tree.
|
||||
|
||||
Translates a policy written in the policy language into a tree of
|
||||
Check objects.
|
||||
"""
|
||||
@ -678,26 +827,23 @@ def _parse_text_rule(rule):
|
||||
return state.result
|
||||
except ValueError:
|
||||
# Couldn't parse the rule
|
||||
LOG.exception(_("Failed to understand rule %(rule)r") % locals())
|
||||
LOG.exception(_LE("Failed to understand rule %s") % rule)
|
||||
|
||||
# Fail closed
|
||||
return FalseCheck()
|
||||
|
||||
|
||||
def parse_rule(rule):
|
||||
"""
|
||||
Parses a policy rule into a tree of Check objects.
|
||||
"""
|
||||
"""Parses a policy rule into a tree of Check objects."""
|
||||
|
||||
# If the rule is a string, it's in the policy language
|
||||
if isinstance(rule, basestring):
|
||||
if isinstance(rule, six.string_types):
|
||||
return _parse_text_rule(rule)
|
||||
return _parse_list_rule(rule)
|
||||
|
||||
|
||||
def register(name, func=None):
|
||||
"""
|
||||
Register a function or Check class as a policy check.
|
||||
"""Register a function or Check class as a policy check.
|
||||
|
||||
:param name: Gives the name of the check type, e.g., 'rule',
|
||||
'role', etc. If name is None, a default check type
|
||||
@ -724,13 +870,11 @@ def register(name, func=None):
|
||||
|
||||
@register("rule")
|
||||
class RuleCheck(Check):
|
||||
def __call__(self, target, creds):
|
||||
"""
|
||||
Recursively checks credentials based on the defined rules.
|
||||
"""
|
||||
def __call__(self, target, creds, enforcer):
|
||||
"""Recursively checks credentials based on the defined rules."""
|
||||
|
||||
try:
|
||||
return _rules[self.match](target, creds)
|
||||
return enforcer.rules[self.match](target, creds, enforcer)
|
||||
except KeyError:
|
||||
# We don't have any matching rule; fail closed
|
||||
return False
|
||||
@ -738,7 +882,7 @@ class RuleCheck(Check):
|
||||
|
||||
@register("role")
|
||||
class RoleCheck(Check):
|
||||
def __call__(self, target, creds):
|
||||
def __call__(self, target, creds, enforcer):
|
||||
"""Check that there is a matching role in the cred dict."""
|
||||
|
||||
return self.match.lower() in [x.lower() for x in creds['roles']]
|
||||
@ -746,9 +890,8 @@ class RoleCheck(Check):
|
||||
|
||||
@register('http')
|
||||
class HttpCheck(Check):
|
||||
def __call__(self, target, creds):
|
||||
"""
|
||||
Check http: rules by calling to a remote server.
|
||||
def __call__(self, target, creds, enforcer):
|
||||
"""Check http: rules by calling to a remote server.
|
||||
|
||||
This example implementation simply verifies that the response
|
||||
is exactly 'True'.
|
||||
@ -757,32 +900,40 @@ class HttpCheck(Check):
|
||||
url = ('http:' + self.match) % target
|
||||
data = {'target': jsonutils.dumps(target),
|
||||
'credentials': jsonutils.dumps(creds)}
|
||||
post_data = urllib.urlencode(data)
|
||||
f = urllib2.urlopen(url, post_data)
|
||||
post_data = urlparse.urlencode(data)
|
||||
f = urlrequest.urlopen(url, post_data)
|
||||
return f.read() == "True"
|
||||
|
||||
|
||||
@register(None)
|
||||
class GenericCheck(Check):
|
||||
def __call__(self, target, creds):
|
||||
"""
|
||||
Check an individual match.
|
||||
def __call__(self, target, creds, enforcer):
|
||||
"""Check an individual match.
|
||||
|
||||
Matches look like:
|
||||
|
||||
tenant:%(tenant_id)s
|
||||
role:compute:admin
|
||||
True:%(user.enabled)s
|
||||
'Member':%(role.name)s
|
||||
"""
|
||||
|
||||
# TODO(termie): do dict inspection via dot syntax
|
||||
match = self.match % target
|
||||
try:
|
||||
match = self.match % target
|
||||
except KeyError:
|
||||
# While doing GenericCheck if key not
|
||||
# present in Target return false
|
||||
return False
|
||||
|
||||
try:
|
||||
# Try to interpret self.kind as a literal
|
||||
leftval = ast.literal_eval(self.kind)
|
||||
except ValueError:
|
||||
try:
|
||||
leftval = creds[self.kind]
|
||||
kind_parts = self.kind.split('.')
|
||||
leftval = creds
|
||||
for kind_part in kind_parts:
|
||||
leftval = leftval[kind_part]
|
||||
except KeyError:
|
||||
return False
|
||||
return match == six.text_type(leftval)
|
||||
|
@ -1,239 +0,0 @@
|
||||
# Copyright 2011 OpenStack Foundation.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
System-level utilities and helper functions.
|
||||
"""
|
||||
|
||||
import math
|
||||
import re
|
||||
import sys
|
||||
import unicodedata
|
||||
|
||||
import six
|
||||
|
||||
from glance.openstack.common.gettextutils import _
|
||||
|
||||
|
||||
UNIT_PREFIX_EXPONENT = {
|
||||
'k': 1,
|
||||
'K': 1,
|
||||
'Ki': 1,
|
||||
'M': 2,
|
||||
'Mi': 2,
|
||||
'G': 3,
|
||||
'Gi': 3,
|
||||
'T': 4,
|
||||
'Ti': 4,
|
||||
}
|
||||
UNIT_SYSTEM_INFO = {
|
||||
'IEC': (1024, re.compile(r'(^[-+]?\d*\.?\d+)([KMGT]i?)?(b|bit|B)$')),
|
||||
'SI': (1000, re.compile(r'(^[-+]?\d*\.?\d+)([kMGT])?(b|bit|B)$')),
|
||||
}
|
||||
|
||||
TRUE_STRINGS = ('1', 't', 'true', 'on', 'y', 'yes')
|
||||
FALSE_STRINGS = ('0', 'f', 'false', 'off', 'n', 'no')
|
||||
|
||||
SLUGIFY_STRIP_RE = re.compile(r"[^\w\s-]")
|
||||
SLUGIFY_HYPHENATE_RE = re.compile(r"[-\s]+")
|
||||
|
||||
|
||||
def int_from_bool_as_string(subject):
|
||||
"""Interpret a string as a boolean and return either 1 or 0.
|
||||
|
||||
Any string value in:
|
||||
|
||||
('True', 'true', 'On', 'on', '1')
|
||||
|
||||
is interpreted as a boolean True.
|
||||
|
||||
Useful for JSON-decoded stuff and config file parsing
|
||||
"""
|
||||
return bool_from_string(subject) and 1 or 0
|
||||
|
||||
|
||||
def bool_from_string(subject, strict=False, default=False):
|
||||
"""Interpret a string as a boolean.
|
||||
|
||||
A case-insensitive match is performed such that strings matching 't',
|
||||
'true', 'on', 'y', 'yes', or '1' are considered True and, when
|
||||
`strict=False`, anything else returns the value specified by 'default'.
|
||||
|
||||
Useful for JSON-decoded stuff and config file parsing.
|
||||
|
||||
If `strict=True`, unrecognized values, including None, will raise a
|
||||
ValueError which is useful when parsing values passed in from an API call.
|
||||
Strings yielding False are 'f', 'false', 'off', 'n', 'no', or '0'.
|
||||
"""
|
||||
if not isinstance(subject, six.string_types):
|
||||
subject = six.text_type(subject)
|
||||
|
||||
lowered = subject.strip().lower()
|
||||
|
||||
if lowered in TRUE_STRINGS:
|
||||
return True
|
||||
elif lowered in FALSE_STRINGS:
|
||||
return False
|
||||
elif strict:
|
||||
acceptable = ', '.join(
|
||||
"'%s'" % s for s in sorted(TRUE_STRINGS + FALSE_STRINGS))
|
||||
msg = _("Unrecognized value '%(val)s', acceptable values are:"
|
||||
" %(acceptable)s") % {'val': subject,
|
||||
'acceptable': acceptable}
|
||||
raise ValueError(msg)
|
||||
else:
|
||||
return default
|
||||
|
||||
|
||||
def safe_decode(text, incoming=None, errors='strict'):
|
||||
"""Decodes incoming text/bytes string using `incoming` if they're not
|
||||
already unicode.
|
||||
|
||||
:param incoming: Text's current encoding
|
||||
:param errors: Errors handling policy. See here for valid
|
||||
values http://docs.python.org/2/library/codecs.html
|
||||
:returns: text or a unicode `incoming` encoded
|
||||
representation of it.
|
||||
:raises TypeError: If text is not an instance of str
|
||||
"""
|
||||
if not isinstance(text, (six.string_types, six.binary_type)):
|
||||
raise TypeError("%s can't be decoded" % type(text))
|
||||
|
||||
if isinstance(text, six.text_type):
|
||||
return text
|
||||
|
||||
if not incoming:
|
||||
incoming = (sys.stdin.encoding or
|
||||
sys.getdefaultencoding())
|
||||
|
||||
try:
|
||||
return text.decode(incoming, errors)
|
||||
except UnicodeDecodeError:
|
||||
# Note(flaper87) If we get here, it means that
|
||||
# sys.stdin.encoding / sys.getdefaultencoding
|
||||
# didn't return a suitable encoding to decode
|
||||
# text. This happens mostly when global LANG
|
||||
# var is not set correctly and there's no
|
||||
# default encoding. In this case, most likely
|
||||
# python will use ASCII or ANSI encoders as
|
||||
# default encodings but they won't be capable
|
||||
# of decoding non-ASCII characters.
|
||||
#
|
||||
# Also, UTF-8 is being used since it's an ASCII
|
||||
# extension.
|
||||
return text.decode('utf-8', errors)
|
||||
|
||||
|
||||
def safe_encode(text, incoming=None,
|
||||
encoding='utf-8', errors='strict'):
|
||||
"""Encodes incoming text/bytes string using `encoding`.
|
||||
|
||||
If incoming is not specified, text is expected to be encoded with
|
||||
current python's default encoding. (`sys.getdefaultencoding`)
|
||||
|
||||
:param incoming: Text's current encoding
|
||||
:param encoding: Expected encoding for text (Default UTF-8)
|
||||
:param errors: Errors handling policy. See here for valid
|
||||
values http://docs.python.org/2/library/codecs.html
|
||||
:returns: text or a bytestring `encoding` encoded
|
||||
representation of it.
|
||||
:raises TypeError: If text is not an instance of str
|
||||
"""
|
||||
if not isinstance(text, (six.string_types, six.binary_type)):
|
||||
raise TypeError("%s can't be encoded" % type(text))
|
||||
|
||||
if not incoming:
|
||||
incoming = (sys.stdin.encoding or
|
||||
sys.getdefaultencoding())
|
||||
|
||||
if isinstance(text, six.text_type):
|
||||
return text.encode(encoding, errors)
|
||||
elif text and encoding != incoming:
|
||||
# Decode text before encoding it with `encoding`
|
||||
text = safe_decode(text, incoming, errors)
|
||||
return text.encode(encoding, errors)
|
||||
else:
|
||||
return text
|
||||
|
||||
|
||||
def string_to_bytes(text, unit_system='IEC', return_int=False):
|
||||
"""Converts a string into an float representation of bytes.
|
||||
|
||||
The units supported for IEC ::
|
||||
|
||||
Kb(it), Kib(it), Mb(it), Mib(it), Gb(it), Gib(it), Tb(it), Tib(it)
|
||||
KB, KiB, MB, MiB, GB, GiB, TB, TiB
|
||||
|
||||
The units supported for SI ::
|
||||
|
||||
kb(it), Mb(it), Gb(it), Tb(it)
|
||||
kB, MB, GB, TB
|
||||
|
||||
Note that the SI unit system does not support capital letter 'K'
|
||||
|
||||
:param text: String input for bytes size conversion.
|
||||
:param unit_system: Unit system for byte size conversion.
|
||||
:param return_int: If True, returns integer representation of text
|
||||
in bytes. (default: decimal)
|
||||
:returns: Numerical representation of text in bytes.
|
||||
:raises ValueError: If text has an invalid value.
|
||||
|
||||
"""
|
||||
try:
|
||||
base, reg_ex = UNIT_SYSTEM_INFO[unit_system]
|
||||
except KeyError:
|
||||
msg = _('Invalid unit system: "%s"') % unit_system
|
||||
raise ValueError(msg)
|
||||
match = reg_ex.match(text)
|
||||
if match:
|
||||
magnitude = float(match.group(1))
|
||||
unit_prefix = match.group(2)
|
||||
if match.group(3) in ['b', 'bit']:
|
||||
magnitude /= 8
|
||||
else:
|
||||
msg = _('Invalid string format: %s') % text
|
||||
raise ValueError(msg)
|
||||
if not unit_prefix:
|
||||
res = magnitude
|
||||
else:
|
||||
res = magnitude * pow(base, UNIT_PREFIX_EXPONENT[unit_prefix])
|
||||
if return_int:
|
||||
return int(math.ceil(res))
|
||||
return res
|
||||
|
||||
|
||||
def to_slug(value, incoming=None, errors="strict"):
|
||||
"""Normalize string.
|
||||
|
||||
Convert to lowercase, remove non-word characters, and convert spaces
|
||||
to hyphens.
|
||||
|
||||
Inspired by Django's `slugify` filter.
|
||||
|
||||
:param value: Text to slugify
|
||||
:param incoming: Text's current encoding
|
||||
:param errors: Errors handling policy. See here for valid
|
||||
values http://docs.python.org/2/library/codecs.html
|
||||
:returns: slugified unicode representation of `value`
|
||||
:raises TypeError: If text is not an instance of str
|
||||
"""
|
||||
value = safe_decode(value, incoming, errors)
|
||||
# NOTE(aababilov): no need to use safe_(encode|decode) here:
|
||||
# encodings are always "ascii", error handling is always "ignore"
|
||||
# and types are always known (first: unicode; second: str)
|
||||
value = unicodedata.normalize("NFKD", value).encode(
|
||||
"ascii", "ignore").decode("ascii")
|
||||
value = SLUGIFY_STRIP_RE.sub("", value).strip().lower()
|
||||
return SLUGIFY_HYPHENATE_RE.sub("-", value)
|
@@ -24,7 +24,6 @@ import copy
import itertools

import glance.api.middleware.context
import glance.api.policy
import glance.api.versions
import glance.common.config
import glance.common.location_strategy
@@ -37,6 +36,7 @@ import glance.image_cache.drivers.sqlite
import glance.notifier
import glance.openstack.common.lockutils
import glance.openstack.common.log
import glance.openstack.common.policy
import glance.registry
import glance.registry.client
import glance.registry.client.v1.api
@@ -50,7 +50,6 @@ _global_opt_lists = [
_api_opts = [
    (None, list(itertools.chain(*(_global_opt_lists + [
        glance.api.middleware.context.context_opts,
        glance.api.policy.policy_opts,
        glance.api.versions.versions_opts,
        glance.common.config.common_opts,
        glance.common.location_strategy.location_strategy_opts,
@@ -67,6 +66,7 @@ _api_opts = [
        glance.registry.client.registry_client_opts,
        glance.registry.client.v1.api.registry_client_ctx_opts,
        glance.openstack.common.lockutils.util_opts,
        glance.openstack.common.policy.policy_opts,
        glance.scrubber.scrubber_opts])))),
    ('image_format', glance.common.config.image_format_opts),
    ('task', glance.common.config.task_opts),
@@ -77,18 +77,18 @@ _api_opts = [
_registry_opts = [
    (None, list(itertools.chain(*(_global_opt_lists + [
        glance.api.middleware.context.context_opts,
        glance.api.policy.policy_opts,
        glance.common.config.common_opts,
        glance.common.wsgi.bind_opts,
        glance.common.wsgi.socket_opts,
        glance.common.wsgi.eventlet_opts])))),
        glance.common.wsgi.eventlet_opts,
        glance.openstack.common.policy.policy_opts])))),
    ('paste_deploy', glance.common.config.paste_deploy_opts)
]
_scrubber_opts = [
    (None, list(itertools.chain(*(_global_opt_lists + [
        glance.api.policy.policy_opts,
        glance.common.config.common_opts,
        glance.openstack.common.lockutils.util_opts,
        glance.openstack.common.policy.policy_opts,
        glance.scrubber.scrubber_opts,
        glance.scrubber.scrubber_cmd_opts,
        glance.scrubber.scrubber_cmd_cli_opts,
@@ -97,8 +97,8 @@ _scrubber_opts = [
]
_cache_opts = [
    (None, list(itertools.chain(*(_global_opt_lists + [
        glance.api.policy.policy_opts,
        glance.common.config.common_opts,
        glance.openstack.common.policy.policy_opts,
        glance.image_cache.drivers.sqlite.sqlite_opts,
        glance.image_cache.image_cache_opts,
        glance.registry.registry_addr_opts,
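
Each entry above is a (group, options) tuple, and itertools.chain flattens
several per-module option lists into one flat list for the default group.
A minimal sketch of that flattening (the option names are illustrative, not
Glance's real option lists; such tuples are typically consumed by oslo's
sample-config tooling):

    import itertools

    _global_opt_lists = [['verbose_opt', 'debug_opt']]
    policy_opts = ['policy_file_opt', 'policy_default_rule_opt']
    common_opts = ['data_api_opt']

    # chain(*) concatenates the inner lists in order.
    flat = list(itertools.chain(*(_global_opt_lists + [policy_opts, common_opts])))
    # -> ['verbose_opt', 'debug_opt', 'policy_file_opt',
    #     'policy_default_rule_opt', 'data_api_opt']
    entry = (None, flat)  # None means the [DEFAULT] group
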
@@ -29,5 +29,24 @@
    "get_task": "",
    "get_tasks": "",
    "add_task": "",
    "modify_task": ""
    "modify_task": "",

    "get_metadef_namespace": "",
    "get_metadef_namespaces":"",
    "modify_metadef_namespace":"",
    "add_metadef_namespace":"",

    "get_metadef_object":"",
    "get_metadef_objects":"",
    "modify_metadef_object":"",
    "add_metadef_object":"",

    "list_metadef_resource_types":"",
    "get_metadef_resource_type":"",
    "add_metadef_resource_type_association":"",

    "get_metadef_property":"",
    "get_metadef_properties":"",
    "modify_metadef_property":"",
    "add_metadef_property":""
}
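
All of the new metadef rules default to "" (always allowed). As a hedged
example of tightening one of them, a deployment could require the admin role
for namespace creation; the functional tests further below express the same
idea by handing a rules dict to set_policy_rules:

    # Hypothetical override, mirroring the tests' use of set_policy_rules.
    rules = {
        "context_is_admin": "role:admin",
        "default": "",
        "add_metadef_namespace": "role:admin",
    }
    self.set_policy_rules(rules)
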
@@ -422,7 +422,7 @@ class RegistryServer(Server):
    Server object that starts/stops/manages the Registry server
    """

    def __init__(self, test_dir, port, sock=None):
    def __init__(self, test_dir, port, policy_file, sock=None):
        super(RegistryServer, self).__init__(test_dir, port, sock=sock)
        self.server_name = 'registry'
        self.server_module = 'glance.cmd.%s' % self.server_name
@@ -439,6 +439,8 @@ class RegistryServer(Server):
        self.api_version = 1
        self.user_storage_quota = '0'
        self.metadata_encryption_key = "012345678901234567890123456789ab"
        self.policy_file = policy_file
        self.policy_default_rule = 'default'

        self.conf_base = """[DEFAULT]
verbose = %(verbose)s
@@ -456,6 +458,8 @@ enable_v2_registry = %(enable_v2_registry)s
workers = %(workers)s
user_storage_quota = %(user_storage_quota)s
metadata_encryption_key = %(metadata_encryption_key)s
policy_file = %(policy_file)s
policy_default_rule = %(policy_default_rule)s
[paste_deploy]
flavor = %(deployment_flavor)s
"""
@@ -488,7 +492,7 @@ class ScrubberDaemon(Server):
    Server object that starts/stops/manages the Scrubber server
    """

    def __init__(self, test_dir, daemon=False, **kwargs):
    def __init__(self, test_dir, policy_file, daemon=False, **kwargs):
        # NOTE(jkoelker): Set the port to 0 since we actually don't listen
        super(ScrubberDaemon, self).__init__(test_dir, 0)
        self.server_name = 'scrubber'
@@ -507,6 +511,8 @@ class ScrubberDaemon(Server):
        default_sql_connection = 'sqlite:////%s/tests.sqlite' % self.test_dir
        self.sql_connection = os.environ.get('GLANCE_TEST_SQL_CONNECTION',
                                             default_sql_connection)
        self.policy_file = policy_file
        self.policy_default_rule = 'default'

        self.conf_base = """[DEFAULT]
verbose = %(verbose)s
@@ -520,6 +526,8 @@ scrubber_datadir = %(scrubber_datadir)s
registry_host = 127.0.0.1
registry_port = %(registry_port)s
metadata_encryption_key = %(metadata_encryption_key)s
policy_file = %(policy_file)s
policy_default_rule = %(policy_default_rule)s
lock_path = %(lock_path)s
sql_connection = %(sql_connection)s
sql_idle_timeout = 3600
@@ -573,9 +581,10 @@ class FunctionalTest(test_utils.BaseTestCase):

        self.registry_server = RegistryServer(self.test_dir,
                                              self.registry_port,
                                              self.policy_file,
                                              sock=reg_sock)

        self.scrubber_daemon = ScrubberDaemon(self.test_dir)
        self.scrubber_daemon = ScrubberDaemon(self.test_dir, self.policy_file)

        self.pid_files = [self.api_server.pid_file,
                          self.registry_server.pid_file,
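
For context, a hedged sketch of what the registry server's rendered
configuration ends up containing once conf_base is filled in with these
attributes (the path shown is made up for illustration):

    [DEFAULT]
    ...
    metadata_encryption_key = 012345678901234567890123456789ab
    policy_file = /tmp/test-xyz/etc/policy.json
    policy_default_rule = default
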
@@ -24,6 +24,7 @@ from oslo.utils import timeutils

from glance.common import exception
from glance import context
from glance.tests import functional
import glance.tests.functional.db as db_tests
from glance.tests import utils as test_utils

@@ -78,6 +79,13 @@ def build_task_fixture(**kwargs):
    return task


class FunctionalInitWrapper(functional.FunctionalTest):

    def setUp(self):
        super(FunctionalInitWrapper, self).setUp()
        self.config(policy_file=self.policy_file)


class TestDriver(test_utils.BaseTestCase):

    def setUp(self):
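
The self.config(...) helper used by FunctionalInitWrapper is the usual
OpenStack test shorthand for overriding options on the global CONF object for
the duration of a test. A hedged sketch of that pattern (not Glance's exact
helper):

    from oslo.config import cfg

    CONF = cfg.CONF

    def config(self, **kwargs):
        # Override each option and make sure it is reverted on cleanup.
        group = kwargs.pop('group', None)
        for key, value in kwargs.items():
            CONF.set_override(key, value, group)
            self.addCleanup(CONF.clear_override, key, group)
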
@@ -17,7 +17,6 @@ from oslo.config import cfg
from oslo.db import options

import glance.db
from glance.tests import functional
import glance.tests.functional.db as db_tests
from glance.tests.functional.db import base
from glance.tests.functional.db import base_metadef
@@ -35,7 +34,7 @@ def reset_db(db_api):
    pass


class FunctionalInitWrapper(functional.FunctionalTest):
class FunctionalInitWrapper(base.FunctionalInitWrapper):

    def setUp(self):
        # NOTE(flaper87): We need to start the

@@ -30,7 +30,9 @@ def reset_db(db_api):
    db_api.reset()


class TestSimpleDriver(base.TestDriver, base.DriverTests):
class TestSimpleDriver(base.TestDriver,
                       base.DriverTests,
                       base.FunctionalInitWrapper):

    def setUp(self):
        db_tests.load(get_db, reset_db)
@@ -38,7 +40,8 @@ class TestSimpleDriver(base.TestDriver, base.DriverTests):
        self.addCleanup(db_tests.reset)


class TestSimpleQuota(base.DriverQuotaTests):
class TestSimpleQuota(base.DriverQuotaTests,
                      base.FunctionalInitWrapper):

    def setUp(self):
        db_tests.load(get_db, reset_db)
@@ -46,7 +49,9 @@ class TestSimpleQuota(base.DriverQuotaTests):
        self.addCleanup(db_tests.reset)


class TestSimpleVisibility(base.TestVisibility, base.VisibilityTests):
class TestSimpleVisibility(base.TestVisibility,
                           base.VisibilityTests,
                           base.FunctionalInitWrapper):

    def setUp(self):
        db_tests.load(get_db, reset_db)
@@ -55,7 +60,8 @@ class TestSimpleVisibility(base.TestVisibility, base.VisibilityTests):


class TestSimpleMembershipVisibility(base.TestMembershipVisibility,
                                     base.MembershipVisibilityTests):
                                     base.MembershipVisibilityTests,
                                     base.FunctionalInitWrapper):

    def setUp(self):
        db_tests.load(get_db, reset_db)
@@ -63,7 +69,8 @@ class TestSimpleMembershipVisibility(base.TestMembershipVisibility,
        self.addCleanup(db_tests.reset)


class TestSimpleTask(base.TaskTests):
class TestSimpleTask(base.TaskTests,
                     base.FunctionalInitWrapper):

    def setUp(self):
        db_tests.load(get_db, reset_db)
@@ -45,7 +45,9 @@ def reset_db_metadef(db_api):
    metadef_models.register_models(db_api.get_engine())


class TestSqlAlchemyDriver(base.TestDriver, base.DriverTests):
class TestSqlAlchemyDriver(base.TestDriver,
                           base.DriverTests,
                           base.FunctionalInitWrapper):

    def setUp(self):
        db_tests.load(get_db, reset_db)
@@ -74,7 +76,9 @@ class TestSqlAlchemyDriver(base.TestDriver, base.DriverTests):
            self.context, 'fake_owner_id', image_id)


class TestSqlAlchemyVisibility(base.TestVisibility, base.VisibilityTests):
class TestSqlAlchemyVisibility(base.TestVisibility,
                               base.VisibilityTests,
                               base.FunctionalInitWrapper):

    def setUp(self):
        db_tests.load(get_db, reset_db)
@@ -83,7 +87,8 @@ class TestSqlAlchemyVisibility(base.TestVisibility, base.VisibilityTests):


class TestSqlAlchemyMembershipVisibility(base.TestMembershipVisibility,
                                         base.MembershipVisibilityTests):
                                         base.MembershipVisibilityTests,
                                         base.FunctionalInitWrapper):

    def setUp(self):
        db_tests.load(get_db, reset_db)
@@ -91,7 +96,8 @@ class TestSqlAlchemyMembershipVisibility(base.TestMembershipVisibility,
        self.addCleanup(db_tests.reset)


class TestSqlAlchemyDBDataIntegrity(base.TestDriver):
class TestSqlAlchemyDBDataIntegrity(base.TestDriver,
                                    base.FunctionalInitWrapper):
    """Test class for checking the data integrity in the database.

    Helpful in testing scenarios specific to the sqlalchemy api.
@@ -129,7 +135,8 @@ class TestSqlAlchemyDBDataIntegrity(base.TestDriver):
        self.db_api.image_get_all(self.context, sort_key='name')


class TestSqlAlchemyTask(base.TaskTests):
class TestSqlAlchemyTask(base.TaskTests,
                         base.FunctionalInitWrapper):

    def setUp(self):
        db_tests.load(get_db, reset_db)
@@ -137,7 +144,8 @@ class TestSqlAlchemyTask(base.TaskTests):
        self.addCleanup(db_tests.reset)


class TestSqlAlchemyQuota(base.DriverQuotaTests):
class TestSqlAlchemyQuota(base.DriverQuotaTests,
                          base.FunctionalInitWrapper):

    def setUp(self):
        db_tests.load(get_db, reset_db)
@@ -146,7 +154,8 @@ class TestSqlAlchemyQuota(base.DriverQuotaTests):


class TestMetadefSqlAlchemyDriver(base_metadef.TestMetadefDriver,
                                  base_metadef.MetadefDriverTests):
                                  base_metadef.MetadefDriverTests,
                                  base.FunctionalInitWrapper):

    def setUp(self):
        db_tests.load(get_db, reset_db_metadef)
@@ -535,7 +535,13 @@ class TestImages(functional.FunctionalTest):

    def test_download_policy_when_cache_is_not_enabled(self):

        rules = {'context_is_admin': 'role:admin', 'default': '',
        rules = {'context_is_admin': 'role:admin',
                 'default': '',
                 'add_image': '',
                 'get_image': '',
                 'modify_image': '',
                 'upload_image': '',
                 'delete_image': '',
                 'download_image': '!'}
        self.set_policy_rules(rules)
        self.start_servers(**self.__dict__.copy())
@@ -596,6 +602,11 @@ class TestImages(functional.FunctionalTest):
        rules = {
            "context_is_admin": "role:admin",
            "default": "",
            "add_image": "",
            "get_image": "",
            "modify_image": "",
            "upload_image": "",
            "delete_image": "",
            "restricted":
                "not ('aki':%(container_format)s and role:_member_)",
            "download_image": "role:admin or rule:restricted"
@@ -662,6 +673,12 @@ class TestImages(functional.FunctionalTest):
        rules = {
            "context_is_admin": "role:admin",
            "default": "",
            "add_image": "",
            "get_image": "",
            "modify_image": "",
            "upload_image": "",
            "get_image_location": "",
            "delete_image": "",
            "restricted":
                "not ('aki':%(container_format)s and role:_member_)",
            "download_image": "role:admin or rule:restricted"
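
In these rule strings, "" always allows, "!" always denies, "role:admin"
checks a role in the request credentials, "rule:restricted" references another
named rule, and "'aki':%(container_format)s" compares a literal against a
field of the target image. A hedged paraphrase of how the restricted download
policy above evaluates (plain Python standing in for the policy engine):

    target = {'container_format': 'aki'}
    creds = {'roles': ['_member_']}

    # "not ('aki':%(container_format)s and role:_member_)"
    restricted = not (target['container_format'] == 'aki'
                      and '_member_' in creds['roles'])
    # "role:admin or rule:restricted"
    download_allowed = ('admin' in creds['roles']) or restricted
    # -> False: an ordinary member cannot download an 'aki' image.
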
@@ -14,9 +14,7 @@
# under the License.

import os
import shutil

import fixtures
import glance_store as store
from glance_store import location
from oslo.config import cfg
@@ -61,14 +59,11 @@ class IsolatedUnitTest(StoreClearingUnitTest):

    def setUp(self):
        super(IsolatedUnitTest, self).setUp()
        self.test_dir = self.useFixture(fixtures.TempDir()).path
        policy_file = self._copy_data_file('policy.json', self.test_dir)
        options.set_defaults(CONF, connection='sqlite://',
                             sqlite_db='glance.sqlite')

        self.config(verbose=False,
                    debug=False,
                    policy_file=policy_file,
                    lock_path=os.path.join(self.test_dir))

        self.config(default_store='filesystem',
@@ -84,12 +79,6 @@ class IsolatedUnitTest(StoreClearingUnitTest):
        if hasattr(local.store, 'context'):
            delattr(local.store, 'context')

    def _copy_data_file(self, file_name, dst_dir):
        src_file_name = os.path.join('glance/tests/etc', file_name)
        shutil.copy(src_file_name, dst_dir)
        dst_file_name = os.path.join(dst_dir, file_name)
        return dst_file_name

    def set_policy_rules(self, rules):
        fap = open(CONF.policy_file, 'w')
        fap.write(jsonutils.dumps(rules))
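
set_policy_rules simply rewrites the policy file that CONF.policy_file points
at, so a unit test can swap in a restrictive policy on the fly. A hedged usage
sketch inside an IsolatedUnitTest-style test (the rule chosen is illustrative):

    def test_image_delete_forbidden_for_members(self):
        # Only admins may delete; everything else keeps the permissive default.
        self.set_policy_rules({'context_is_admin': 'role:admin',
                               'default': '',
                               'delete_image': 'role:admin'})
        # ... exercise the API as a non-admin and expect 403 Forbidden ...
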
@@ -76,6 +76,7 @@ class OptsTestCase(utils.BaseTestCase):
            'allow_anonymous_access',
            'policy_file',
            'policy_default_rule',
            'policy_dirs',
            'allow_additional_image_properties',
            'image_member_quota',
            'image_property_quota',
@@ -175,6 +176,7 @@ class OptsTestCase(utils.BaseTestCase):
            'allow_anonymous_access',
            'policy_file',
            'policy_default_rule',
            'policy_dirs',
            'allow_additional_image_properties',
            'image_member_quota',
            'image_property_quota',
@@ -230,6 +232,7 @@ class OptsTestCase(utils.BaseTestCase):
            'syslog-log-facility',
            'policy_file',
            'policy_default_rule',
            'policy_dirs',
            'allow_additional_image_properties',
            'image_member_quota',
            'image_property_quota',
@@ -292,6 +295,7 @@ class OptsTestCase(utils.BaseTestCase):
            'syslog-log-facility',
            'policy_file',
            'policy_default_rule',
            'policy_dirs',
            'allow_additional_image_properties',
            'image_member_quota',
            'image_property_quota',
@@ -101,6 +101,7 @@ class TestImageMembersController(test_utils.BaseTestCase):
                                                              self.policy,
                                                              self.notifier,
                                                              self.store)
        glance_store.register_opts(CONF)
        glance_store.create_stores()

    def _create_images(self):
@@ -35,6 +35,7 @@ import webob

from glance.common import config
from glance.common import exception
from glance.common import property_utils
from glance.common import utils
from glance.common import wsgi
from glance import context
from glance.db.sqlalchemy import api as db_api
@@ -56,12 +57,20 @@ class BaseTestCase(testtools.TestCase):
        self.stubs = stubout.StubOutForTesting()
        self.stubs.Set(exception, '_FATAL_EXCEPTION_FORMAT_ERRORS', True)
        self.test_dir = self.useFixture(fixtures.TempDir()).path
        self.conf_dir = os.path.join(self.test_dir, 'etc')
        utils.safe_mkdirs(self.conf_dir)
        self.set_policy()

    def tearDown(self):
        self.stubs.UnsetAll()
        self.stubs.SmartUnsetAll()
        super(BaseTestCase, self).tearDown()

    def set_policy(self):
        conf_file = "policy.json"
        self.policy_file = self._copy_data_file(conf_file, self.conf_dir)
        self.config(policy_file=self.policy_file)

    def set_property_protections(self, use_policies=False):
        self.unset_property_protections()
        conf_file = "property-protections.conf"
@@ -4,7 +4,6 @@
module=_i18n
module=gettextutils
module=install_venv_common
module=jsonutils
module=local
module=lockutils
module=log