Declarative definitions of Artifact Types

Add a notation which allows declaratively defining the schema of Artifacts'
type-specific metadata fields, with appropriate constraints and
validators.

The classes utilizing this declarative notation will be used to define
artifact types and will be imported using the plugin system.

The generated classes provide input validation in both initializers and
property setters.

At the same time, the model contains meta-definitions describing attributes
of the properties, such as immutability, internal access, etc., which may
be used by other layers of code.
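
As an illustration, a plugin-defined artifact type could look like the
following sketch (the type and field names below are hypothetical, not
part of this change):

    from glance.common.artifacts import definitions


    class ImageTemplate(definitions.ArtifactType):
        __type_name__ = "image_template"
        __type_version__ = "1.0"

        # type-specific metadata fields with constraints
        os_family = definitions.String(allowed_values=["linux", "windows"],
                                       required=True, mutable=False)
        min_ram_mb = definitions.Integer(min_value=64)
        labels = definitions.Array(unique=True, default=[])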

Co-Authored-By: Alexander Tivelkov <ativelkov@mirantis.com>
Co-Authored-By: Inessa Vasilevskaya <ivasilevskaya@mirantis.com>
Co-Authored-By: Mike Fedosin <mfedosin@mirantis.com>

Implements-blueprint: artifact-repository

Change-Id: Ie0deb84d5fbe0397c047862b7cfbaecd31603e70
Alexander Tivelkov 2014-09-04 23:02:59 +04:00 committed by Mike Fedosin
parent e72bd4c8bc
commit b66f3904c8
6 changed files with 2719 additions and 0 deletions

@@ -0,0 +1,747 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import re
import types
import semantic_version
import six
from glance.common import exception as exc
from glance import i18n
_ = i18n._
class AttributeDefinition(object):
"""A base class for the attribute definitions which may be added to
declaratively defined artifact types
"""
ALLOWED_TYPES = (object,)
def __init__(self,
display_name=None,
description=None,
readonly=False,
mutable=True,
required=False,
default=None):
"""Initializes attribute definition
:param display_name: Display name of the attribute
:param description: Description of the attribute
:param readonly: Flag indicating if the value of attribute may not be
changed once an artifact is created
:param mutable: Flag indicating if the value of attribute may still be
changed after an artifact is published
:param required: Flag indicating if the value of attribute is required
:param default: default value of the attribute
"""
self.name = None
self.display_name = display_name
self.description = description
self.readonly = readonly
self.required = required
self.mutable = mutable
self.default = default
self._add_validator('type',
lambda v: isinstance(v, self.ALLOWED_TYPES),
_("Not a valid value type"))
self._validate_default()
def _set_name(self, value):
self.name = value
if self.display_name is None:
self.display_name = value
def _add_validator(self, name, func, message):
if not hasattr(self, '_validators'):
self._validators = []
self._validators_index = {}
pair = (func, message)
self._validators.append(pair)
self._validators_index[name] = pair
def _get_validator(self, name):
return self._validators_index.get(name)
def _remove_validator(self, name):
pair = self._validators_index.pop(name, None)
if pair is not None:
self._validators.remove(pair)
def _check_definition(self):
self._validate_default()
def _validate_default(self):
if self.default:
try:
self.validate(self.default, 'default')
except exc.InvalidArtifactPropertyValue:
raise exc.InvalidArtifactTypePropertyDefinition(
_("Default value is invalid"))
def get_value(self, obj):
return getattr(obj, self.name)
def set_value(self, obj, value):
return setattr(obj, self.name, value)
def validate(self, value, name=None):
if value is None:
if self.required:
raise exc.InvalidArtifactPropertyValue(
name=name or self.name,
val=value,
msg=_('Value is required'))
else:
return
first_error = next((msg for v_func, msg in self._validators
if not v_func(value)), None)
if first_error:
raise exc.InvalidArtifactPropertyValue(name=name or self.name,
val=value,
msg=first_error)
class ListAttributeDefinition(AttributeDefinition):
"""A base class for Attribute definitions having List-semantics
Is inherited by Array, ArtifactReferenceList and BinaryObjectList
"""
ALLOWED_TYPES = (types.ListType,)
ALLOWED_ITEM_TYPES = (AttributeDefinition, )
def _check_item_type(self, item):
if not isinstance(item, self.ALLOWED_ITEM_TYPES):
raise exc.InvalidArtifactTypePropertyDefinition(
_('Invalid item type specification'))
if item.default is not None:
raise exc.InvalidArtifactTypePropertyDefinition(
_('List definitions may not have defaults'))
def __init__(self, item_type, min_size=0, max_size=None, unique=False,
**kwargs):
super(ListAttributeDefinition, self).__init__(**kwargs)
if isinstance(item_type, types.ListType):
for it in item_type:
self._check_item_type(it)
# we need to copy the item_type collection
self.item_type = item_type[:]
if min_size != 0:
raise exc.InvalidArtifactTypePropertyDefinition(
_("Cannot specify 'min_size' explicitly")
)
if max_size is not None:
raise exc.InvalidArtifactTypePropertyDefinition(
_("Cannot specify 'max_size' explicitly")
)
# setting max_size and min_size to the length of item_type,
# as tuple-semantic assumes that the number of elements is set
# by the type spec
min_size = max_size = len(item_type)
else:
self._check_item_type(item_type)
self.item_type = item_type
if min_size:
self.min_size(min_size)
if max_size:
self.max_size(max_size)
if unique:
self.unique()
def min_size(self, value):
self._min_size = value
if value is not None:
self._add_validator('min_size',
lambda v: len(v) >= self._min_size,
_('List size is less than minimum'))
else:
self._remove_validator('min_size')
def max_size(self, value):
self._max_size = value
if value is not None:
self._add_validator('max_size',
lambda v: len(v) <= self._max_size,
_('List size is greater than maximum'))
else:
self._remove_validator('max_size')
def unique(self, value=True):
self._unique = value
if value:
def _unique(items):
seen = set()
for item in items:
if item in seen:
return False
seen.add(item)
return True
self._add_validator('unique',
_unique, _('Items have to be unique'))
else:
self._remove_validator('unique')
def _set_name(self, value):
super(ListAttributeDefinition, self)._set_name(value)
if isinstance(self.item_type, types.ListType):
for i, item in enumerate(self.item_type):
item._set_name("%s[%i]" % (value, i))
else:
self.item_type._set_name("%s[*]" % value)
def validate(self, value, name=None):
super(ListAttributeDefinition, self).validate(value, name)
if value is not None:
for i, item in enumerate(value):
self._validate_item_at(item, i)
def get_item_definition_at_index(self, index):
if isinstance(self.item_type, types.ListType):
if index < len(self.item_type):
return self.item_type[index]
else:
return None
return self.item_type
def _validate_item_at(self, item, index):
item_type = self.get_item_definition_at_index(index)
# set name if none has been given to the list element at given index
if (isinstance(self.item_type, types.ListType) and item_type and
not item_type.name):
item_type.name = "%s[%i]" % (self.name, index)
if item_type:
item_type.validate(item)
class DictAttributeDefinition(AttributeDefinition):
"""A base class for Attribute definitions having Map-semantics
Is inherited by Dict
"""
ALLOWED_TYPES = (types.DictionaryType,)
ALLOWED_PROPERTY_TYPES = (AttributeDefinition,)
def _check_prop(self, key, item):
if (not isinstance(item, self.ALLOWED_PROPERTY_TYPES) or
(key is not None and not isinstance(key, types.StringTypes))):
raise exc.InvalidArtifactTypePropertyDefinition(
_('Invalid dict property type specification'))
@staticmethod
def _validate_key(key):
if not isinstance(key, types.StringTypes):
raise exc.InvalidArtifactPropertyValue(
_('Invalid dict property type'))
def __init__(self, properties, min_properties=0, max_properties=0,
**kwargs):
super(DictAttributeDefinition, self).__init__(**kwargs)
if isinstance(properties, types.DictionaryType):
for key, value in six.iteritems(properties):
self._check_prop(key, value)
# copy the properties dict
self.properties = properties.copy()
self._add_validator('keys',
lambda v: set(v.keys()) <= set(
self.properties.keys()),
_('Dictionary contains unexpected key(s)'))
else:
self._check_prop(None, properties)
self.properties = properties
if min_properties:
self.min_properties(min_properties)
if max_properties:
self.max_properties(max_properties)
def min_properties(self, value):
self._min_properties = value
if value is not None:
self._add_validator('min_properties',
lambda v: len(v) >= self._min_properties,
_('Dictionary size is less than '
'minimum'))
else:
self._remove_validator('min_properties')
def max_properties(self, value):
self._max_properties = value
if value is not None:
self._add_validator('max_properties',
lambda v: len(v) <= self._max_properties,
_('Dictionary size is '
'greater than maximum'))
else:
self._remove_validator('max_properties')
def _set_name(self, value):
super(DictAttributeDefinition, self)._set_name(value)
if isinstance(self.properties, types.DictionaryType):
for k, v in six.iteritems(self.properties):
v._set_name(value)
else:
self.properties._set_name(value)
def validate(self, value, name=None):
super(DictAttributeDefinition, self).validate(value, name)
if value is not None:
for k, v in six.iteritems(value):
self._validate_item_with_key(v, k)
def _validate_item_with_key(self, value, key):
self._validate_key(key)
if isinstance(self.properties, types.DictionaryType):
prop_def = self.properties.get(key)
if prop_def is not None:
name = "%s[%s]" % (prop_def.name, key)
prop_def.validate(value, name=name)
else:
name = "%s[%s]" % (self.properties.name, key)
self.properties.validate(value, name=name)
def get_prop_definition_at_key(self, key):
if isinstance(self.properties, types.DictionaryType):
return self.properties.get(key)
else:
return self.properties
class PropertyDefinition(AttributeDefinition):
"""A base class for Attributes defining generic or type-specific metadata
properties
"""
DB_TYPE = None
def __init__(self,
internal=False,
allowed_values=None,
validators=None,
**kwargs):
"""Defines a metadata property
:param internal: a flag indicating that the property is internal, i.e.
not returned to client
:param allowed_values: specifies a list of values allowed for the
property
:param validators: specifies a list of custom validators for the
property
"""
super(PropertyDefinition, self).__init__(**kwargs)
self.internal = internal
self._allowed_values = None
if validators is not None:
try:
for i, (f, m) in enumerate(validators):
self._add_validator("custom_%i" % i, f, m)
except ValueError:
raise exc.InvalidArtifactTypePropertyDefinition(
_("Custom validators list should contain tuples "
"'(function, message)'"))
if allowed_values is not None:
# copy the allowed_values, as this is going to create a
# closure, and we need to make sure that external modification of
# this list does not affect the created validator
self.allowed_values(allowed_values)
self._check_definition()
def _validate_allowed_values(self):
if self._allowed_values:
try:
for allowed_value in self._allowed_values:
self.validate(allowed_value, 'allowed_value')
except exc.InvalidArtifactPropertyValue:
raise exc.InvalidArtifactTypePropertyDefinition(
_("Allowed values %s are invalid under given validators") %
self._allowed_values)
def allowed_values(self, values):
if values is not None:
self._allowed_values = values[:]
self._add_validator('allowed', lambda v: v in self._allowed_values,
_("Is not an allowed value"))
else:
self._allowed_values = None
self._remove_validator('allowed')
self._check_definition()
def _check_definition(self):
self._validate_allowed_values()
super(PropertyDefinition, self)._check_definition()
class RelationDefinition(AttributeDefinition):
"""A base class for Attributes defining cross-artifact relations"""
def __init__(self, internal=False, **kwargs):
self.internal = internal
kwargs.setdefault('mutable', False)
# if mutable=True has been passed -> raise an exception
if kwargs['mutable'] is True:
raise exc.InvalidArtifactTypePropertyDefinition(
_("Dependency relations cannot be mutable"))
super(RelationDefinition, self).__init__(**kwargs)
class BlobDefinition(AttributeDefinition):
"""A base class for Attributes defining binary objects"""
pass
class ArtifactTypeMetaclass(type):
"""A metaclass to build Artifact Types. Not intended to be used directly
Use `get_declarative_base` to get the base class instead
"""
def __init__(cls, class_name, bases, attributes):
if '_declarative_artifact_type' not in cls.__dict__:
_build_declarative_meta(cls)
super(ArtifactTypeMetaclass, cls).__init__(class_name, bases,
attributes)
class ArtifactPropertyDescriptor(object):
"""A descriptor object for working with artifact attributes"""
def __init__(self, prop, collection_wrapper_class=None):
self.prop = prop
self.collection_wrapper_class = collection_wrapper_class
def __get__(self, instance, owner):
if instance is None:
# accessed via owner class
return self.prop
else:
v = getattr(instance, '_' + self.prop.name, None)
if v is None and self.prop.default is not None:
v = copy.copy(self.prop.default)
self.__set__(instance, v, ignore_mutability=True)
return self.__get__(instance, owner)
else:
if v is not None and self.collection_wrapper_class:
if self.prop.readonly:
readonly = True
elif (not self.prop.mutable and
hasattr(instance, '__is_mutable__') and
not hasattr(instance,
'__suspend_mutability_checks__')):
readonly = not instance.__is_mutable__()
else:
readonly = False
if readonly:
v = v.__make_immutable__()
return v
def __set__(self, instance, value, ignore_mutability=False):
if instance:
if self.prop.readonly:
if hasattr(instance, '_' + self.prop.name):
raise exc.InvalidArtifactPropertyValue(
_('Attempt to set readonly property'))
if not self.prop.mutable:
if (hasattr(instance, '__is_mutable__') and
not hasattr(instance,
'__suspend_mutability_checks__')):
mutable = instance.__is_mutable__() or ignore_mutability
if not mutable:
raise exc.InvalidArtifactPropertyValue(
_('Attempt to set value of immutable property'))
if value is not None and self.collection_wrapper_class:
value = self.collection_wrapper_class(value)
value.property = self.prop
self.prop.validate(value)
setattr(instance, '_' + self.prop.name, value)
class ArtifactAttributes(object):
"""A container class storing description of Artifact Type attributes"""
def __init__(self):
self.properties = {}
self.dependencies = {}
self.blobs = {}
self.all = {}
@property
def default_dependency(self):
"""Returns the default dependency relation for an artifact type"""
if len(self.dependencies) == 1:
return self.dependencies.values()[0]
@property
def default_blob(self):
"""Returns the default blob object for an artifact type"""
if len(self.blobs) == 1:
return self.blobs.values()[0]
@property
def default_properties_dict(self):
"""Returns a default properties dict for an artifact type"""
dict_props = [v for v in self.properties.values() if
isinstance(v, DictAttributeDefinition)]
if len(dict_props) == 1:
return dict_props[0]
@property
def tags(self):
"""Returns tags property for an artifact type"""
return self.properties.get('tags')
def add(self, attribute):
self.all[attribute.name] = attribute
if isinstance(attribute, PropertyDefinition):
self.properties[attribute.name] = attribute
elif isinstance(attribute, BlobDefinition):
self.blobs[attribute.name] = attribute
elif isinstance(attribute, RelationDefinition):
self.dependencies[attribute.name] = attribute
class ArtifactTypeMetadata(object):
"""A container to store the meta-information about an artifact type"""
def __init__(self, type_name, type_display_name, type_version,
type_description, endpoint):
"""Initializes the Artifact Type metadata
:param type_name: name of the artifact type
:param type_display_name: display name of the artifact type
:param type_version: version of the artifact type
:param type_description: description of the artifact type
:param endpoint: REST API URI suffix to call the artifacts of this type
"""
self.attributes = ArtifactAttributes()
# These are going to be defined by third-party plugin
# developers, so we need to do some validations on these values and
# raise InvalidArtifactTypeDefinition if they are violated
self.type_name = type_name
self.type_display_name = type_display_name or type_name
self.type_version = type_version or '1.0'
self.type_description = type_description
self.endpoint = endpoint or type_name.lower()
self._validate_string(self.type_name, 'Type name', min_length=1,
max_length=255)
self._validate_string(self.type_display_name, 'Type display name',
max_length=255)
self._validate_string(self.type_description, 'Type description')
self._validate_string(self.endpoint, 'endpoint', min_length=1)
try:
semantic_version.Version(self.type_version, partial=True)
except ValueError:
raise exc.InvalidArtifactTypeDefinition(
message=_("Type version has to be a valid semver string"))
@staticmethod
def _validate_string(value, name, min_length=0, max_length=None,
pattern=None):
if value is None:
if min_length > 0:
raise exc.InvalidArtifactTypeDefinition(
message=_("%(attribute)s is required"), attribute=name)
else:
return
if not isinstance(value, six.string_types):
raise exc.InvalidArtifactTypeDefinition(
message=_("%(attribute)s have to be string"), attribute=name)
if max_length and len(value) > max_length:
raise exc.InvalidArtifactTypeDefinition(
message=_("%(attribute)s may not be longer than %(length)i"),
attribute=name, length=max_length)
if min_length and len(value) < min_length:
raise exc.InvalidArtifactTypeDefinition(
message=_("%(attribute)s may not be shorter than %(length)i"),
attribute=name, length=min_length)
if pattern and not re.match(pattern, value):
raise exc.InvalidArtifactTypeDefinition(
message=_("%(attribute)s should match pattern %(pattern)s"),
attribute=name, pattern=pattern.pattern)
def _build_declarative_meta(cls):
attrs = dict(cls.__dict__)
type_name = None
type_display_name = None
type_version = None
type_description = None
endpoint = None
for base in cls.__mro__:
for name, value in six.iteritems(vars(base)):
if name == '__type_name__':
if not type_name:
type_name = cls.__type_name__
elif name == '__type_version__':
if not type_version:
type_version = cls.__type_version__
elif name == '__type_description__':
if not type_description:
type_description = cls.__type_description__
elif name == '__endpoint__':
if not endpoint:
endpoint = cls.__endpoint__
elif name == '__type_display_name__':
if not type_display_name:
type_display_name = cls.__type_display_name__
elif base is not cls and name not in attrs:
if isinstance(value, AttributeDefinition):
attrs[name] = value
elif isinstance(value, ArtifactPropertyDescriptor):
attrs[name] = value.prop
meta = ArtifactTypeMetadata(type_name=type_name or cls.__name__,
type_display_name=type_display_name,
type_version=type_version,
type_description=type_description,
endpoint=endpoint)
setattr(cls, 'metadata', meta)
for k, v in attrs.items():
if k == 'metadata':
raise exc.InvalidArtifactTypePropertyDefinition(
_("Cannot declare artifact property with reserved name "
"'metadata'"))
if isinstance(v, AttributeDefinition):
v._set_name(k)
wrapper_class = None
if isinstance(v, ListAttributeDefinition):
wrapper_class = type("ValidatedList", (list,), {})
_add_validation_to_list(wrapper_class)
if isinstance(v, DictAttributeDefinition):
wrapper_class = type("ValidatedDict", (dict,), {})
_add_validation_to_dict(wrapper_class)
prop_descr = ArtifactPropertyDescriptor(v, wrapper_class)
setattr(cls, k, prop_descr)
meta.attributes.add(v)
def _validating_method(method, klass):
def wrapper(self, *args, **kwargs):
instance_copy = klass(self)
method(instance_copy, *args, **kwargs)
self.property.validate(instance_copy)
method(self, *args, **kwargs)
return wrapper
def _immutable_method(method):
def substitution(*args, **kwargs):
raise exc.InvalidArtifactPropertyValue(
_("Unable to modify collection in "
"immutable or readonly property"))
return substitution
def _add_immutable_wrappers(class_to_add, wrapped_methods):
for method_name in wrapped_methods:
method = getattr(class_to_add, method_name, None)
if method:
setattr(class_to_add, method_name, _immutable_method(method))
def _add_validation_wrappers(class_to_validate, base_class, validated_methods):
for method_name in validated_methods:
method = getattr(class_to_validate, method_name, None)
if method:
setattr(class_to_validate, method_name,
_validating_method(method, base_class))
readonly_class = type("Readonly" + class_to_validate.__name__,
(class_to_validate,), {})
_add_immutable_wrappers(readonly_class, validated_methods)
def __make_immutable__(self):
return readonly_class(self)
class_to_validate.__make_immutable__ = __make_immutable__
def _add_validation_to_list(list_based_class):
validated_methods = ['append', 'extend', 'insert', 'pop', 'remove',
'reverse', 'sort', '__setitem__', '__delitem__',
'__delslice__']
_add_validation_wrappers(list_based_class, list, validated_methods)
def _add_validation_to_dict(dict_based_class):
validated_methods = ['pop', 'popitem', 'setdefault', 'update',
'__delitem__', '__setitem__', 'clear']
_add_validation_wrappers(dict_based_class, dict, validated_methods)
def _kwarg_init_constructor(self, **kwargs):
self.__suspend_mutability_checks__ = True
try:
for k in kwargs:
if not hasattr(type(self), k):
raise exc.ArtifactInvalidProperty(prop=k)
setattr(self, k, kwargs[k])
self._validate_required(self.metadata.attributes.properties)
finally:
del self.__suspend_mutability_checks__
def _validate_required(self, attribute_dict):
for k, v in six.iteritems(attribute_dict):
if v.required and (not hasattr(self, k) or getattr(self, k) is None):
raise exc.InvalidArtifactPropertyValue(name=k, val=None,
msg=_('Value is required'))
def _update(self, values):
for k in values:
if hasattr(type(self), k):
setattr(self, k, values[k])
else:
raise exc.ArtifactInvalidProperty(prop=k)
def _pre_publish_validator(self, *args, **kwargs):
self._validate_required(self.metadata.attributes.blobs)
self._validate_required(self.metadata.attributes.dependencies)
_kwarg_init_constructor.__name__ = '__init__'
_pre_publish_validator.__name__ = '__pre_publish__'
_update.__name__ = 'update'
def get_declarative_base(name='base', base_class=object):
"""Returns a base class which should be inherited to construct Artifact
Type object using the declarative syntax of attribute definition
"""
bases = not isinstance(base_class, tuple) and (base_class,) or base_class
class_dict = {'__init__': _kwarg_init_constructor,
'_validate_required': _validate_required,
'__pre_publish__': _pre_publish_validator,
'_declarative_artifact_type': True,
'update': _update}
return ArtifactTypeMetaclass(name, bases, class_dict)
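
# A minimal sketch (not part of this module) of how a concrete property
# type can be built on top of PropertyDefinition: declare the accepted
# Python types in ALLOWED_TYPES and register named validators with
# _add_validator. The IPv4Address property below is purely illustrative;
# `re`, `six` and `_` are the imports already present at the top of this
# module.
class IPv4Address(PropertyDefinition):
    ALLOWED_TYPES = (six.string_types,)
    DB_TYPE = 'string'

    def __init__(self, **kwargs):
        super(IPv4Address, self).__init__(**kwargs)
        # simplistic dotted-quad pattern, for illustration only
        self._add_validator(
            'ipv4',
            lambda v: re.match(r'^(\d{1,3}\.){3}\d{1,3}$', v) is not None,
            _('Not a valid IPv4 address'))
        # re-validate default/allowed_values against the new validator
        self._check_definition()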


@@ -0,0 +1,571 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import numbers
import re
import types
import semantic_version
import six
from glance.common.artifacts import declarative
import glance.common.exception as exc
from glance import i18n
_ = i18n._
class Text(declarative.PropertyDefinition):
"""A text metadata property of arbitrary length
Maps to TEXT columns in database, does not support sorting or filtering
"""
ALLOWED_TYPES = (six.string_types,)
DB_TYPE = 'text'
# noinspection PyAttributeOutsideInit
class String(Text):
"""A string metadata property of limited length
Maps to VARCHAR columns in database, supports filtering and sorting.
May have constraints on length and regexp patterns.
The maximum length is limited to 255 characters
"""
DB_TYPE = 'string'
def __init__(self, max_length=255, min_length=0, pattern=None, **kwargs):
"""Defines a String metadata property.
:param max_length: maximum value length
:param min_length: minimum value length
:param pattern: regexp pattern to match
"""
super(String, self).__init__(**kwargs)
self.max_length(max_length)
self.min_length(min_length)
if pattern:
self.pattern(pattern)
# if default and/or allowed_values are specified (in base classes)
# then we need to validate them against the newly added validators
self._check_definition()
def max_length(self, value):
"""Sets the maximum value length"""
self._max_length = value
if value is not None:
if value > 255:
raise exc.InvalidArtifactTypePropertyDefinition(
_('Max string length may not exceed 255 characters'))
self._add_validator('max_length',
lambda v: len(v) <= self._max_length,
_('Length is greater than maximum'))
else:
self._remove_validator('max_length')
self._check_definition()
def min_length(self, value):
"""Sets the minimum value length"""
self._min_length = value
if value is not None:
if value < 0:
raise exc.InvalidArtifactTypePropertyDefinition(
_('Min string length may not be negative'))
self._add_validator('min_length',
lambda v: len(v) >= self._min_length,
_('Length is less than minimum'))
else:
self._remove_validator('min_length')
self._check_definition()
def pattern(self, value):
"""Sets the regexp pattern to match"""
self._pattern = value
if value is not None:
self._add_validator('pattern',
lambda v: re.match(self._pattern,
v) is not None,
_('Does not match pattern'))
else:
self._remove_validator('pattern')
self._check_definition()
class SemVerString(String):
"""A String metadata property matching semver pattern"""
def __init__(self, **kwargs):
def validate(value):
try:
semantic_version.Version(value, partial=True)
except ValueError:
return False
return True
super(SemVerString,
self).__init__(validators=[(validate,
"Invalid semver string")],
**kwargs)
# noinspection PyAttributeOutsideInit
class Integer(declarative.PropertyDefinition):
"""An Integer metadata property
Maps to INT columns in Database, supports filtering and sorting.
May have constraints on value
"""
ALLOWED_TYPES = (six.integer_types,)
DB_TYPE = 'int'
def __init__(self, min_value=None, max_value=None, **kwargs):
"""Defines an Integer metadata property
:param min_value: minimum allowed value
:param max_value: maximum allowed value
"""
super(Integer, self).__init__(**kwargs)
if min_value is not None:
self.min_value(min_value)
if max_value is not None:
self.max_value(max_value)
# if default and/or allowed_values are specified (in base classes)
# then we need to validate them against the newly added validators
self._check_definition()
def min_value(self, value):
"""Sets the minimum allowed value"""
self._min_value = value
if value is not None:
self._add_validator('min_value',
lambda v: v >= self._min_value,
_('Value is less than minimum'))
else:
self._remove_validator('min_value')
self._check_definition()
def max_value(self, value):
"""Sets the maximum allowed value"""
self._max_value = value
if value is not None:
self._add_validator('max_value',
lambda v: v <= self._max_value,
_('Value is greater than maximum'))
else:
self._remove_validator('max_value')
self._check_definition()
# noinspection PyAttributeOutsideInit
class DateTime(declarative.PropertyDefinition):
"""A DateTime metadata property
Maps to a DATETIME columns in database.
Is not supported as a Type-Specific property; may be used only as a Generic one.
May have constraints on value
"""
ALLOWED_TYPES = (datetime.datetime,)
DB_TYPE = 'datetime'
def __init__(self, min_value=None, max_value=None, **kwargs):
"""Defines a DateTime metadata property
:param min_value: minimum allowed value
:param max_value: maximum allowed value
"""
super(DateTime, self).__init__(**kwargs)
if min_value is not None:
self.min_value(min_value)
if max_value is not None:
self.max_value(max_value)
# if default and/or allowed_values are specified (in base classes)
# then we need to validate them against the newly added validators
self._check_definition()
def min_value(self, value):
"""Sets the minimum allowed value"""
self._min_value = value
if value is not None:
self._add_validator('min_value',
lambda v: v >= self._min_value,
_('Value is less than minimum'))
else:
self._remove_validator('min_value')
self._check_definition()
def max_value(self, value):
"""Sets the maximum allowed value"""
self._max_value = value
if value is not None:
self._add_validator('max_value',
lambda v: v <= self._max_value,
_('Value is greater than maximum'))
else:
self._remove_validator('max_value')
self._check_definition()
# noinspection PyAttributeOutsideInit
class Numeric(declarative.PropertyDefinition):
"""A Numeric metadata property
Maps to floating point number columns in Database, supports filtering and
sorting. May have constraints on value
"""
ALLOWED_TYPES = numbers.Number
DB_TYPE = 'numeric'
def __init__(self, min_value=None, max_value=None, **kwargs):
"""Defines a Numeric metadata property
:param min_value: minimum allowed value
:param max_value: maximum allowed value
"""
super(Numeric, self).__init__(**kwargs)
if min_value is not None:
self.min_value(min_value)
if max_value is not None:
self.max_value(max_value)
# if default and/or allowed_values are specified (in base classes)
# then we need to validate them against the newly added validators
self._check_definition()
def min_value(self, value):
"""Sets the minimum allowed value"""
self._min_value = value
if value is not None:
self._add_validator('min_value',
lambda v: v >= self._min_value,
_('Value is less than minimum'))
else:
self._remove_validator('min_value')
self._check_definition()
def max_value(self, value):
"""Sets the maximum allowed value"""
self._max_value = value
if value is not None:
self._add_validator('max_value',
lambda v: v <= self._max_value,
_('Value is greater than maximum'))
else:
self._remove_validator('max_value')
self._check_definition()
class Boolean(declarative.PropertyDefinition):
"""A Boolean metadata property
Maps to Boolean columns in database. Supports filtering and sorting.
"""
ALLOWED_TYPES = (types.BooleanType,)
DB_TYPE = 'bool'
class Array(declarative.ListAttributeDefinition,
declarative.PropertyDefinition, list):
"""An array metadata property
May contain elements of any other PropertyDefinition types except Dict and
Array. Each element maps to an appropriate column type in the database.
Preserves order. Allows filtering based on "Array contains Value" semantics.
May specify constraints on the types of elements, their number and uniqueness.
"""
ALLOWED_ITEM_TYPES = (declarative.PropertyDefinition,)
def __init__(self, item_type=String(), min_size=0, max_size=None,
unique=False, extra_items=True, **kwargs):
"""Defines an Array metadata property
:param item_type: defines the types of elements in the Array. If set to an
instance of PropertyDefinition then all the elements have to be of that
type. If set to a list of such instances, then the elements at the
corresponding positions have to be of the appropriate type.
:param min_size: minimum size of the Array
:param max_size: maximum size of the Array
:param unique: if set to true, all the elements in the Array have to be
unique
"""
if isinstance(item_type, Array):
msg = _("Array property can't have item_type=Array")
raise exc.InvalidArtifactTypePropertyDefinition(msg)
declarative.ListAttributeDefinition.__init__(self,
item_type=item_type,
min_size=min_size,
max_size=max_size,
unique=unique)
declarative.PropertyDefinition.__init__(self, **kwargs)
class Dict(declarative.DictAttributeDefinition,
declarative.PropertyDefinition, dict):
"""A dictionary metadata property
May contain elements of any other PropertyDefinition types except Dict.
Each element maps to an appropriate column type in the database. Allows
filtering and sorting by the values of each key, except the ones mapped to
Text fields.
May specify constraints on the types of elements and their number.
"""
ALLOWED_PROPERTY_TYPES = (declarative.PropertyDefinition,)
def __init__(self, properties=String(), min_properties=0,
max_properties=None, **kwargs):
"""Defines a dictionary metadata property
:param properties: defines the types of dictionary values. If set to an
instance of PropertyDefinition then all the values have to be of that
type. If set to a dictionary with string keys and values of
PropertyDefinition type, then the values mapped by the corresponding
keys have to be of the appropriate type.
:param min_properties: minimum allowed amount of properties in the dict
:param max_properties: maximum allowed amount of properties in the dict
"""
declarative.DictAttributeDefinition. \
__init__(self,
properties=properties,
min_properties=min_properties,
max_properties=max_properties)
declarative.PropertyDefinition.__init__(self, **kwargs)
class ArtifactType(declarative.get_declarative_base()): # noqa
"""A base class for all the Artifact Type definitions
Defines the Generic metadata properties as attributes.
"""
id = String(required=True, readonly=True)
type_name = String(required=True, readonly=True)
type_version = SemVerString(required=True, readonly=True)
name = String(required=True, mutable=False)
version = SemVerString(required=True, mutable=False)
description = Text()
tags = Array(unique=True, default=[])
visibility = String(required=True,
allowed_values=["private", "public", "shared",
"community"],
default="private")
state = String(required=True, readonly=True, allowed_values=["creating",
"active",
"deactivated",
"deleted"])
owner = String(required=True, readonly=True)
created_at = DateTime(required=True, readonly=True)
updated_at = DateTime(required=True, readonly=True)
published_at = DateTime(readonly=True)
deleted_at = DateTime(readonly=True)
def __init__(self, **kwargs):
if "type_name" in kwargs:
raise exc.InvalidArtifactPropertyValue(
_("Unable to specify artifact type explicitly"))
if "type_version" in kwargs:
raise exc.InvalidArtifactPropertyValue(
_("Unable to specify artifact type version explicitly"))
super(ArtifactType,
self).__init__(type_name=self.metadata.type_name,
type_version=self.metadata.type_version, **kwargs)
def __eq__(self, other):
if not isinstance(other, ArtifactType):
return False
return self.id == other.id
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash(self.id)
def __is_mutable__(self):
return self.state == "creating"
class ArtifactReference(declarative.RelationDefinition):
"""An artifact reference definition
Allows defining constraints by the name and version of the target artifact
"""
ALLOWED_TYPES = ArtifactType
def __init__(self, type_name=None, type_version=None, **kwargs):
"""Defines an artifact reference
:param type_name: type name of the target artifact
:param type_version: type version of the target artifact
"""
super(ArtifactReference, self).__init__(**kwargs)
if type_name is not None:
if isinstance(type_name, types.ListType):
type_names = list(type_name)
if type_version is not None:
raise exc.InvalidArtifactTypePropertyDefinition(
_('Unable to specify version '
'if multiple types are possible'))
else:
type_names = [type_name]
def validate_reference(artifact):
if artifact.type_name not in type_names:
return False
if (type_version is not None and
artifact.type_version != type_version):
return False
return True
self._add_validator('referenced_type',
validate_reference,
_("Invalid referenced type"))
elif type_version is not None:
raise exc.InvalidArtifactTypePropertyDefinition(
_('Unable to specify version '
'if type is not specified'))
self._check_definition()
class ArtifactReferenceList(declarative.ListAttributeDefinition,
declarative.RelationDefinition, list):
"""A list of Artifact References
Allows defining a collection of references to other artifacts, each
optionally constrained by type name and type version
"""
ALLOWED_ITEM_TYPES = (ArtifactReference,)
def __init__(self, references=ArtifactReference(), min_size=0,
max_size=None, **kwargs):
if isinstance(references, types.ListType):
raise exc.InvalidArtifactTypePropertyDefinition(
_("Invalid reference list specification"))
declarative.RelationDefinition.__init__(self, **kwargs)
declarative.ListAttributeDefinition.__init__(self,
item_type=references,
min_size=min_size,
max_size=max_size,
unique=True,
default=[]
if min_size == 0 else
None)
class Blob(object):
"""A Binary object being part of the Artifact"""
def __init__(self, size=0, locations=None, checksum=None, item_key=None):
"""Initializes a new Binary Object for an Artifact
:param size: the size of Binary Data
:param locations: a list of data locations in backing stores
:param checksum: a checksum for the data
"""
if locations is None:
locations = []
self.size = size
self.checksum = checksum
self.locations = locations
self.item_key = item_key
def to_dict(self):
return {
"size": self.size,
"checksum": self.checksum,
}
class BinaryObject(declarative.BlobDefinition, Blob):
"""A definition of BinaryObject binding
Adds a BinaryObject to an Artifact Type, optionally constrained by file
size and number of locations
"""
ALLOWED_TYPES = (Blob,)
def __init__(self,
max_file_size=None,
min_file_size=None,
min_locations=None,
max_locations=None,
**kwargs):
"""Defines a binary object as part of Artifact Type
:param max_file_size: maximum size of the associated Blob
:param min_file_size: minimum size of the associated Blob
:param min_locations: minimum number of locations in the associated
Blob
:param max_locations: maximum number of locations in the associated
Blob
"""
super(BinaryObject, self).__init__(default=None, readonly=False,
mutable=False, **kwargs)
self._max_file_size = max_file_size
self._min_file_size = min_file_size
self._min_locations = min_locations
self._max_locations = max_locations
self._add_validator('size_not_empty',
lambda v: v.size is not None,
_('Blob size is not set'))
if max_file_size:
self._add_validator('max_size',
lambda v: v.size <= self._max_file_size,
_("File too large"))
if min_file_size:
self._add_validator('min_size',
lambda v: v.size >= self._min_file_size,
_("File too small"))
if min_locations:
self._add_validator('min_locations',
lambda v: len(
v.locations) >= self._min_locations,
_("Too few locations"))
if max_locations:
self._add_validator(
'max_locations',
lambda v: len(v.locations) <= self._max_locations,
_("Too many locations"))
class BinaryObjectList(declarative.ListAttributeDefinition,
declarative.BlobDefinition, list):
"""A definition of binding to the list of BinaryObject
Adds a list of BinaryObjects to an artifact type, optionally constrained
by the number of objects in the list and their uniqueness
"""
ALLOWED_ITEM_TYPES = (BinaryObject,)
def __init__(self, objects=BinaryObject(), min_count=0, max_count=None,
**kwargs):
declarative.BlobDefinition.__init__(self, **kwargs)
declarative.ListAttributeDefinition.__init__(self,
item_type=objects,
min_size=min_count,
max_size=max_count,
unique=True)
self.default = [] if min_count == 0 else None
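
# A hedged usage sketch (illustrative only, not part of this module),
# mirroring the ImageTemplate example from the commit message: the
# generated __init__ validates the keyword arguments and the required
# generic properties, while property setters re-validate values and
# enforce the readonly/mutability rules.
if __name__ == '__main__':
    import uuid

    class ImageTemplate(ArtifactType):
        __type_name__ = 'image_template'
        os_family = String(allowed_values=['linux', 'windows'],
                           required=True, mutable=False)
        min_ram_mb = Integer(min_value=64)

    now = datetime.datetime.utcnow()
    tmpl = ImageTemplate(id=str(uuid.uuid4()), name='base', version='1.0',
                         owner='admin', state='creating',
                         created_at=now, updated_at=now, os_family='linux')

    tmpl.min_ram_mb = 512           # passes the min_value validator
    try:
        tmpl.os_family = 'plan9'    # rejected: not an allowed value
    except exc.InvalidArtifactPropertyValue:
        pass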


@@ -0,0 +1,265 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from glance.common.artifacts import declarative
from glance.common.artifacts import definitions
from glance.common import exception
from glance import i18n
_ = i18n._
COMMON_ARTIFACT_PROPERTIES = ['id',
'type_name',
'type_version',
'name',
'version',
'description',
'visibility',
'state',
'tags',
'owner',
'created_at',
'updated_at',
'published_at',
'deleted_at']
def _serialize_list_prop(prop, values):
"""
A helper func called to correctly serialize an Array property.
Returns a dict {'type': some_supported_db_type, 'value': serialized_data}
"""
# FIXME: due to a potential bug in the declarative framework, for Arrays
# that are values of some dict items (Dict(properties={"foo": Array()})),
# prop.get_value(artifact) returns not the real list of items but the
# whole dict. So we can't rely on prop.get_value(artifact) and instead
# pass the correctly retrieved values to this function
serialized_value = []
for i, val in enumerate(values or []):
db_type = prop.get_item_definition_at_index(i).DB_TYPE
if db_type is None:
continue
serialized_value.append({
'type': db_type,
'value': val
})
return serialized_value
def _serialize_dict_prop(artifact, prop, key, value, save_prop_func):
key_to_save = prop.name + '.' + key
dict_key_prop = prop.get_prop_definition_at_key(key)
db_type = dict_key_prop.DB_TYPE
if (db_type is None and
not isinstance(dict_key_prop,
declarative.ListAttributeDefinition)):
# nothing to do here, don't know how to deal with this type
return
elif isinstance(dict_key_prop,
declarative.ListAttributeDefinition):
serialized = _serialize_list_prop(
dict_key_prop,
# FIXME: see the comment for the _serialize_list_prop function
values=(dict_key_prop.get_value(artifact) or {}).get(key, []))
save_prop_func(key_to_save, 'array', serialized)
else:
save_prop_func(key_to_save, db_type, value)
def _serialize_dependencies(artifact):
"""Returns a dict of serialized dependencies for given artifact"""
dependencies = {}
for relation in artifact.metadata.attributes.dependencies.values():
serialized_dependency = []
if isinstance(relation, declarative.ListAttributeDefinition):
for dep in relation.get_value(artifact):
serialized_dependency.append(dep.id)
else:
relation_data = relation.get_value(artifact)
if relation_data:
serialized_dependency.append(relation.get_value(artifact).id)
dependencies[relation.name] = serialized_dependency
return dependencies
def _serialize_blobs(artifact):
"""Return a dict of serialized blobs for given artifact"""
blobs = {}
for blob in artifact.metadata.attributes.blobs.values():
serialized_blob = []
if isinstance(blob, declarative.ListAttributeDefinition):
for b in blob.get_value(artifact) or []:
serialized_blob.append({
'size': b.size,
'locations': b.locations,
'checksum': b.checksum,
'item_key': b.item_key
})
else:
b = blob.get_value(artifact)
# if no value for blob has been set -> continue
if not b:
continue
serialized_blob.append({
'size': b.size,
'locations': b.locations,
'checksum': b.checksum,
'item_key': b.item_key
})
blobs[blob.name] = serialized_blob
return blobs
def serialize_for_db(artifact):
result = {}
custom_properties = {}
def _save_prop(prop_key, prop_type, value):
custom_properties[prop_key] = {
'type': prop_type,
'value': value
}
for prop in artifact.metadata.attributes.properties.values():
if prop.name in COMMON_ARTIFACT_PROPERTIES:
result[prop.name] = prop.get_value(artifact)
continue
if isinstance(prop, declarative.ListAttributeDefinition):
serialized_value = _serialize_list_prop(prop,
prop.get_value(artifact))
_save_prop(prop.name, 'array', serialized_value)
elif isinstance(prop, declarative.DictAttributeDefinition):
fields_to_set = prop.get_value(artifact) or {}
# if some keys are not present (e.g. when the value is {}), their
# values have to be set to None.
# XXX FIXME: prop.properties may be a dict ({'foo': '', 'bar': ''})
# or a String/Integer/etc. definition limiting the possible dict values.
# In the latter case there is no way to remove old values during the
# serialization process.
if isinstance(prop.properties, dict):
for key in [k for k in prop.properties
if k not in fields_to_set.keys()]:
_serialize_dict_prop(artifact, prop, key, None, _save_prop)
# serialize values of properties present
for key, value in six.iteritems(fields_to_set):
_serialize_dict_prop(artifact, prop, key, value, _save_prop)
elif prop.DB_TYPE is not None:
_save_prop(prop.name, prop.DB_TYPE, prop.get_value(artifact))
result['properties'] = custom_properties
result['dependencies'] = _serialize_dependencies(artifact)
result['blobs'] = _serialize_blobs(artifact)
return result
def _deserialize_blobs(artifact_type, blobs_from_db, artifact_properties):
"""Retrieves blobs from database"""
for blob_name, blob_value in six.iteritems(blobs_from_db):
if not blob_value:
continue
if isinstance(artifact_type.metadata.attributes.blobs.get(blob_name),
declarative.ListAttributeDefinition):
val = []
for v in blob_value:
b = definitions.Blob(size=v['size'],
locations=v['locations'],
checksum=v['checksum'],
item_key=v['item_key'])
val.append(b)
elif len(blob_value) == 1:
val = definitions.Blob(size=blob_value[0]['size'],
locations=blob_value[0]['locations'],
checksum=blob_value[0]['checksum'],
item_key=blob_value[0]['item_key'])
else:
raise exception.InvalidArtifactPropertyValue(
message=_('Blob %(name)s may not have multiple values'),
name=blob_name)
artifact_properties[blob_name] = val
def _deserialize_dependencies(artifact_type, deps_from_db,
artifact_properties, type_dictionary):
"""Retrieves dependencies from database"""
for dep_name, dep_value in six.iteritems(deps_from_db):
if not dep_value:
continue
if isinstance(
artifact_type.metadata.attributes.dependencies.get(dep_name),
declarative.ListAttributeDefinition):
val = []
for v in dep_value:
val.append(deserialize_from_db(v, type_dictionary))
elif len(dep_value) == 1:
val = deserialize_from_db(dep_value[0], type_dictionary)
else:
raise exception.InvalidArtifactPropertyValue(
message=_('Relation %(name)s may not have multiple values'),
name=dep_name)
artifact_properties[dep_name] = val
def deserialize_from_db(db_dict, type_dictionary):
artifact_properties = {}
type_name = None
type_version = None
for prop_name in COMMON_ARTIFACT_PROPERTIES:
prop_value = db_dict.pop(prop_name, None)
if prop_name == 'type_name':
type_name = prop_value
elif prop_name == 'type_version':
type_version = prop_value
else:
artifact_properties[prop_name] = prop_value
if type_name and type_version and (type_version in
type_dictionary.get(type_name, [])):
artifact_type = type_dictionary[type_name][type_version]
else:
raise exception.UnknownArtifactType(name=type_name,
version=type_version)
type_specific_properties = db_dict.pop('properties', {})
for prop_name, prop_value in six.iteritems(type_specific_properties):
prop_type = prop_value.get('type')
prop_value = prop_value.get('value')
if prop_value is None:
continue
if '.' in prop_name: # dict-based property
name, key = prop_name.split('.', 1)
artifact_properties.setdefault(name, {})
if prop_type == 'array':
artifact_properties[name][key] = [item.get('value') for item in
prop_value]
else:
artifact_properties[name][key] = prop_value
elif prop_type == 'array': # list-based property
artifact_properties[prop_name] = [item.get('value') for item in
prop_value]
else:
artifact_properties[prop_name] = prop_value
blobs = db_dict.pop('blobs', {})
_deserialize_blobs(artifact_type, blobs, artifact_properties)
dependencies = db_dict.pop('dependencies', {})
_deserialize_dependencies(artifact_type, dependencies,
artifact_properties, type_dictionary)
return artifact_type(**artifact_properties)
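
# A hedged round-trip sketch (illustrative only, not part of this module):
# serialize_for_db flattens an artifact into the generic columns plus the
# 'properties', 'dependencies' and 'blobs' sub-dicts, and
# deserialize_from_db rebuilds it given a type dictionary of the form
# {type_name: {type_version: artifact_type_class}}.
if __name__ == '__main__':
    import datetime
    import uuid

    class DemoType(definitions.ArtifactType):
        __type_name__ = 'demo_type'
        os_family = definitions.String()

    now = datetime.datetime.utcnow()
    art = DemoType(id=str(uuid.uuid4()), name='base', version='1.0',
                   owner='admin', state='creating',
                   created_at=now, updated_at=now, os_family='linux')

    db_dict = serialize_for_db(art)
    registry = {'demo_type': {'1.0': DemoType}}
    restored = deserialize_from_db(db_dict, registry)
    assert restored == art          # ArtifactType equality is id-based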


@@ -491,9 +491,36 @@ class ArtifactPropertyValueNotFound(NotFound):
message = _("Property's %(prop)s value has not been found")
class ArtifactInvalidProperty(Invalid):
message = _("Artifact has no property %(prop)s")
class ArtifactInvalidPropertyParameter(Invalid):
message = _("Cannot use this parameter with the operator %(op)s")
class ArtifactInvalidStateTransition(Invalid):
message = _("Artifact state cannot be changed from %(curr)s to %(to)s")
class InvalidArtifactTypePropertyDefinition(Invalid):
message = _("Invalid property definition")
class InvalidArtifactTypeDefinition(Invalid):
message = _("Invalid type definition")
class InvalidArtifactPropertyValue(Invalid):
message = _("Property '%(name)s' may not have value '%(val)s': %(msg)s")
def __init__(self, message=None, *args, **kwargs):
super(InvalidArtifactPropertyValue, self).__init__(message, *args,
**kwargs)
self.name = kwargs.get('name')
self.value = kwargs.get('val')
class UnknownArtifactType(NotFound):
message = _("Artifact type with name '%(name)s' and version '%(version)s' "
"is not known")

File diff suppressed because it is too large