Improve document validation module.

This PS rewrites the document_validation module in
Deckhand to achieve the following goals:

  * better validation resiliency
  * add support for different document schema versions
  * better support for DataSchema validation
  * separation of concerns by splitting up validations
    into separate classes
  * support for validating documents that rely on
    a DataSchema passed in via the same payload
  * support for generating multiple validation errors
    rather than returning after the first one found
  * increase testing validations for unit/functional
    tests

Better validation resiliency is achieved through more
robust exception handling. For example, it is possible
for a ``DataSchema`` to be 100% valid from the POV of
built-in schema validation, but if the "data" section
itself is utterly invalid, then an exception will be
raised -- such an exception is treated as a critical
failure.

Better generation of error messages is achieved by
creating more validation error message results.

DataSchema validation was previously wonky. A DataSchema
had to first be created in 1 revision before it could be
referenced by a batch of documents in sequential revisions.
Now, a DataSchema can be created in the same (or previous)
revision as documents that rely on it and used to validate
said documents.

Finally, the module was heavily rewritten so that more
nuanced validations can be built by inheriting from
``BaseValidator`` so as to allow for easier code
readability and maintainability.

Change-Id: Ie75742b984b7ad392cb41decc203d42842050c80
This commit is contained in:
Felipe Monteiro 2017-12-15 02:32:58 +00:00
parent 2620913499
commit 453927facf
29 changed files with 897 additions and 447 deletions

View File

@ -53,8 +53,7 @@ class BucketsResource(api_base.BaseResource):
try:
doc_validator = document_validation.DocumentValidation(documents)
validations = doc_validator.validate_all()
except (deckhand_errors.InvalidDocumentFormat,
deckhand_errors.InvalidDocumentSchema) as e:
except deckhand_errors.InvalidDocumentFormat as e:
LOG.exception(e.format_message())
raise falcon.HTTPBadRequest(description=e.format_message())

View File

@ -170,8 +170,7 @@ class RenderedDocumentsResource(api_base.BaseResource):
doc_validator = document_validation.DocumentValidation(documents)
try:
doc_validator.validate_all()
except (errors.InvalidDocumentFormat,
errors.InvalidDocumentSchema) as e:
except errors.InvalidDocumentFormat as e:
LOG.error('Failed to post-validate rendered documents.')
LOG.exception(e.format_message())
raise falcon.HTTPInternalServerError(

View File

@ -12,170 +12,294 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import abc
import re
import jsonschema
from oslo_log import log as logging
import six
from deckhand.db.sqlalchemy import api as db_api
from deckhand.engine import document as document_wrapper
from deckhand.engine.schema import base_schema
from deckhand.engine.schema import v1_0
from deckhand import errors
from deckhand import types
from deckhand import utils
LOG = logging.getLogger(__name__)
@six.add_metaclass(abc.ABCMeta)
class BaseValidator(object):
    """Abstract base validator.

    Sub-classes should override this to implement schema-specific document
    validation.
    """

    # Schema versions understood by validators in this module.
    _supported_versions = ('v1',)
    # Matches schema identifiers such as "deckhand/Certificate/v1" or
    # "deckhand/Certificate/v1.0". NOTE: the dot in the optional ".0"
    # suffix must be escaped -- an unescaped "." would accept any
    # character (e.g. "v1x0").
    _schema_re = re.compile(r'^[a-zA-Z]+/[a-zA-Z]+/v\d+(\.0)?$')

    @abc.abstractmethod
    def matches(self, document):
        """Whether this Validator should be used to validate ``document``.

        :param dict document: Document to validate.
        :returns: True if Validator applies to ``document``, else False.
        """

    @abc.abstractmethod
    def validate(self, document):
        """Validate whether ``document`` passes schema validation."""
class GenericValidator(BaseValidator):
    """Sanity-check validator applied to every document.

    Runs against all documents -- concrete or abstract, whatever schema
    version they declare.
    """

    def matches(self, document):
        # The generic sanity check applies to every schema.
        return True

    def validate(self, document):
        """Validate ``document`` against basic schema validation.

        Sanity-checks each document for mandatory keys like "metadata" and
        "schema".

        Applies even to abstract documents, as they must be consumed by
        concrete documents, so basic formatting is mandatory.

        Failure to pass this check results in an error.

        :param dict document: Document to validate.
        :raises RuntimeError: If the Deckhand schema itself is invalid.
        :raises errors.InvalidDocumentFormat: If the document failed schema
            validation.
        :returns: None
        """
        try:
            jsonschema.Draft4Validator.check_schema(base_schema.schema)
            validator = jsonschema.Draft4Validator(base_schema.schema)
            failures = [err.message
                        for err in validator.iter_errors(document)]
        except Exception as e:
            # Any exception here means the base schema itself is broken,
            # which is a critical (non-document) failure.
            raise RuntimeError(
                'Unknown error occurred while attempting to use Deckhand '
                'schema. Details: %s' % six.text_type(e))
        if failures:
            LOG.error(
                'Failed sanity-check validation for document [%s] %s. '
                'Details: %s', document.get('schema', 'N/A'),
                document.get('metadata', {}).get('name'), failures)
            raise errors.InvalidDocumentFormat(details=failures)
class SchemaValidator(BaseValidator):
    """Validator for validating built-in document kinds."""

    # Maps schema version -> (schema prefix -> schema module) for the
    # built-in Deckhand document kinds.
    _schema_map = {
        'v1': {
            'deckhand/CertificateAuthorityKey':
                v1_0.certificate_authority_key_schema,
            'deckhand/CertificateAuthority':
                v1_0.certificate_authority_schema,
            'deckhand/CertificateKey': v1_0.certificate_key_schema,
            'deckhand/Certificate': v1_0.certificate_schema,
            'deckhand/DataSchema': v1_0.data_schema_schema,
            'deckhand/LayeringPolicy': v1_0.layering_policy_schema,
            'deckhand/Passphrase': v1_0.passphrase_schema,
            'deckhand/PrivateKey': v1_0.private_key_schema,
            'deckhand/PublicKey': v1_0.public_key_schema,
            'deckhand/ValidationPolicy': v1_0.validation_policy_schema,
        }
    }

    # Represents a generic document schema.
    _fallback_schema = v1_0.document_schema

    def _get_schemas(self, document):
        """Retrieve the relevant schemas based on the document's ``schema``.

        :param dict document: The document used for finding the correct
            schema to validate it based on its ``schema``.
        :returns: Schemas to be used by ``jsonschema`` for document
            validation.
        :rtype: list
        """
        schema_prefix, schema_version = get_schema_parts(document)
        matching_schemas = []
        relevant_schemas = self._schema_map.get(schema_version, {})
        for candidate_schema_prefix, schema in relevant_schemas.items():
            if candidate_schema_prefix == schema_prefix:
                if schema not in matching_schemas:
                    matching_schemas.append(schema)
        return matching_schemas

    def matches(self, document):
        if is_abstract(document) is True:
            LOG.info('Skipping schema validation for abstract document [%s]: '
                     '%s.', document['schema'], document['metadata']['name'])
            return False
        return True

    def validate(self, document, validate_section='',
                 use_fallback_schema=True):
        """Validate ``document`` against built-in ``schema``-specific schemas.

        Does not apply to abstract documents.

        :param dict document: Document to validate.
        :param str validate_section: Document section to validate. If empty
            string, validates entire ``document``.
        :param bool use_fallback_schema: Whether to use the "fallback" schema
            if no matching schemas are found by :method:``matches``.
        :raises RuntimeError: If the Deckhand schema itself is invalid.
        :returns: Tuple of (error message, parent path for failing property)
            following schema validation failure.
        :rtype: Generator[Tuple[str, str]]
        """
        schemas_to_use = self._get_schemas(document)
        if not schemas_to_use and use_fallback_schema:
            LOG.debug('Document schema %s not recognized. Using "fallback" '
                      'schema.', document['schema'])
            schemas_to_use = [SchemaValidator._fallback_schema]

        for schema_to_use in schemas_to_use:
            schema = schema_to_use.schema
            if validate_section:
                to_validate = document.get(validate_section, None)
                root_path = '.' + validate_section + '.'
            else:
                to_validate = document
                root_path = '.'
            try:
                jsonschema.Draft4Validator.check_schema(schema)
                schema_validator = jsonschema.Draft4Validator(schema)
                # NOTE: named so as not to shadow the module-level
                # ``errors`` import.
                validation_errors = schema_validator.iter_errors(to_validate)
            except Exception as e:
                LOG.exception(six.text_type(e))
                raise RuntimeError(
                    'Unknown error occurred while attempting to use schema '
                    'for validation. Details: %s.' % six.text_type(e))
            else:
                for error in validation_errors:
                    LOG.error(
                        'Failed schema validation for document [%s] %s. '
                        'Details: %s.', document['schema'],
                        document['metadata']['name'], error.message)
                    parent_path = root_path + '.'.join(
                        [six.text_type(x) for x in error.path])
                    yield error.message, parent_path
class DataSchemaValidator(SchemaValidator):
    """Validator for validating ``DataSchema`` documents."""

    def __init__(self, data_schemas):
        super(DataSchemaValidator, self).__init__()
        # Replace the built-in schema map with one derived from the
        # user-registered ``DataSchema`` documents.
        self._schema_map = self._build_schema_map(data_schemas)

    def _build_schema_map(self, data_schemas):
        """Build a ``{version: {schema_prefix: schema}}`` mapping from
        ``DataSchema`` documents.

        Malformed ``DataSchema`` documents are silently skipped so one bad
        document cannot block validation of the rest.

        :param data_schemas: ``DataSchema`` documents to index.
        :type data_schemas: List[dict]
        :returns: Mapping of schema version to prefix-to-schema mapping.
        :rtype: dict
        """
        schema_map = {k: {} for k in self._supported_versions}
        for data_schema in data_schemas:
            # Ensure that each `DataSchema` document has required properties
            # before they themselves can be used to validate other documents.
            if 'name' not in data_schema.get('metadata', {}):
                continue
            if self._schema_re.match(data_schema['metadata']['name']) is None:
                continue
            if 'data' not in data_schema:
                continue
            schema_prefix, schema_version = get_schema_parts(data_schema,
                                                             'metadata.name')

            # Wrap the raw schema dict in an object with a ``schema``
            # attribute so it matches the interface that
            # ``SchemaValidator.validate`` expects. The class body is
            # evaluated immediately each iteration, so each Schema captures
            # the current ``data_schema``.
            class Schema(object):
                schema = data_schema['data']

            # ``setdefault``: the first DataSchema seen for a given prefix
            # wins; later duplicates are ignored.
            schema_map[schema_version].setdefault(schema_prefix, Schema())

        return schema_map

    def matches(self, document):
        if is_abstract(document) is True:
            LOG.info('Skipping schema validation for abstract document [%s]: '
                     '%s.', document['schema'], document['metadata']['name'])
            return False
        schema_prefix, schema_version = get_schema_parts(document)
        # Only match documents whose schema was registered via a DataSchema.
        return schema_prefix in self._schema_map.get(schema_version, {})

    def validate(self, document):
        # Validate only the "data" section and never fall back to the
        # generic document schema: if no DataSchema matched, there is
        # nothing meaningful to validate against.
        return super(DataSchemaValidator, self).validate(
            document, validate_section='data', use_fallback_schema=False)
class DocumentValidation(object):
def __init__(self, documents):
"""Class for document validation logic for YAML files.
"""Class for document validation logic for documents.
This class is responsible for validating YAML files according to their
This class is responsible for validating documents according to their
schema.
``DataSchema`` documents must be validated first, as they are in turn
used to validate other documents.
:param documents: Documents to be validated.
:type documents: list[dict]
:type documents: List[dict]
"""
self.documents = []
data_schemas = db_api.revision_documents_get(
schema=types.DATA_SCHEMA_SCHEMA, deleted=False)
db_data_schemas = {d['metadata']['name']: d for d in data_schemas}
if not isinstance(documents, (list, tuple)):
documents = [documents]
try:
for document in documents:
doc = copy.deepcopy(document)
# NOTE(fmontei): Remove extraneous top-level keys so that fully
# rendered documents pass schema validation.
for key in doc.copy():
if key not in ('metadata', 'schema', 'data'):
doc.pop(key)
self.documents.append(document_wrapper.Document(doc))
except Exception:
raise errors.InvalidDocumentFormat(
detail='Document could not be converted into a dictionary',
schema='Unknown')
for document in documents:
if document.get('schema', '').startswith(types.DATA_SCHEMA_SCHEMA):
data_schemas.append(document)
# If a newer version of the same DataSchema was passed in,
# only use the new one and discard the old one.
document_name = document.get('metadata', {}).get('name')
if document_name in db_data_schemas:
data_schemas.remove(db_data_schemas.pop(document_name))
self.documents.append(document)
class SchemaType(object):
"""Class for retrieving correct schema for pre-validation on YAML.
# NOTE(fmontei): The order of the validators is important. The
# ``GenericValidator`` must come first.
self._validators = [
GenericValidator(),
SchemaValidator(),
DataSchemaValidator(data_schemas)
]
Retrieves the schema that corresponds to "apiVersion" in the YAML
data. This schema is responsible for performing pre-validation on
YAML data.
"""
schema_versions_info = [
{'id': 'deckhand/CertificateAuthorityKey',
'schema': v1_0.certificate_authority_key_schema,
'version': '1.0'},
{'id': 'deckhand/CertificateAuthority',
'schema': v1_0.certificate_authority_schema,
'version': '1.0'},
{'id': 'deckhand/CertificateKey',
'schema': v1_0.certificate_key_schema,
'version': '1.0'},
{'id': 'deckhand/Certificate',
'schema': v1_0.certificate_schema,
'version': '1.0'},
{'id': 'deckhand/PrivateKey',
'schema': v1_0.private_key_schema,
'version': '1.0'},
{'id': 'deckhand/PublicKey',
'schema': v1_0.public_key_schema,
'version': '1.0'},
{'id': 'deckhand/DataSchema',
'schema': v1_0.data_schema_schema,
'version': '1.0'},
{'id': 'deckhand/LayeringPolicy',
'schema': v1_0.layering_policy_schema,
'version': '1.0'},
{'id': 'deckhand/Passphrase',
'schema': v1_0.passphrase_schema,
'version': '1.0'},
{'id': 'deckhand/ValidationPolicy',
'schema': v1_0.validation_policy_schema,
'version': '1.0'},
# FIXME(fmontei): Remove this once all Deckhand tests have been
# refactored to account for dynamic schema registeration via
# `DataSchema` documents. Otherwise most tests will fail.
{'id': 'metadata/Document',
'schema': v1_0.document_schema,
'version': '1.0'}]
schema_re = re.compile(
'^([A-Za-z]+\/[A-Za-z]+\/v[1]{1}(\.[0]{1}){0,1})$')
@classmethod
def _register_data_schemas(cls):
"""Dynamically detect schemas for document validation that have
been registered by external services via ``DataSchema`` documents.
"""
data_schemas = db_api.document_get_all(
schema=types.DATA_SCHEMA_SCHEMA, deleted=False)
for data_schema in data_schemas:
if cls.schema_re.match(data_schema['metadata']['name']):
schema_id = '/'.join(
data_schema['metadata']['name'].split('/')[:2])
else:
schema_id = data_schema['metadata']['name']
cls.schema_versions_info.append({
'id': schema_id,
'schema': data_schema['data'],
'version': '1.0',
'registered': True,
})
@classmethod
def _get_schema_by_property(cls, schema_re, field):
if schema_re.match(field):
schema_id = '/'.join(field.split('/')[:2])
else:
schema_id = field
matching_schemas = []
for schema in cls.schema_versions_info:
# Can't use `startswith` below to avoid namespace false
# positives like `CertificateKey` and `Certificate`.
if schema_id == schema['id']:
if schema not in matching_schemas:
matching_schemas.append(schema)
return matching_schemas
@classmethod
def get_schemas(cls, doc):
"""Retrieve the relevant schema based on the document's ``schema``.
:param dict doc: The document used for finding the correct schema
to validate it based on its ``schema``.
:returns: A schema to be used by ``jsonschema`` for document
validation.
:rtype: dict
"""
cls._register_data_schemas()
# FIXME(fmontei): Remove this once all Deckhand tests have been
# refactored to account for dynamic schema registeration via
# ``DataSchema`` documents. Otherwise most tests will fail.
for doc_field in [doc['schema'], doc['metadata']['schema']]:
matching_schemas = cls._get_schema_by_property(
cls.schema_re, doc_field)
if matching_schemas:
return matching_schemas
return []
def _get_supported_schema_list(self):
schema_list = []
for validator in self._validators[1:]:
for schema_version, schema_map in validator._schema_map.items():
for schema_prefix in schema_map:
schema_list.append(schema_prefix + '/' + schema_version)
return schema_list
def _format_validation_results(self, results):
"""Format the validation result to be compatible with database
formatting.
:results: The validation results generated during document validation.
:type results: list[dict]
:type results: List[dict]
:returns: List of formatted validation results.
:rtype: `func`:list[dict]
:rtype: List[dict]
"""
internal_validator = {
'name': 'deckhand',
@ -195,61 +319,35 @@ class DocumentValidation(object):
return formatted_results
def _validate_one(self, document):
raw_dict = document.to_dict()
try:
# Subject every document to basic validation to verify that each
# main section is present (schema, metadata, data).
jsonschema.validate(raw_dict, base_schema.schema)
except jsonschema.exceptions.ValidationError as e:
LOG.debug('Document failed top-level schema validation. Details: '
'%s', e.message)
# NOTE(fmontei): Raise here because if we fail basic schema
# validation, then there is no point in continuing.
raise errors.InvalidDocumentFormat(
detail=e.message, schema=e.schema)
schemas_to_use = self.SchemaType.get_schemas(raw_dict)
if not schemas_to_use:
LOG.debug('Document schema %s not recognized.',
document.get_schema())
# NOTE(fmontei): Raise here because if Deckhand cannot even
# determine which schema to use for further validation, then there
# is no point in trying to continue validation.
raise errors.InvalidDocumentSchema(
document_schema=document.get_schema(),
schema_list=[
s['id'] for s in self.SchemaType.schema_versions_info])
result = {'errors': []}
# Perform more detailed validation on each document depending on
# its schema. If the document is abstract, validation errors are
# ignored.
if document.is_abstract():
LOG.info('Skipping schema validation for abstract '
'document: [%s] %s.', document.get_schema(),
document.get_name())
else:
supported_schema_list = self._get_supported_schema_list()
document_schema = None if not document.get('schema') else '/'.join(
get_schema_parts(document))
if document_schema not in supported_schema_list:
error_msg = ("The provided document schema %s is invalid. "
"Supported schemas include: %s" % (
document.get('schema', 'N/A'),
supported_schema_list))
LOG.error(error_msg)
result['errors'].append({
'schema': document.get('schema', 'N/A'),
'name': document.get('metadata', {}).get('name', 'N/A'),
'message': error_msg,
'path': '.'
})
for schema_to_use in schemas_to_use:
try:
if isinstance(schema_to_use['schema'], dict):
schema_validator = schema_to_use['schema']
jsonschema.validate(raw_dict.get('data', {}),
schema_validator)
else:
schema_validator = schema_to_use['schema'].schema
jsonschema.validate(raw_dict, schema_validator)
except jsonschema.exceptions.ValidationError as e:
LOG.error(
'Document failed schema validation for schema %s.'
'Details: %s.', document.get_schema(), e.message)
result['errors'].append({
'schema': document.get_schema(),
'name': document.get_name(),
'message': e.message.replace('u\'', '\'')
})
for validator in self._validators:
if validator.matches(document):
error_messages = validator.validate(document)
if error_messages:
for error_msg, error_path in error_messages:
result['errors'].append({
'schema': document['schema'],
'name': document['metadata']['name'],
'message': error_msg,
'path': error_path
})
if result['errors']:
result.setdefault('status', 'failure')
@ -259,17 +357,19 @@ class DocumentValidation(object):
return result
def validate_all(self):
"""Pre-validate that the YAML file is correctly formatted.
"""Pre-validate that all documents are correctly formatted.
All concrete documents in the revision successfully pass their JSON
schema validations. The result of the validation is stored under
All concrete documents in the revision must successfully pass their
JSON schema validations. The result of the validation is stored under
the "deckhand-document-schema-validation" validation namespace for
a document revision.
Validation is broken up into 2 stages:
All abstract documents must themselves be sanity-checked.
Validation is broken up into 3 stages:
1) Validate that each document contains the basic bulding blocks
needed: ``schema`` and ``metadata`` using a "base" schema.
needed: i.e. ``schema`` and ``metadata`` using a "base" schema.
Failing this validation is deemed a critical failure, resulting
in an exception.
@ -287,18 +387,45 @@ class DocumentValidation(object):
any other non-critical exceptions, which are returned together
later.
3) Execute ``DataSchema`` validations if applicable.
:returns: A list of validations (one for each document validated).
:rtype: `func`:list[dict]
:rtype: List[dict]
:raises errors.InvalidDocumentFormat: If the document failed schema
validation and the failure is deemed critical.
:raises errors.InvalidDocumentSchema: If no JSON schema for could be
found for executing document validation.
:raises RuntimeError: If a Deckhand schema itself is invalid.
"""
validation_results = []
for document in self.documents:
result = self._validate_one(document)
validation_results.append(result)
# NOTE(fmontei): Since ``DataSchema`` documents created in previous
# revisions are retrieved and combined with new ``DataSchema``
# documents, we only want to create a validation result in the DB
# for the new documents. One way to do this is to check whether the
# document contains the 'id' key which is only assigned by the DB.
requires_validation = 'id' not in document
if requires_validation:
result = self._validate_one(document)
validation_results.append(result)
validations = self._format_validation_results(validation_results)
return validations
def is_abstract(document):
    """Return whether ``document`` is abstract.

    :param dict document: Document to check.
    :returns: The value of ``metadata.layeringDefinition.abstract`` if
        present, else False (including when the path is missing or the
        document is not a dict).
    """
    try:
        return document['metadata']['layeringDefinition']['abstract']
    except (KeyError, TypeError):
        # Narrowed from a bare ``except Exception``: dict indexing can only
        # raise KeyError (missing key) or TypeError (non-dict value); any
        # other exception would indicate a real bug and should propagate.
        return False
def get_schema_parts(document, schema_key='schema'):
    """Split a document's schema identifier into prefix and version.

    E.g. ``deckhand/Certificate/v1.0`` yields
    ``('deckhand/Certificate', 'v1')``; a trailing ``.0`` is dropped so
    that ``v1`` and ``v1.0`` are treated as the same version.

    :param dict document: Document from which to read the schema string.
    :param str schema_key: JSON path at which the schema string lives.
    :returns: Tuple of (schema prefix, schema version).
    """
    raw_schema = utils.jsonpath_parse(document, schema_key)
    parts = raw_schema.split('/')
    prefix = '/'.join(parts[:2])
    version = parts[2]
    if version.endswith('.0'):
        version = version[:-2]
    return prefix, version

View File

@ -18,7 +18,7 @@ schema = {
'schema': {
'type': 'string',
# Currently supported versions include v1/v1.0 only.
'pattern': '^([A-Za-z]+\/[A-Za-z]+\/v[1]{1}(\.[0]{1}){0,1})$'
'pattern': '^[A-Za-z]+\/[A-Za-z]+\/v\d+(.0)?$'
},
'metadata': {
'type': 'object',

View File

@ -17,21 +17,22 @@ schema = {
'properties': {
'schema': {
'type': 'string',
'pattern': '^(deckhand/CertificateKey/v[1]{1}(\.[0]{1}){0,1})$'
'pattern': '^deckhand/CertificateKey/v\d+(.0)?$'
},
'metadata': {
'type': 'object',
'properties': {
'schema': {
'type': 'string',
'pattern': '^(metadata/Document/v[1]{1}(\.[0]{1}){0,1})$',
'pattern': '^metadata/Document/v\d+(.0)?$',
},
'name': {'type': 'string'},
# Not strictly needed for secrets.
'layeringDefinition': {
'type': 'object',
'properties': {
'layer': {'type': 'string'}
'layer': {'type': 'string'},
'abstract': {'type': 'boolean'}
}
},
'storagePolicy': {

View File

@ -17,21 +17,22 @@ schema = {
'properties': {
'schema': {
'type': 'string',
'pattern': '^(deckhand/Certificate/v[1]{1}(\.[0]{1}){0,1})$'
'pattern': '^deckhand/Certificate/v\d+(.0)?$'
},
'metadata': {
'type': 'object',
'properties': {
'schema': {
'type': 'string',
'pattern': '^(metadata/Document/v[1]{1}(\.[0]{1}){0,1})$',
'pattern': '^metadata/Document/v\d+(.0)?$',
},
'name': {'type': 'string'},
# Not strictly needed for secrets.
'layeringDefinition': {
'type': 'object',
'properties': {
'layer': {'type': 'string'}
'layer': {'type': 'string'},
'abstract': {'type': 'boolean'}
}
},
'storagePolicy': {

View File

@ -20,19 +20,18 @@ schema = {
'properties': {
'schema': {
'type': 'string',
'pattern': '^(deckhand/DataSchema/v[1]{1}(\.[0]{1}){0,1})$'
'pattern': '^deckhand/DataSchema/v\d+(.0)?$'
},
'metadata': {
'type': 'object',
'properties': {
'schema': {
'type': 'string',
'pattern': '^(metadata/Control/v[1]{1}(\.[0]{1}){0,1})$'
'pattern': '^metadata/Control/v\d+(.0)?$'
},
'name': {
'type': 'string',
'pattern': (
'^([A-Za-z]+\/[A-Za-z]+\/v[1]{1}(\.[0]{1}){0,1})$')
'pattern': '^[A-Za-z]+\/[A-Za-z]+\/v\d+(.0)?$'
},
# Labels are optional.
'labels': {
@ -43,7 +42,7 @@ schema = {
'enum': ['encrypted', 'cleartext']
}
},
'additionalProperties': False,
'additionalProperties': True, # Can include layeringDefinition.
'required': ['schema', 'name']
},
'data': {
@ -65,7 +64,7 @@ schema = {
.. literalinclude:: ../../deckhand/engine/schema/v1_0/data_schema_schema.py
:language: python
:lines: 15-62
:lines: 15-61
This schema is used to sanity-check all DataSchema documents that are
passed to Deckhand. This schema is only enforced after validation for

View File

@ -44,14 +44,14 @@ schema = {
'properties': {
'schema': {
'type': 'string',
'pattern': '^([A-Za-z]+/[A-Za-z]+/v[1]{1}(\.[0]{1}){0,1})$'
'pattern': '^[A-Za-z]+/[A-Za-z]+/v\d+(.0)?$'
},
'metadata': {
'type': 'object',
'properties': {
'schema': {
'type': 'string',
'pattern': '^(metadata/Document/v[1]{1}(\.[0]{1}){0,1})$'
'pattern': '^metadata/Document/v\d+(.0)?$'
},
'name': {'type': 'string'},
'labels': {'type': 'object'},

View File

@ -17,14 +17,14 @@ schema = {
'properties': {
'schema': {
'type': 'string',
'pattern': '^(deckhand/LayeringPolicy/v[1]{1}(\.[0]{1}){0,1})$'
'pattern': '^deckhand/LayeringPolicy/v\d+(.0)?$'
},
'metadata': {
'type': 'object',
'properties': {
'schema': {
'type': 'string',
'pattern': '^(metadata/Control/v[1]{1}(\.[0]{1}){0,1})$'
'pattern': '^metadata/Control/v\d+(.0)?$'
},
'name': {'type': 'string'},
'storagePolicy': {

View File

@ -17,21 +17,22 @@ schema = {
'properties': {
'schema': {
'type': 'string',
'pattern': '^(deckhand/Passphrase/v[1]{1}(\.[0]{1}){0,1})$'
'pattern': '^deckhand/Passphrase/v\d+(.0)?$'
},
'metadata': {
'type': 'object',
'properties': {
'schema': {
'type': 'string',
'pattern': '^(metadata/Document/v[1]{1}(\.[0]{1}){0,1})$',
'pattern': '^metadata/Document/v\d+(.0)?$',
},
'name': {'type': 'string'},
# Not strictly needed for secrets.
# Not strictly needed.
'layeringDefinition': {
'type': 'object',
'properties': {
'layer': {'type': 'string'}
'layer': {'type': 'string'},
'abstract': {'type': 'boolean'}
}
},
'storagePolicy': {

View File

@ -17,14 +17,14 @@ schema = {
'properties': {
'schema': {
'type': 'string',
'pattern': '^(deckhand/ValidationPolicy/v[1]{1}(\.[0]{1}){0,1})$'
'pattern': '^deckhand/ValidationPolicy/v\d+(.0)?$'
},
'metadata': {
'type': 'object',
'properties': {
'schema': {
'type': 'string',
'pattern': '^(metadata/Control/v[1]{1}(\.[0]{1}){0,1})$'
'pattern': '^metadata/Control/v\d+(.0)?$'
},
'name': {'type': 'string'},
'storagePolicy': {

View File

@ -171,14 +171,8 @@ class DeckhandException(Exception):
class InvalidDocumentFormat(DeckhandException):
msg_fmt = ("The provided document YAML failed schema validation. Details: "
"%(detail)s. Schema: %(schema)s.")
code = 400
class InvalidDocumentSchema(DeckhandException):
msg_fmt = ("The provided %(document_schema)s is invalid. Supported "
"schemas: %(schema_list)s.")
msg_fmt = ("The provided document failed schema validation. Details: "
"%(details)s")
code = 400

View File

@ -79,7 +79,8 @@ class DataSchemaFactory(DeckhandFactory):
data_schema_template['metadata']['name'] = metadata_name
data_schema_template['metadata']['labels'] = metadata_labels
data_schema_template['data'] = data
if data:
data_schema_template['data'] = data
return data_schema_template

View File

@ -54,12 +54,14 @@ tests:
- name: verify_initial
desc: Verify initial document count and revisions
GET: /api/v1.0/revisions/$RESPONSE['$.[0].status.revision']/documents
query_parameters:
sort: metadata.name
status: 200
response_multidoc_jsonpaths:
$.`len`: 4
$.[*].metadata.name:
- layering-policy
- global-1234
- layering-policy
- region-1234
- site-1234
$.[*].status.revision:
@ -83,11 +85,16 @@ tests:
desc: Verify duplicate documents were ignored
GET: /api/v1.0/revisions/$HISTORY['initialize'].$RESPONSE['$.[0].status.revision']/documents
status: 200
response_multidoc_jsonpaths:
$.`len`: 4
query_parameters:
sort: metadata.name
status: 200
response_multidoc_jsonpaths:
$.`len`: 4
$.[*].metadata.name:
- layering-policy
- global-1234
- layering-policy
- region-1234
- site-1234
$.[*].status.revision:
@ -110,12 +117,14 @@ tests:
- name: verify_update
desc: Verify updated document count and revisions
GET: /api/v1.0/revisions/$RESPONSE['$.[0].status.revision']/documents
query_parameters:
sort: metadata.name
status: 200
response_multidoc_jsonpaths:
$.`len`: 4
$.[*].metadata.name:
- layering-policy
- global-1234
- layering-policy
- region-1234
- site-1234
$.[*].status.revision:
@ -133,12 +142,14 @@ tests:
- name: verify_initial_documents_preserved_after_update
desc: Verify initial documents count and revisions preserved after update
GET: /api/v1.0/revisions/$HISTORY['initialize'].$RESPONSE['$.[0].status.revision']/documents
query_parameters:
sort: metadata.name
status: 200
response_multidoc_jsonpaths:
$.`len`: 4
$.[*].metadata.name:
- layering-policy
- global-1234
- layering-policy
- region-1234
- site-1234
$.[*].status.revision:
@ -162,6 +173,8 @@ tests:
- name: verify_delete
desc: Verify document deletion
GET: /api/v1.0/revisions/$RESPONSE['$.[0].status.revision']/documents
query_parameters:
sort: metadata.name
status: 200
response_multidoc_jsonpaths:
$.`len`: 3
@ -170,8 +183,8 @@ tests:
- "$HISTORY['initialize'].$RESPONSE['$.[0].status.revision']"
- "$HISTORY['update_single_document'].$RESPONSE['$.[0].status.revision']"
$.[*].metadata.name:
- layering-policy
- global-1234
- layering-policy
- site-1234
$.[*].status.bucket:
- mop
@ -182,12 +195,14 @@ tests:
- name: verify_initial_documents_preserved_after_delete
desc: Verify initial documents count and revisions
GET: /api/v1.0/revisions/$HISTORY['initialize'].$RESPONSE['$.[0].status.revision']/documents
query_parameters:
sort: metadata.name
status: 200
response_multidoc_jsonpaths:
$.`len`: 4
$.[*].metadata.name:
- layering-policy
- global-1234
- layering-policy
- region-1234
- site-1234
$.[*].status.revision:
@ -205,12 +220,14 @@ tests:
- name: verify_updated_documents_preserved_after_delete
desc: Verify updated documents count and revisions preserved after delete
GET: /api/v1.0/revisions/$HISTORY['update_single_document'].$RESPONSE['$.[0].status.revision']/documents
query_parameters:
sort: metadata.name
status: 200
response_multidoc_jsonpaths:
$.`len`: 4
$.[*].metadata.name:
- layering-policy
- global-1234
- layering-policy
- region-1234
- site-1234
$.[*].status.revision:

View File

@ -196,7 +196,6 @@ tests:
layer: site
data:
value: mistake
...
- name: delete_mistake
desc: Delete documents from bucket mistake

View File

@ -123,12 +123,13 @@ tests:
GET: /api/v1.0/revisions/$RESPONSE['$.[0].status.revision']/documents
query_parameters:
status.bucket: mop
sort: metadata.name
status: 200
response_multidoc_jsonpaths:
$.`len`: 4
$.[*].metadata.name:
- layering-policy
- global-1234
- layering-policy
- region-1234
- site-1234
$.[*].status.bucket:

View File

@ -36,7 +36,6 @@ tests:
status: 204
response_headers: null
# Create a revision implicitly by creating a document.
- name: initialize
desc: Create initial documents
PUT: /api/v1.0/buckets/mop/documents
@ -76,7 +75,7 @@ tests:
GET: /api/v1.0/revisions
status: 200
response_multidoc_jsonpaths:
$.`len`: 1
$.[0].results.`len`: 1
$.[0].results[0].tags.`len`: 1
$.[0].results[0].tags: [foo]
@ -124,7 +123,7 @@ tests:
GET: /api/v1.0/revisions
status: 200
response_multidoc_jsonpaths:
$.`len`: 1
$.[0].results.`len`: 1
$.[0].results[0].tags.`len`: 2
$.[0].results[0].tags: [bar, foo]
@ -162,7 +161,7 @@ tests:
GET: /api/v1.0/revisions
status: 200
response_multidoc_jsonpaths:
$.`len`: 1
$.[0].results.`len`: 1
$.[0].results[0].tags.`len`: 1
$.[0].results[0].tags: [bar]
@ -185,7 +184,7 @@ tests:
GET: /api/v1.0/revisions
status: 200
response_multidoc_jsonpaths:
$.`len`: 1
$.[0].results.`len`: 1
$.[0].results[0].tags: []
- name: verify_tag_delete_all

View File

@ -52,12 +52,14 @@ tests:
- name: verify_revision_1
desc: Verify initial document count and revisions
GET: /api/v1.0/revisions/$HISTORY['initialize'].$RESPONSE['$.[0].status.revision']/documents
query_parameters:
sort: metadata.name
status: 200
response_multidoc_jsonpaths:
$.`len`: 4
$.[*].metadata.name:
- layering-policy
- global-1234
- layering-policy
- region-1234
- site-1234
$.[*].status.revision:
@ -70,17 +72,23 @@ tests:
- mop
- mop
- mop
$.[0].data.a:
x: 1
y: 2
$.[2].data.a.z: 3
$.[3].data.b: 4
- name: verify_revision_2
desc: Verify updated document count and revisions
GET: /api/v1.0/revisions/$HISTORY['update_single_document'].$RESPONSE['$.[0].status.revision']/documents
query_parameters:
sort: metadata.name
status: 200
response_multidoc_jsonpaths:
$.`len`: 4
$.[*].metadata.name:
- layering-policy
- global-1234
- layering-policy
- region-1234
- site-1234
$.[*].status.revision:
@ -93,17 +101,23 @@ tests:
- mop
- mop
- mop
$.[0].data.a:
x: 1
y: 2
$.[2].data.a.z: 3
$.[3].data.b: 5
- name: verify_revision_3
desc: Verify document deletion
GET: /api/v1.0/revisions/$HISTORY['delete_document'].$RESPONSE['$.[0].status.revision']/documents
query_parameters:
sort: metadata.name
status: 200
response_multidoc_jsonpaths:
$.`len`: 3
$.[*].metadata.name:
- layering-policy
- global-1234
- layering-policy
- site-1234
$.[*].status.revision:
- "$HISTORY['initialize'].$RESPONSE['$.[0].status.revision']"
@ -113,17 +127,22 @@ tests:
- mop
- mop
- mop
$.[0].data.a:
x: 1
y: 2
$.[2].data.b: 5
- name: verify_revision_4
desc: Verify rollback revision
GET: /api/v1.0/revisions/$HISTORY['rollback'].$RESPONSE['$.[0].id']/documents
query_parameters:
sort: metadata.name
status: 200
response_multidoc_jsonpaths:
$.`len`: 4
$.[*].metadata.name:
- layering-policy
- global-1234
- layering-policy
- region-1234
- site-1234
$.[*].status.revision:
@ -136,4 +155,8 @@ tests:
- mop
- mop
- mop
$.[0].data.a:
x: 1
y: 2
$.[2].data.a.z: 3
$.[3].data.b: 4

View File

@ -88,14 +88,14 @@ class DeckhandTestCase(testtools.TestCase):
self.addCleanup(p.stop)
return m
def patchobject(self, target, attribute, new=mock.DEFAULT, autospec=True):
def patchobject(self, target, attribute, new=mock.DEFAULT, **kwargs):
"""Convenient wrapper around `mock.patch.object`
Returns a started mock that will be automatically stopped after the
test ran.
"""
p = mock.patch.object(target, attribute, new, autospec=autospec)
p = mock.patch.object(target, attribute, new, **kwargs)
m = p.start()
self.addCleanup(p.stop)
return m

View File

@ -68,14 +68,15 @@ class TestBucketsController(test_base.BaseControllerTest):
def test_put_bucket_with_secret(self):
def _do_test(payload):
bucket_name = test_utils.rand_name('bucket')
resp = self.app.simulate_put(
'/api/v1.0/buckets/mop/documents',
'/api/v1.0/buckets/%s/documents' % bucket_name,
headers={'Content-Type': 'application/x-yaml'},
body=yaml.safe_dump_all(payload))
self.assertEqual(200, resp.status_code)
created_documents = list(yaml.safe_load_all(resp.text))
self.assertEqual(1, len(created_documents))
self.assertEqual(len(payload), len(created_documents))
expected = sorted([(d['schema'], d['metadata']['name'])
for d in payload])
actual = sorted([(d['schema'], d['metadata']['name'])
@ -108,16 +109,16 @@ class TestBucketsController(test_base.BaseControllerTest):
# `metadata.storagePolicy`='encrypted'. In the case below,
# a generic document is tested.
documents_factory = factories.DocumentFactory(1, [1])
document_mapping = {
"_GLOBAL_DATA_1_": {"data": {"a": {"x": 1, "y": 2}}}
}
payload = documents_factory.gen_test(document_mapping,
global_abstract=False)
payload[-1]['metadata']['storagePolicy'] = 'encrypted'
document = documents_factory.gen_test({}, global_abstract=False)[-1]
document['metadata']['storagePolicy'] = 'encrypted'
data_schema_factory = factories.DataSchemaFactory()
data_schema = data_schema_factory.gen_test(document['schema'], {})
with mock.patch.object(buckets.BucketsResource, 'secrets_mgr',
autospec=True) as mock_secrets_mgr:
mock_secrets_mgr.create.return_value = payload[-1]['data']
_do_test([payload[-1]])
mock_secrets_mgr.create.return_value = document['data']
_do_test([document, data_schema])
def test_create_delete_then_recreate_document_in_different_bucket(self):
"""Ordiniarly creating a document with the same metadata.name/schema
@ -164,28 +165,6 @@ class TestBucketsController(test_base.BaseControllerTest):
class TestBucketsControllerNegative(test_base.BaseControllerTest):
"""Test suite for validating negative scenarios for bucket controller."""
def test_put_bucket_with_invalid_document_payload(self):
rules = {'deckhand:create_cleartext_documents': '@'}
self.policy.set_rules(rules)
no_colon_spaces = """
name:foo
schema:
layeringDefinition:
layer:site
"""
invalid_payloads = ['garbage', no_colon_spaces]
error_re = ['.*The provided document YAML failed schema validation.*',
'.*mapping values are not allowed here.*']
for idx, payload in enumerate(invalid_payloads):
resp = self.app.simulate_put(
'/api/v1.0/buckets/mop/documents',
headers={'Content-Type': 'application/x-yaml'},
body=payload)
self.assertEqual(400, resp.status_code)
self.assertRegexpMatches(resp.text, error_re[idx])
def test_put_conflicting_layering_policy(self):
rules = {'deckhand:create_cleartext_documents': '@'}
self.policy.set_rules(rules)

View File

@ -16,6 +16,7 @@ import yaml
import mock
from deckhand import factories
from deckhand.tests.unit.control import base as test_base
@ -24,11 +25,14 @@ class TestYAMLTranslator(test_base.BaseControllerTest):
def test_request_with_correct_content_type(self):
rules = {'deckhand:create_cleartext_documents': '@'}
self.policy.set_rules(rules)
self._read_data('sample_document_simple')
documents_factory = factories.DocumentFactory(1, [1])
document = documents_factory.gen_test({})[-1]
resp = self.app.simulate_put(
'/api/v1.0/buckets/b1/documents',
headers={'Content-Type': 'application/x-yaml'},
body=yaml.safe_dump(self.data),
body=yaml.safe_dump(document),
)
self.assertEqual(200, resp.status_code)
@ -84,11 +88,14 @@ class TestYAMLTranslator(test_base.BaseControllerTest):
def test_request_with_correct_content_type_plus_encoding(self):
rules = {'deckhand:create_cleartext_documents': '@'}
self.policy.set_rules(rules)
self._read_data('sample_document_simple')
documents_factory = factories.DocumentFactory(1, [1])
document = documents_factory.gen_test({})[-1]
resp = self.app.simulate_put(
'/api/v1.0/buckets/b1/documents',
headers={'Content-Type': 'application/x-yaml;encoding=utf-8'},
body=yaml.safe_dump(self.data),
body=yaml.safe_dump(document),
)
self.assertEqual(200, resp.status_code)

View File

@ -52,24 +52,22 @@ class TestRenderedDocumentsController(test_base.BaseControllerTest):
'/api/v1.0/revisions/%s/rendered-documents' % revision_id,
headers={'Content-Type': 'application/x-yaml'})
self.assertEqual(200, resp.status_code)
rendered_documents = list(yaml.safe_load_all(resp.text))
# TODO(fmontei): Implement "negative" filter server-side.
rendered_documents = [
d for d in rendered_documents
if not d['schema'].startswith(types.LAYERING_POLICY_SCHEMA)
]
self.assertEqual(1, len(rendered_documents))
is_abstract = rendered_documents[0]['metadata']['layeringDefinition'][
self.assertEqual(2, len(rendered_documents))
rendered_documents = list(filter(
lambda x: not x['schema'].startswith(types.LAYERING_POLICY_SCHEMA),
rendered_documents))
is_abstract = rendered_documents[-1]['metadata']['layeringDefinition'][
'abstract']
self.assertFalse(is_abstract)
for key, value in concrete_doc.items():
if isinstance(value, dict):
self.assertDictContainsSubset(value,
rendered_documents[0][key])
rendered_documents[-1][key])
else:
self.assertEqual(value, rendered_documents[0][key])
self.assertEqual(value, rendered_documents[-1][key])
def test_list_rendered_documents_exclude_deleted_documents(self):
"""Verifies that documents from previous revisions that have been
@ -83,40 +81,40 @@ class TestRenderedDocumentsController(test_base.BaseControllerTest):
'deckhand:create_cleartext_documents': '@'}
self.policy.set_rules(rules)
# Create 1st document.
# PUT a bunch of documents, include a layeringPolicy.
documents_factory = factories.DocumentFactory(1, [1])
payload = documents_factory.gen_test({}, global_abstract=False)[1:]
payload = documents_factory.gen_test({}, global_abstract=False)
resp = self.app.simulate_put(
'/api/v1.0/buckets/mop/documents',
headers={'Content-Type': 'application/x-yaml'},
body=yaml.safe_dump_all(payload))
self.assertEqual(200, resp.status_code)
# Create 2nd document (exclude 1st document in new payload).
# PUT new document (exclude original documents from this payload).
payload = documents_factory.gen_test({}, global_abstract=False)
new_name = payload[-1]['metadata']['name']
new_name = payload[1]['metadata']['name']
resp = self.app.simulate_put(
'/api/v1.0/buckets/mop/documents',
headers={'Content-Type': 'application/x-yaml'},
body=yaml.safe_dump_all(payload))
body=yaml.safe_dump_all([payload[1]]))
self.assertEqual(200, resp.status_code)
revision_id = list(yaml.safe_load_all(resp.text))[0]['status'][
'revision']
# Verify that only the 2nd is returned for revision_id=2.
# Verify that only the document with `new_name` is returned. (The
# layeringPolicy) is omitted from the response even though it still
# exists.
resp = self.app.simulate_get(
'/api/v1.0/revisions/%s/rendered-documents' % revision_id,
headers={'Content-Type': 'application/x-yaml'})
self.assertEqual(200, resp.status_code)
rendered_documents = list(yaml.safe_load_all(resp.text))
# TODO(fmontei): Implement "negative" filter server-side.
rendered_documents = [
d for d in rendered_documents
if not d['schema'].startswith(types.LAYERING_POLICY_SCHEMA)
]
self.assertEqual(1, len(rendered_documents))
self.assertEqual(2, len(rendered_documents))
rendered_documents = list(filter(
lambda x: not x['schema'].startswith(types.LAYERING_POLICY_SCHEMA),
rendered_documents))
self.assertEqual(new_name, rendered_documents[0]['metadata']['name'])
self.assertEqual(2, rendered_documents[0]['status']['revision'])

View File

@ -59,8 +59,13 @@ class TestValidationsController(test_base.BaseControllerTest):
def _create_revision(self, payload=None):
if not payload:
documents_factory = factories.DocumentFactory(2, [1, 1])
documents_factory = factories.DocumentFactory(1, [1])
payload = documents_factory.gen_test({})
data_schema_factory = factories.DataSchemaFactory()
data_schema = data_schema_factory.gen_test(
payload[1]['schema'], data={})
payload.append(data_schema)
resp = self.app.simulate_put(
'/api/v1.0/buckets/mop/documents',
headers={'Content-Type': 'application/x-yaml'},
@ -339,7 +344,7 @@ class TestValidationsController(test_base.BaseControllerTest):
'deckhand:list_validations': '@'}
self.policy.set_rules(rules)
# Register a `DataSchema` against which the test document will be
# Create a `DataSchema` against which the test document will be
# validated.
data_schema_factory = factories.DataSchemaFactory()
metadata_name = 'example/Doc/v1'
@ -357,22 +362,6 @@ class TestValidationsController(test_base.BaseControllerTest):
data_schema = data_schema_factory.gen_test(
metadata_name, data=schema_to_use)
revision_id = self._create_revision(payload=[data_schema])
# Validate that the internal deckhand validation was created.
resp = self.app.simulate_get(
'/api/v1.0/revisions/%s/validations' % revision_id,
headers={'Content-Type': 'application/x-yaml'})
self.assertEqual(200, resp.status_code)
body = yaml.safe_load(resp.text)
expected_body = {
'count': 1,
'results': [
{'name': types.DECKHAND_SCHEMA_VALIDATION, 'status': 'success'}
]
}
self.assertEqual(expected_body, body)
# Create the test document whose data section adheres to the
# `DataSchema` above.
doc_factory = factories.DocumentFactory(1, [1])
@ -381,10 +370,9 @@ class TestValidationsController(test_base.BaseControllerTest):
global_abstract=False)[-1]
doc_to_test['schema'] = 'example/Doc/v1'
revision_id = self._create_revision(
payload=[doc_to_test])
revision_id = self._create_revision(payload=[doc_to_test, data_schema])
# Validate that the validation was created and passed.
# Validate that the validation was created and succeeded.
resp = self.app.simulate_get(
'/api/v1.0/revisions/%s/validations' % revision_id,
headers={'Content-Type': 'application/x-yaml'})
@ -398,12 +386,16 @@ class TestValidationsController(test_base.BaseControllerTest):
}
self.assertEqual(expected_body, body)
def test_validation_with_registered_data_schema_expect_failure(self):
def test_validation_data_schema_different_revision_expect_failure(self):
"""Validates that creating a ``DataSchema`` in one revision and then
creating a document in another revision that relies on the previously
created ``DataSchema`` results in an expected failure.
"""
rules = {'deckhand:create_cleartext_documents': '@',
'deckhand:list_validations': '@'}
self.policy.set_rules(rules)
# Register a `DataSchema` against which the test document will be
# Create a `DataSchema` against which the test document will be
# validated.
data_schema_factory = factories.DataSchemaFactory()
metadata_name = 'example/foo/v1'
@ -419,7 +411,6 @@ class TestValidationsController(test_base.BaseControllerTest):
}
data_schema = data_schema_factory.gen_test(
metadata_name, data=schema_to_use)
revision_id = self._create_revision(payload=[data_schema])
# Validate that the internal deckhand validation was created.
@ -461,13 +452,15 @@ class TestValidationsController(test_base.BaseControllerTest):
}
self.assertEqual(expected_body, body)
def test_validation_with_registered_data_schema_expect_mixed(self):
def test_validation_data_schema_same_revision_expect_failure(self):
"""Validates that creating a ``DataSchema`` alongside a document
that relies on it in the same revision results in an expected failure.
"""
rules = {'deckhand:create_cleartext_documents': '@',
'deckhand:list_validations': '@',
'deckhand:show_validation': '@'}
'deckhand:list_validations': '@'}
self.policy.set_rules(rules)
# Register a `DataSchema` against which the test document will be
# Create a `DataSchema` against which the test document will be
# validated.
data_schema_factory = factories.DataSchemaFactory()
metadata_name = 'example/foo/v1'
@ -484,9 +477,18 @@ class TestValidationsController(test_base.BaseControllerTest):
data_schema = data_schema_factory.gen_test(
metadata_name, data=schema_to_use)
revision_id = self._create_revision(payload=[data_schema])
# Create the test document that fails the validation due to the
# schema defined by the `DataSchema` document.
doc_factory = factories.DocumentFactory(1, [1])
doc_to_test = doc_factory.gen_test(
{'_GLOBAL_DATA_1_': {'data': {'a': 'fail'}}},
global_abstract=False)[-1]
doc_to_test['schema'] = 'example/foo/v1'
doc_to_test['metadata']['name'] = 'test_doc'
# Validate that the internal deckhand validation was created.
revision_id = self._create_revision(payload=[doc_to_test, data_schema])
# Validate that the validation was created and reports failure.
resp = self.app.simulate_get(
'/api/v1.0/revisions/%s/validations' % revision_id,
headers={'Content-Type': 'application/x-yaml'})
@ -495,11 +497,110 @@ class TestValidationsController(test_base.BaseControllerTest):
expected_body = {
'count': 1,
'results': [
{'name': types.DECKHAND_SCHEMA_VALIDATION, 'status': 'success'}
{'name': types.DECKHAND_SCHEMA_VALIDATION, 'status': 'failure'}
]
}
self.assertEqual(expected_body, body)
def test_validation_with_registered_data_schema_expect_multi_failure(self):
rules = {'deckhand:create_cleartext_documents': '@',
'deckhand:list_validations': '@',
'deckhand:show_validation': '@'}
self.policy.set_rules(rules)
# Create a `DataSchema` against which the test document will be
# validated.
data_schema_factory = factories.DataSchemaFactory()
metadata_name = 'example/foo/v1'
schema_to_use = {
'$schema': 'http://json-schema.org/schema#',
'type': 'object',
'properties': {
'a': {
'type': 'integer' # Test doc will fail b/c of wrong type.
}
},
'required': ['a']
}
data_schema = data_schema_factory.gen_test(
metadata_name, data=schema_to_use)
# Failure #1.
# Create the test document that fails the validation due to the
# schema defined by the `DataSchema` document.
doc_factory = factories.DocumentFactory(1, [1])
doc_to_test = doc_factory.gen_test(
{'_GLOBAL_DATA_1_': {'data': {'a': 'fail'}}},
global_abstract=False)[-1]
doc_to_test['schema'] = 'example/foo/v1'
doc_to_test['metadata']['name'] = 'test_doc'
# Failure #2.
# Remove required metadata property, causing error to be generated.
del doc_to_test['metadata']['layeringDefinition']
revision_id = self._create_revision(payload=[doc_to_test, data_schema])
# Validate that the validation was created and reports failure.
resp = self.app.simulate_get(
'/api/v1.0/revisions/%s/validations' % revision_id,
headers={'Content-Type': 'application/x-yaml'})
self.assertEqual(200, resp.status_code)
body = yaml.safe_load(resp.text)
expected_body = {
'count': 1,
'results': [
{'name': types.DECKHAND_SCHEMA_VALIDATION, 'status': 'failure'}
]
}
self.assertEqual(expected_body, body)
# Validate that both expected errors are present for validation.
expected_errors = [
{
'message': "'layeringDefinition' is a required property",
'name': 'test_doc',
'schema': 'example/foo/v1',
'path': '.metadata'
}, {
'message': "'fail' is not of type 'integer'",
'name': 'test_doc',
'schema': 'example/foo/v1',
'path': '.data.a'
}
]
resp = self.app.simulate_get(
'/api/v1.0/revisions/%s/validations/%s/entries/0' % (
revision_id, types.DECKHAND_SCHEMA_VALIDATION),
headers={'Content-Type': 'application/x-yaml'})
self.assertEqual(200, resp.status_code)
body = yaml.safe_load(resp.text)
self.assertEqual('failure', body['status'])
self.assertEqual(expected_errors, body['errors'])
def test_validation_with_registered_data_schema_expect_mixed(self):
rules = {'deckhand:create_cleartext_documents': '@',
'deckhand:list_validations': '@',
'deckhand:show_validation': '@'}
self.policy.set_rules(rules)
# Create a `DataSchema` against which the test document will be
# validated.
data_schema_factory = factories.DataSchemaFactory()
metadata_name = 'example/foo/v1'
schema_to_use = {
'$schema': 'http://json-schema.org/schema#',
'type': 'object',
'properties': {
'a': {
'type': 'integer' # Test doc will fail b/c of wrong type.
}
},
'required': ['a']
}
data_schema = data_schema_factory.gen_test(
metadata_name, data=schema_to_use)
# Create a document that passes validation and another that fails it.
doc_factory = factories.DocumentFactory(1, [1])
fail_doc = doc_factory.gen_test(
@ -511,7 +612,8 @@ class TestValidationsController(test_base.BaseControllerTest):
pass_doc = copy.deepcopy(fail_doc)
pass_doc['data']['a'] = 5
revision_id = self._create_revision(payload=[fail_doc, pass_doc])
revision_id = self._create_revision(
payload=[fail_doc, pass_doc, data_schema])
# Validate that the validation reports failure since `fail_doc`
# should've failed validation.
@ -535,9 +637,10 @@ class TestValidationsController(test_base.BaseControllerTest):
self.assertEqual(200, resp.status_code)
body = yaml.safe_load(resp.text)
expected_body = {
'count': 2,
'count': 3,
'results': [{'id': 0, 'status': 'failure'}, # fail_doc failed.
{'id': 1, 'status': 'success'}] # pass_doc succeeded.
{'id': 1, 'status': 'success'}, # DataSchema passed.
{'id': 2, 'status': 'success'}] # pass_doc succeeded.
}
self.assertEqual(expected_body, body)
@ -551,7 +654,8 @@ class TestValidationsController(test_base.BaseControllerTest):
expected_errors = [{
'schema': 'example/foo/v1',
'name': 'test_doc',
'message': "'fail' is not of type 'integer'"
'message': "'fail' is not of type 'integer'",
'path': '.data.a'
}]
self.assertIn('errors', body)
self.assertEqual(expected_errors, body['errors'])
@ -563,14 +667,18 @@ class TestValidationsController(test_base.BaseControllerTest):
depends on substitution from another document.
"""
rules = {'deckhand:create_cleartext_documents': '@',
'deckhand:list_validations': '@'}
'deckhand:list_validations': '@',
'deckhand:show_validation': '@'}
self.policy.set_rules(rules)
documents_factory = factories.DocumentFactory(1, [1])
payload = documents_factory.gen_test({}, global_abstract=False)[-1]
del payload['data']
document = documents_factory.gen_test({}, global_abstract=False)[-1]
del document['data']
revision_id = self._create_revision(payload=[payload])
data_schema_factory = factories.DataSchemaFactory()
data_schema = data_schema_factory.gen_test(document['schema'], {})
revision_id = self._create_revision(payload=[document, data_schema])
# Validate that the entry is present.
resp = self.app.simulate_get(
@ -581,7 +689,92 @@ class TestValidationsController(test_base.BaseControllerTest):
body = yaml.safe_load(resp.text)
expected_body = {
'count': 1,
'results': [{'id': 0, 'status': 'failure'}]
'count': 2,
'results': [{'id': 0, 'status': 'failure'}, # Document.
{'id': 1, 'status': 'success'}] # DataSchema.
}
self.assertEqual(expected_body, body)
# Validate that the created document failed validation for the expected
# reason.
resp = self.app.simulate_get(
'/api/v1.0/revisions/%s/validations/%s/entries/0' % (
revision_id, types.DECKHAND_SCHEMA_VALIDATION),
headers={'Content-Type': 'application/x-yaml'})
self.assertEqual(200, resp.status_code)
body = yaml.safe_load(resp.text)
expected_errors = [{
'schema': document['schema'],
'name': document['metadata']['name'],
'message': "'data' is a required property",
'path': '.'
}]
self.assertIn('errors', body)
self.assertEqual(expected_errors, body['errors'])
def test_validation_only_new_data_schema_registered(self):
"""Validate whether newly created DataSchemas replace old DataSchemas
when it comes to validation.
"""
rules = {'deckhand:create_cleartext_documents': '@',
'deckhand:list_validations': '@'}
self.policy.set_rules(rules)
# Create 2 DataSchemas that will fail if they're used. These shouldn't
# be used for validation.
data_schema_factory = factories.DataSchemaFactory()
metadata_names = ['exampleA/Doc/v1', 'exampleB/Doc/v1']
schemas_to_use = [{
'$schema': 'http://json-schema.org/schema#',
'type': 'object',
'properties': {
'a': {
'type': 'integer'
}
},
'required': ['a'],
'additionalProperties': False
}] * 2
old_data_schemas = [
data_schema_factory.gen_test(
metadata_names[i], data=schemas_to_use[i])
for i in range(2)
]
# Save the DataSchemas in the first revision.
revision_id = self._create_revision(payload=old_data_schemas)
# Create 2 DataSchemas that will pass if they're used. These should
# be used for validation.
for schema_to_use in schemas_to_use:
schema_to_use['properties']['a']['type'] = 'string'
new_data_schemas = [
data_schema_factory.gen_test(
metadata_names[i], data=schemas_to_use[i])
for i in range(2)
]
doc_factory = factories.DocumentFactory(1, [1])
example1_doc = doc_factory.gen_test(
{'_GLOBAL_DATA_1_': {'data': {'a': 'whatever'}}},
global_abstract=False)[-1]
example1_doc['schema'] = metadata_names[0]
example2_doc = copy.deepcopy(example1_doc)
example2_doc['schema'] = metadata_names[1]
# Save the documents that will be validated alongside the DataSchemas
# that will be used to validate them.
revision_id = self._create_revision(
payload=[example1_doc, example2_doc] + new_data_schemas)
# Validate that the validation was created and succeeded: This means
# that the new DataSchemas were used, not the old ones.
resp = self.app.simulate_get(
'/api/v1.0/revisions/%s/validations' % revision_id,
headers={'Content-Type': 'application/x-yaml'})
self.assertEqual(200, resp.status_code)
body = yaml.safe_load(resp.text)
expected_body = {
'count': 1,
'results': [
{'name': types.DECKHAND_SCHEMA_VALIDATION, 'status': 'success'}
]
}
self.assertEqual(expected_body, body)

View File

@ -28,9 +28,9 @@ class TestDocumentValidationBase(test_base.DeckhandTestCase):
with open(test_yaml_path, 'r') as yaml_file:
yaml_data = yaml_file.read()
self.data = yaml.safe_load(yaml_data)
return yaml.safe_load(yaml_data)
def _corrupt_data(self, key, value=None, data=None, op='delete'):
def _corrupt_data(self, data, key, value=None, op='delete'):
"""Corrupt test data to check that pre-validation works.
Corrupt data by removing a key from the document (if ``op`` is delete)
@ -54,8 +54,6 @@ class TestDocumentValidationBase(test_base.DeckhandTestCase):
:type op: string
:returns: Corrupted data.
"""
if data is None:
data = self.data
if op not in ('delete', 'replace'):
raise ValueError("The ``op`` argument must either be 'delete' or "
"'replace'.")

View File

@ -17,32 +17,33 @@ import mock
from deckhand.engine import document_validation
from deckhand.tests.unit.engine import base as engine_test_base
from deckhand import factories
from deckhand import utils
class TestDocumentValidation(engine_test_base.TestDocumentValidationBase):
def setUp(self):
super(TestDocumentValidation, self).setUp()
# Mock out DB module (i.e. retrieving DataSchema docs from DB).
self.patch('deckhand.db.sqlalchemy.api.document_get_all')
self.test_document = self._read_data('sample_document')
dataschema_factory = factories.DataSchemaFactory()
self.dataschema = dataschema_factory.gen_test(
self.test_document['schema'], {})
def test_init_document_validation(self):
self._read_data('sample_document')
doc_validation = document_validation.DocumentValidation(
self.data)
self.assertIsInstance(doc_validation,
document_validation.DocumentValidation)
# Stub out the DB call for retrieving DataSchema documents.
self.patchobject(document_validation.db_api, 'revision_documents_get',
lambda *a, **k: [])
def test_data_schema_missing_optional_sections(self):
self._read_data('sample_data_schema')
optional_missing_data = [
self._corrupt_data('metadata.labels'),
self._corrupt_data(self.test_document, 'metadata.labels'),
]
for missing_data in optional_missing_data:
document_validation.DocumentValidation(missing_data).validate_all()
payload = [missing_data, self.dataschema]
document_validation.DocumentValidation(payload).validate_all()
def test_document_missing_optional_sections(self):
self._read_data('sample_document')
properties_to_remove = (
'metadata.layeringDefinition.actions',
'metadata.layeringDefinition.parentSelector',
@ -50,21 +51,19 @@ class TestDocumentValidation(engine_test_base.TestDocumentValidationBase):
'metadata.substitutions.2.dest.pattern')
for property_to_remove in properties_to_remove:
optional_data_removed = self._corrupt_data(property_to_remove)
document_validation.DocumentValidation(
optional_data_removed).validate_all()
missing_data = self._corrupt_data(self.test_document,
property_to_remove)
payload = [missing_data, self.dataschema]
document_validation.DocumentValidation(payload).validate_all()
@mock.patch.object(document_validation, 'LOG', autospec=True)
def test_abstract_document_not_validated(self, mock_log):
self._read_data('sample_document')
test_document = self._read_data('sample_passphrase')
# Set the document to abstract.
updated_data = self._corrupt_data(
'metadata.layeringDefinition.abstract', True, op='replace')
# Guarantee that a validation error is thrown by removing a required
# property.
del updated_data['metadata']['layeringDefinition']['layer']
document_validation.DocumentValidation(updated_data).validate_all()
abstract_document = utils.jsonpath_replace(
test_document, True, 'metadata.layeringDefinition.abstract')
document_validation.DocumentValidation(
abstract_document).validate_all()
self.assertTrue(mock_log.info.called)
self.assertIn("Skipping schema validation for abstract document",
mock_log.info.mock_calls[0][1][0])

View File

@ -12,76 +12,93 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from deckhand.engine import document_validation
from deckhand import errors
from deckhand.tests.unit.engine import base as engine_test_base
from deckhand import factories
from deckhand.tests.unit.engine import base as test_base
from deckhand import types
class TestDocumentValidationNegative(
engine_test_base.TestDocumentValidationBase):
class TestDocumentValidationNegative(test_base.TestDocumentValidationBase):
"""Negative testing suite for document validation."""
# The 'data' key is mandatory but not critical if excluded.
CRITICAL_ATTRS = (
'schema', 'metadata', 'metadata.schema', 'metadata.name')
SCHEMA_ERR = "'%s' is a required property"
exception_map = {
'metadata': errors.InvalidDocumentFormat,
'metadata.schema': errors.InvalidDocumentFormat,
'metadata.name': errors.InvalidDocumentFormat,
'schema': errors.InvalidDocumentFormat,
}
def setUp(self):
super(TestDocumentValidationNegative, self).setUp()
# Mock out DB module (i.e. retrieving DataSchema docs from DB).
self.patch('deckhand.db.sqlalchemy.api.document_get_all')
# Stub out the DB call for retrieving DataSchema documents.
self.patchobject(document_validation.db_api, 'revision_documents_get',
lambda *a, **k: [])
def _test_missing_required_sections(self, properties_to_remove):
def _do_validations(self, document_validator, expected, expected_err):
validations = document_validator.validate_all()
self.assertEqual(2, len(validations))
# The DataSchema document itself should've validated
# successfully.
self.assertEqual('success', validations[0]['status'])
self.assertEqual('failure', validations[-1]['status'])
self.assertEqual({'version': '1.0', 'name': 'deckhand'},
validations[-1]['validator'])
self.assertEqual(types.DECKHAND_SCHEMA_VALIDATION,
validations[-1]['name'])
self.assertEqual(1, len(validations[-1]['errors']))
self.assertEqual(expected['metadata']['name'],
validations[-1]['errors'][-1]['name'])
self.assertEqual(expected['schema'],
validations[-1]['errors'][-1]['schema'])
self.assertEqual(expected_err,
validations[-1]['errors'][-1]['message'])
def _test_missing_required_sections(self, document, properties_to_remove):
for idx, property_to_remove in enumerate(properties_to_remove):
critical = property_to_remove in self.CRITICAL_ATTRS
missing_prop = property_to_remove.split('.')[-1]
invalid_data = self._corrupt_data(property_to_remove)
expected_err = self.SCHEMA_ERR % missing_prop
invalid_data = self._corrupt_data(document, property_to_remove)
doc_validator = document_validation.DocumentValidation(
invalid_data)
if critical:
self.assertRaisesRegexp(
errors.InvalidDocumentFormat, expected_err,
doc_validator.validate_all)
exception_raised = self.exception_map.get(property_to_remove, None)
expected_err_msg = "'%s' is a required property" % missing_prop
dataschema_factory = factories.DataSchemaFactory()
dataschema = dataschema_factory.gen_test(
invalid_data.get('schema', ''), {})
payload = [dataschema, invalid_data]
doc_validator = document_validation.DocumentValidation(payload)
if exception_raised:
self.assertRaises(
exception_raised, doc_validator.validate_all)
else:
validations = doc_validator.validate_all()
self.assertEqual(1, len(validations))
self.assertEqual('failure', validations[0]['status'])
self.assertEqual({'version': '1.0', 'name': 'deckhand'},
validations[0]['validator'])
self.assertEqual(types.DECKHAND_SCHEMA_VALIDATION,
validations[0]['name'])
self.assertEqual(1, len(validations[0]['errors']))
self.assertEqual(self.data['metadata']['name'],
validations[0]['errors'][0]['name'])
self.assertEqual(self.data['schema'],
validations[0]['errors'][0]['schema'])
self.assertEqual(expected_err,
validations[0]['errors'][0]['message'])
self._do_validations(doc_validator, invalid_data,
expected_err_msg)
def test_certificate_key_missing_required_sections(self):
self._read_data('sample_certificate_key')
properties_to_remove = self.CRITICAL_ATTRS + (
document = self._read_data('sample_certificate_key')
properties_to_remove = tuple(self.exception_map.keys()) + (
'data', 'metadata.storagePolicy',)
self._test_missing_required_sections(properties_to_remove)
self._test_missing_required_sections(document, properties_to_remove)
def test_certificate_missing_required_sections(self):
self._read_data('sample_certificate')
properties_to_remove = self.CRITICAL_ATTRS + (
document = self._read_data('sample_certificate')
properties_to_remove = tuple(self.exception_map.keys()) + (
'data', 'metadata.storagePolicy',)
self._test_missing_required_sections(properties_to_remove)
self._test_missing_required_sections(document, properties_to_remove)
def test_data_schema_missing_required_sections(self):
self._read_data('sample_data_schema')
properties_to_remove = self.CRITICAL_ATTRS + ('data', 'data.$schema',)
self._test_missing_required_sections(properties_to_remove)
document = self._read_data('sample_data_schema')
properties_to_remove = tuple(self.exception_map.keys()) + (
'data', 'data.$schema',)
self._test_missing_required_sections(document, properties_to_remove)
def test_document_missing_required_sections(self):
self._read_data('sample_document')
properties_to_remove = self.CRITICAL_ATTRS + (
document = self._read_data('sample_document')
properties_to_remove = tuple(self.exception_map.keys()) + (
'data',
'metadata.layeringDefinition',
'metadata.layeringDefinition.layer',
@ -93,45 +110,135 @@ class TestDocumentValidationNegative(
'metadata.substitutions.0.src.schema',
'metadata.substitutions.0.src.name',
'metadata.substitutions.0.src.path')
self._test_missing_required_sections(properties_to_remove)
self._test_missing_required_sections(document, properties_to_remove)
def test_document_missing_multiple_required_sections(self):
"""Validates that multiple errors are reported for a document with
multiple validation errors.
"""
document = self._read_data('sample_document')
properties_to_remove = (
'metadata.layeringDefinition.layer',
'metadata.layeringDefinition.actions.0.method',
'metadata.layeringDefinition.actions.0.path',
'metadata.substitutions.0.dest.path',
'metadata.substitutions.0.src.name',
'metadata.substitutions.0.src.path',
'metadata.substitutions.0.src.schema',
)
for property_to_remove in properties_to_remove:
document = self._corrupt_data(document, property_to_remove)
doc_validator = document_validation.DocumentValidation(document)
validations = doc_validator.validate_all()
errors = validations[0]['errors']
self.assertEqual(len(properties_to_remove) + 1, len(errors))
# Validate the first error relates to the fact that the document's
# schema is unrecognized (promenade/ResourceType/v1.0) as it wasn't
# registered with a ``DataSchema``.
self.assertIn('%s is invalid' % document['schema'],
errors[0]['message'])
# Sort the errors to match the order in ``properties_to_remove``.
errors = sorted(errors[1:], key=lambda x: (x['path'], x['message']))
# Validate that an error was generated for each missing property in
# ``properties_to_remove`` that was removed from ``document``.
for idx, property_to_remove in enumerate(properties_to_remove):
parts = property_to_remove.split('.')
parent_path = '.' + '.'.join(parts[:-1])
missing_property = parts[-1]
expected_err = "'%s' is a required property" % missing_property
self.assertEqual(expected_err, errors[idx]['message'])
self.assertEqual(parent_path, errors[idx]['path'])
def test_document_invalid_layering_definition_action(self):
self._read_data('sample_document')
corrupted_data = self._corrupt_data(
'metadata.layeringDefinition.actions.0.method', 'invalid',
op='replace')
document = self._read_data('sample_document')
missing_data = self._corrupt_data(
document, 'metadata.layeringDefinition.actions.0.method',
'invalid', op='replace')
expected_err = "'invalid' is not one of ['replace', 'delete', 'merge']"
doc_validator = document_validation.DocumentValidation(corrupted_data)
validations = doc_validator.validate_all()
self.assertEqual(1, len(validations))
self.assertEqual('failure', validations[0]['status'])
self.assertEqual({'version': '1.0', 'name': 'deckhand'},
validations[0]['validator'])
self.assertEqual(types.DECKHAND_SCHEMA_VALIDATION,
validations[0]['name'])
self.assertEqual(1, len(validations[0]['errors']))
self.assertEqual(self.data['metadata']['name'],
validations[0]['errors'][0]['name'])
self.assertEqual(self.data['schema'],
validations[0]['errors'][0]['schema'])
self.assertEqual(expected_err,
validations[0]['errors'][0]['message'])
# Ensure that a dataschema document exists for the random document
# schema via mocking.
dataschema_factory = factories.DataSchemaFactory()
dataschema = dataschema_factory.gen_test(document['schema'], {})
payload = [dataschema, missing_data]
doc_validator = document_validation.DocumentValidation(payload)
self._do_validations(doc_validator, document, expected_err)
def test_layering_policy_missing_required_sections(self):
self._read_data('sample_layering_policy')
properties_to_remove = self.CRITICAL_ATTRS + (
document = self._read_data('sample_layering_policy')
properties_to_remove = tuple(self.exception_map.keys()) + (
'data', 'data.layerOrder',)
self._test_missing_required_sections(properties_to_remove)
self._test_missing_required_sections(document, properties_to_remove)
def test_passphrase_missing_required_sections(self):
self._read_data('sample_passphrase')
properties_to_remove = self.CRITICAL_ATTRS + (
document = self._read_data('sample_passphrase')
properties_to_remove = tuple(self.exception_map.keys()) + (
'data', 'metadata.storagePolicy',)
self._test_missing_required_sections(properties_to_remove)
self._test_missing_required_sections(document, properties_to_remove)
def test_validation_policy_missing_required_sections(self):
self._read_data('sample_validation_policy')
properties_to_remove = self.CRITICAL_ATTRS + (
document = self._read_data('sample_validation_policy')
properties_to_remove = tuple(self.exception_map.keys()) + (
'data', 'data.validations', 'data.validations.0.name')
self._test_missing_required_sections(properties_to_remove)
self._test_missing_required_sections(document, properties_to_remove)
@mock.patch.object(document_validation, 'LOG', autospec=True)
def test_invalid_document_schema_generates_error(self, mock_log):
document = self._read_data('sample_document')
document['schema'] = 'foo/bar/v1'
doc_validator = document_validation.DocumentValidation(document)
doc_validator.validate_all()
self.assertRegex(
mock_log.error.mock_calls[0][1][0],
'The provided document schema %s is invalid.' % document['schema'])
@mock.patch.object(document_validation, 'LOG', autospec=True)
def test_invalid_document_schema_version_generates_error(self, mock_log):
document = self._read_data('sample_passphrase')
document['schema'] = 'deckhand/Passphrase/v5'
doc_validator = document_validation.DocumentValidation(document)
doc_validator.validate_all()
self.assertRegex(
mock_log.error.mock_calls[0][1][0],
'The provided document schema %s is invalid.' % document['schema'])
def test_invalid_validation_schema_raises_runtime_error(self):
document = self._read_data('sample_passphrase')
fake_schema = mock.MagicMock(schema='fake')
fake_schema_map = {'v1': {'deckhand/Passphrase': fake_schema}}
# Validate that broken built-in base schema raises RuntimeError.
with mock.patch.object(document_validation, 'base_schema',
new_callable=mock.PropertyMock(
return_value=fake_schema)):
doc_validator = document_validation.DocumentValidation(document)
with self.assertRaisesRegexp(RuntimeError, 'Unknown error'):
doc_validator.validate_all()
# Validate that broken built-in schema for ``SchemaValidator`` raises
# RuntimeError.
with mock.patch.object(document_validation.SchemaValidator,
'_schema_map', new_callable=mock.PropertyMock(
return_value=fake_schema_map)):
doc_validator = document_validation.DocumentValidation(document)
with self.assertRaisesRegexp(RuntimeError, 'Unknown error'):
doc_validator.validate_all()
# Validate that broken data schema for ``DataSchemaValidator`` raises
# RuntimeError.
document = self._read_data('sample_document')
data_schema = self._read_data('sample_data_schema')
data_schema['metadata']['name'] = document['schema']
data_schema['data'] = 'fake'
doc_validator = document_validation.DocumentValidation(
[document, data_schema])
with self.assertRaisesRegexp(RuntimeError, 'Unknown error'):
doc_validator.validate_all()

View File

@ -4,4 +4,6 @@ metadata:
schema: metadata/Document/v1.0
name: application-admin-password
storagePolicy: encrypted
layeringDefinition:
abstract: False
data: some-password

View File

@ -181,7 +181,7 @@ def _add_microversion(value):
"""Hack for coercing all Deckhand schema fields (``schema`` and
``metadata.schema``) into ending with v1.0 rather than v1, for example.
"""
microversion_re = r'^.*/.*/v[0-9]+$'
microversion_re = r'^.*/.*/v[1-9]\d*$'
if re.match(value, microversion_re):
return value + '.0'
return value

View File

@ -149,6 +149,12 @@ connection = $DATABASE_URL
# auth_type = password
EOCONF
# Only set up logging if running Deckhand via uwsgi. The container already has
# values for logging.
if [ -z "$DECKHAND_IMAGE" ]; then
sed '1 a log_config_append = '"$CONF_DIR"'/logging.conf' $CONF_DIR/deckhand.conf
fi
# Only set up logging if running Deckhand via uwsgi. The container already has
# values for logging.
if [ -z "$DECKHAND_IMAGE" ]; then