diff --git a/deckhand/control/buckets.py b/deckhand/control/buckets.py index 910686da..25e04a43 100644 --- a/deckhand/control/buckets.py +++ b/deckhand/control/buckets.py @@ -70,8 +70,7 @@ class BucketsResource(api_base.BaseResource): created_documents = self._create_revision_documents( bucket_name, documents, validations) - if created_documents: - resp.body = self.view_builder.list(created_documents) + resp.body = self.view_builder.list(created_documents) resp.status = falcon.HTTP_200 def _prepare_secret_documents(self, secret_documents): diff --git a/deckhand/control/revision_documents.py b/deckhand/control/revision_documents.py index e80e965a..1f2cd742 100644 --- a/deckhand/control/revision_documents.py +++ b/deckhand/control/revision_documents.py @@ -21,9 +21,10 @@ from deckhand.control import common from deckhand.control.views import document as document_view from deckhand.db.sqlalchemy import api as db_api from deckhand.engine import document_validation -from deckhand.engine import secrets_manager +from deckhand.engine import layering from deckhand import errors from deckhand import policy +from deckhand import types from deckhand import utils LOG = logging.getLogger(__name__) @@ -97,35 +98,67 @@ class RenderedDocumentsResource(api_base.BaseResource): def on_get(self, req, resp, sanitized_params, revision_id): include_encrypted = policy.conditional_authorize( 'deckhand:list_encrypted_documents', req.context, do_raise=False) - - filters = sanitized_params.copy() - filters['metadata.layeringDefinition.abstract'] = False - filters['metadata.storagePolicy'] = ['cleartext'] - filters['deleted'] = False # Never return deleted documents to user. + filters = { + 'metadata.storagePolicy': ['cleartext'], + 'deleted': False + } if include_encrypted: filters['metadata.storagePolicy'].append('encrypted') + layering_policy = self._retrieve_layering_policy() + documents = self._retrieve_documents_for_rendering(revision_id, + **filters) + + # Prevent the layering policy from appearing twice. + if layering_policy in documents: + documents.remove(layering_policy) + document_layering = layering.DocumentLayering(layering_policy, + documents) + rendered_documents = document_layering.render() + + # Filters to be applied post-rendering, because many documents are + # involved in rendering. User filters can only be applied once all + # documents have been rendered. + user_filters = sanitized_params.copy() + user_filters['metadata.layeringDefinition.abstract'] = False + final_documents = [ + d for d in rendered_documents if utils.deepfilter( + d, **user_filters)] + + resp.status = falcon.HTTP_200 + resp.body = self.view_builder.list(final_documents) + self._post_validate(final_documents) + + def _retrieve_layering_policy(self): + try: + # NOTE(fmontei): Layering policies exist system-wide, across all + # revisions, so no need to filter by revision. 
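+            # A layering policy behaves like a singleton here, so
+            # ``document_get``, which returns the first matching document,
+            # is enough to retrieve it.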
+            layering_policy_filters = {
+                'deleted': False,
+                'schema': types.LAYERING_POLICY_SCHEMA
+            }
+            layering_policy = db_api.document_get(**layering_policy_filters)
+        except errors.DocumentNotFound as e:
+            error_msg = (
+                'No layering policy found in the system, so the documents '
+                'could not be rendered.')
+            LOG.error(error_msg)
+            LOG.exception(six.text_type(e))
+            raise falcon.HTTPConflict(description=error_msg)
+        else:
+            return layering_policy
+
+    def _retrieve_documents_for_rendering(self, revision_id, **filters):
         try:
             documents = db_api.revision_get_documents(
                 revision_id, **filters)
         except errors.RevisionNotFound as e:
             LOG.exception(six.text_type(e))
             raise falcon.HTTPNotFound(description=e.format_message())
+        else:
+            return documents

-        # TODO(fmontei): Currently the only phase of rendering that is
-        # performed is secret substitution, which can be done in any randomized
-        # order. However, secret substitution logic will have to be moved into
-        # a separate module that handles layering alongside substitution once
-        # layering has been fully integrated into this endpoint.
-        secrets_substitution = secrets_manager.SecretsSubstitution(documents)
-        try:
-            rendered_documents = secrets_substitution.substitute_all()
-        except errors.DocumentNotFound as e:
-            LOG.error('Failed to render the documents because a secret '
-                      'document could not be found.')
-            LOG.exception(six.text_type(e))
-            raise falcon.HTTPNotFound(description=e.format_message())
-
+    def _post_validate(self, documents):
         # Perform schema validation post-rendering to ensure that rendering
         # and substitution didn't break anything.
         doc_validator = document_validation.DocumentValidation(documents)
@@ -133,9 +166,7 @@ class RenderedDocumentsResource(api_base.BaseResource):
             doc_validator.validate_all()
         except (errors.InvalidDocumentFormat,
                 errors.InvalidDocumentSchema) as e:
+            LOG.error('Failed to post-validate rendered documents.')
             LOG.exception(e.format_message())
             raise falcon.HTTPInternalServerError(
                 description=e.format_message())
-
-        resp.status = falcon.HTTP_200
-        resp.body = self.view_builder.list(rendered_documents)
diff --git a/deckhand/db/sqlalchemy/api.py b/deckhand/db/sqlalchemy/api.py
index 5e111199..83099418 100644
--- a/deckhand/db/sqlalchemy/api.py
+++ b/deckhand/db/sqlalchemy/api.py
@@ -12,14 +12,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
- """Defines interface for DB access.""" -import ast import copy import functools import hashlib -import re import threading from oslo_config import cfg @@ -28,7 +25,6 @@ from oslo_db import options from oslo_db.sqlalchemy import session from oslo_log import log as logging from oslo_serialization import jsonutils as json -import six import sqlalchemy.orm as sa_orm from sqlalchemy import text @@ -362,7 +358,7 @@ def document_get(session=None, raw_dict=False, revision_id=None, **filters): for doc in documents: d = doc.to_dict(raw_dict=raw_dict) - if _apply_filters(d, **nested_filters): + if utils.deepfilter(d, **nested_filters): return d filters.update(nested_filters) @@ -412,7 +408,7 @@ def document_get_all(session=None, raw_dict=False, revision_id=None, final_documents = [] for doc in documents: d = doc.to_dict(raw_dict=raw_dict) - if _apply_filters(d, **nested_filters): + if utils.deepfilter(d, **nested_filters): final_documents.append(d) return final_documents @@ -536,97 +532,6 @@ def _update_revision_history(documents): return documents -def _add_microversion(value): - """Hack for coercing all Deckhand schema fields (``schema`` and - ``metadata.schema``) into ending with v1.0 rather than v1, for example. - """ - microversion_re = r'^.*/.*/v[0-9]{1}$' - if re.match(value, microversion_re): - return value + '.0' - return value - - -def _apply_filters(dct, **filters): - """Apply filters to ``dct``. - - Apply filters in ``filters`` to the dictionary ``dct``. - - :param dct: The dictionary to check against all the ``filters``. - :param filters: Dictionary of key-value pairs used for filtering out - unwanted results. - :return: True if the dictionary satisfies all the filters, else False. - """ - def _transform_filter_bool(filter_val): - # Transform boolean values into string literals. - if isinstance(filter_val, six.string_types): - try: - filter_val = ast.literal_eval(filter_val.title()) - except ValueError: - # If not True/False, set to None to avoid matching - # `actual_val` which is always boolean. - filter_val = None - return filter_val - - for filter_key, filter_val in filters.items(): - # If the filter is a list of possibilities, e.g. ['site', 'region'] - # for metadata.layeringDefinition.layer, check whether the actual - # value is present. - if isinstance(filter_val, (list, tuple)): - actual_val = utils.jsonpath_parse(dct, filter_key, match_all=True) - if not actual_val: - return False - - if isinstance(actual_val[0], bool): - filter_val = [_transform_filter_bool(x) for x in filter_val] - - if not set(actual_val).intersection(set(filter_val)): - return False - else: - actual_val = utils.jsonpath_parse(dct, filter_key) - - # Else if both the filter value and the actual value in the doc - # are dictionaries, check whether the filter dict is a subset - # of the actual dict. - if (isinstance(actual_val, dict) - and isinstance(filter_val, dict)): - is_subset = set( - filter_val.items()).issubset(set(actual_val.items())) - if not is_subset: - return False - # Else both filters are string literals. - else: - # Filtering by schema must support namespace matching - # (e.g. schema=promenade) such that all kind and schema - # documents with promenade namespace are returned, or - # (e.g. schema=promenade/Node) such that all version - # schemas with namespace=schema and kind=Node are returned. 
- if isinstance(actual_val, bool): - filter_val = _transform_filter_bool(filter_val) - - if filter_key in ['schema', 'metadata.schema']: - actual_val = _add_microversion(actual_val) - filter_val = _add_microversion(filter_val) - parts = actual_val.split('/')[:2] - if len(parts) == 2: - actual_namespace, actual_kind = parts - elif len(parts) == 1: - actual_namespace = parts[0] - actual_kind = '' - else: - actual_namespace = actual_kind = '' - actual_minus_version = actual_namespace + '/' + actual_kind - - if not (filter_val == actual_val or - actual_minus_version == filter_val or - actual_namespace == filter_val): - return False - else: - if actual_val != filter_val: - return False - - return True - - def revision_get_all(session=None, **filters): """Return list of all revisions. @@ -640,7 +545,7 @@ def revision_get_all(session=None, **filters): result = [] for revision in revisions: revision_dict = revision.to_dict() - if _apply_filters(revision_dict, **filters): + if utils.deepfilter(revision_dict, **filters): revision_dict['documents'] = _update_revision_history( revision_dict['documents']) result.append(revision_dict) @@ -707,7 +612,7 @@ def _filter_revision_documents(documents, unique_only, **filters): documents = _exclude_deleted_documents(documents) for document in documents: - if _apply_filters(document, **filters): + if utils.deepfilter(document, **filters): # Filter out redundant documents from previous revisions, i.e. # documents schema and metadata.name are repeated. if unique_only: diff --git a/deckhand/engine/layering.py b/deckhand/engine/layering.py index 246e85d2..36d8f1a2 100644 --- a/deckhand/engine/layering.py +++ b/deckhand/engine/layering.py @@ -15,91 +15,146 @@ import collections import copy +from oslo_log import log as logging +import six + from deckhand.engine import document +from deckhand.engine import secrets_manager from deckhand.engine import utils from deckhand import errors +LOG = logging.getLogger(__name__) + class DocumentLayering(object): """Class responsible for handling document layering. Layering is controlled in two places: - 1. The `LayeringPolicy` control document, which defines the valid layers + 1. The ``LayeringPolicy`` control document, which defines the valid layers and their order of precedence. - 2. In the `metadata.layeringDefinition` section of normal - (`metadata.schema=metadata/Document/v1.0`) documents. + 2. In the ``metadata.layeringDefinition`` section of normal + (``metadata.schema=metadata/Document/v1.0``) documents. .. note:: - Only documents with the same `schema` are allowed to be layered + Only documents with the same ``schema`` are allowed to be layered together into a fully rendered document. """ SUPPORTED_METHODS = ('merge', 'replace', 'delete') - LAYERING_POLICY_SCHEMA = 'deckhand/LayeringPolicy/v1.0' - def __init__(self, documents): + def _calc_document_children(self): + """Determine each document's children. + + For each document, attempts to find the document's children. Adds a new + key called "children" to the document's dictionary. + + .. note:: + + A document should only have exactly one parent. + + If a document does not have a parent, then its layer must be + the topmost layer defined by the ``layerOrder``. + + :returns: Ordered list of documents that need to be layered. Each + document contains a "children" property in addition to original + data. List of documents returned is ordered from highest to lowest + layer. + :rtype: list of deckhand.engine.document.Document objects. 
+        :raises IndeterminateDocumentParent: If more than one parent document
+            was found for a document.
+        """
+        layered_docs = list(
+            filter(lambda x: 'layeringDefinition' in x['metadata'],
+                   self.documents))
+
+        # ``all_children`` is a counter utility for verifying that each
+        # document has exactly one parent.
+        all_children = collections.Counter()
+
+        def _get_children(doc):
+            children = []
+            doc_layer = doc.get_layer()
+            try:
+                next_layer_idx = self.layer_order.index(doc_layer) + 1
+                children_doc_layer = self.layer_order[next_layer_idx]
+            except IndexError:
+                # The lowest layer has been reached, so no children. Return
+                # empty list.
+                return children
+
+            for other_doc in layered_docs:
+                # Documents with different schemas are never layered together,
+                # so consider only documents with same schema as candidates.
+                is_potential_child = (
+                    other_doc.get_layer() == children_doc_layer and
+                    other_doc.get_schema() == doc.get_schema()
+                )
+                if is_potential_child:
+                    # A document can have many labels but should only have one
+                    # explicit label for the parentSelector.
+                    parent_sel = other_doc.get_parent_selector()
+                    parent_sel_key = list(parent_sel.keys())[0]
+                    parent_sel_val = list(parent_sel.values())[0]
+                    doc_labels = doc.get_labels()
+
+                    if (parent_sel_key in doc_labels and
+                            parent_sel_val == doc_labels[parent_sel_key]):
+                        children.append(other_doc)
+
+            return children
+
+        for layer in self.layer_order:
+            docs_by_layer = list(filter(
+                (lambda x: x.get_layer() == layer), layered_docs))
+
+            for doc in docs_by_layer:
+                children = _get_children(doc)
+
+                if children:
+                    all_children.update(children)
+                    doc.to_dict().setdefault('children', children)
+
+        all_children_elements = list(all_children.elements())
+        secondary_docs = list(
+            filter(lambda d: d.get_layer() != self.layer_order[0],
+                   layered_docs))
+        for doc in secondary_docs:
+            # Unless the document is the topmost document in the
+            # `layerOrder` of the LayeringPolicy, it should be a child document
+            # of another document.
+            if doc not in all_children_elements:
+                LOG.info('Could not find parent for document with name=%s, '
+                         'schema=%s, layer=%s, parentSelector=%s.',
+                         doc.get_name(), doc.get_schema(), doc.get_layer(),
+                         doc.get_parent_selector())
+            # If the document is a child document of more than 1 parent, then
+            # the document has too many parents, which is a validation error.
+            elif all_children[doc] != 1:
+                LOG.info('%d parent documents were found for child document '
+                         'with name=%s, schema=%s, layer=%s, parentSelector=%s'
+                         '. Each document must only have 1 parent.',
+                         all_children[doc], doc.get_name(), doc.get_schema(),
+                         doc.get_layer(), doc.get_parent_selector())
+                raise errors.IndeterminateDocumentParent(document=doc)
+
+        return layered_docs
+
+    def __init__(self, layering_policy, documents):
         """Constructor for ``DocumentLayering``.

-        :param documents: List of YAML documents represented as dictionaries.
+        :param layering_policy: The document with schema
+            ``deckhand/LayeringPolicy`` needed for layering.
+        :param documents: List of all other documents to be layered together
+            in accordance with the ``layerOrder`` defined by the
+            LayeringPolicy document.
         """
+        self.layering_policy = document.Document(layering_policy)
         self.documents = [document.Document(d) for d in documents]
-        self._find_layering_policy()
+        self.layer_order = list(self.layering_policy['data']['layerOrder'])
         self.layered_docs = self._calc_document_children()

-    def render(self):
-        """Perform layering on the set of `documents`.
-
-        Each concrete document will undergo layering according to the actions
-        defined by its `layeringDefinition`.
-
-        :returns: the list of rendered documents (does not include layering
-            policy document).
-        """
-        # ``rendered_data_by_layer`` agglomerates the set of changes across all
-        # actions across each layer for a specific document.
-        rendered_data_by_layer = {}
-
-        # NOTE(fmontei): ``global_docs`` represents the topmost documents in
-        # the system. It should probably be impossible for more than 1
-        # top-level doc to exist, but handle multiple for now.
-        global_docs = [doc for doc in self.layered_docs
-                       if doc.get_layer() == self.layer_order[0]]
-
-        for doc in global_docs:
-            layer_idx = self.layer_order.index(doc.get_layer())
-            rendered_data_by_layer[layer_idx] = doc.to_dict()
-
-            # Keep iterating as long as a child exists.
-            for child in doc.get_children(nested=True):
-
-                # Retrieve the most up-to-date rendered_data (by
-                # referencing the child's parent's data).
-                child_layer_idx = self.layer_order.index(child.get_layer())
-                rendered_data = rendered_data_by_layer[child_layer_idx - 1]
-
-                # Apply each action to the current document.
-                actions = child.get_actions()
-                for action in actions:
-                    rendered_data = self._apply_action(
-                        action, child.to_dict(), rendered_data)
-
-                # Update the actual document data if concrete.
-                if not child.is_abstract():
-                    self.layered_docs[self.layered_docs.index(child)][
-                        'data'] = rendered_data['data']
-
-                # Update ``rendered_data_by_layer`` for this layer so that
-                # children in deeper layers can reference the most up-to-date
-                # changes.
-                rendered_data_by_layer[child_layer_idx] = rendered_data
-
-            if 'children' in doc:
-                del doc['children']
-
-        return [d.to_dict() for d in self.layered_docs]
-
     def _apply_action(self, action, child_data, overall_data):
         """Apply actions to each layer that is rendered.

@@ -175,121 +230,77 @@ class DocumentLayering(object):

         return overall_data

-    def _find_layering_policy(self):
-        """Retrieve the current layering policy.
-
-        :raises LayeringPolicyMalformed: If the `layerOrder` could not be
-            found in the LayeringPolicy or if it is not a list.
-        :raises LayeringPolicyNotFound: If system has no layering policy.
-        """
-        # TODO(fmontei): There should be a DB call here to fetch the layering
-        # policy from the DB.
-        for doc in self.documents:
-            if doc.to_dict()['schema'] == self.LAYERING_POLICY_SCHEMA:
-                self.layering_policy = doc
-                break
-
-        if not hasattr(self, 'layering_policy'):
-            raise errors.LayeringPolicyNotFound(
-                schema=self.LAYERING_POLICY_SCHEMA)
-
-        # TODO(fmontei): Rely on schema validation or some such for this.
+    def _apply_substitutions(self, data):
         try:
-            self.layer_order = list(self.layering_policy['data']['layerOrder'])
-        except KeyError:
-            raise errors.LayeringPolicyMalformed(
-                schema=self.LAYERING_POLICY_SCHEMA,
-                document=self.layering_policy)
+            secrets_substitution = secrets_manager.SecretsSubstitution(data)
+            return secrets_substitution.substitute_all()
+        except errors.DocumentNotFound as e:
+            LOG.error('Failed to render the documents because a secret '
+                      'document could not be found.')
+            LOG.exception(six.text_type(e))
+            # Re-raise so that a missing secret document surfaces as an error
+            # instead of silently returning None to the caller.
+            raise

-        if not isinstance(self.layer_order, list):
-            raise errors.LayeringPolicyMalformed(
-                schema=self.LAYERING_POLICY_SCHEMA,
-                document=self.layering_policy)
+    def render(self):
+        """Perform layering on the list of documents passed to ``__init__``.

-    def _calc_document_children(self):
-        """Determine each document's children.
+        Each concrete document will undergo layering according to the actions
+        defined by its ``metadata.layeringDefinition``. Documents are layered
+        with their parents. A parent document's ``schema`` must match that of
+        the child, and its ``metadata.labels`` must match the child's
+        ``metadata.layeringDefinition.parentSelector``.

-        For each document, attempts to find the document's children. Adds a new
-        key called "children" to the document's dictionary.
-
-        .. note::
-
-            A document should only have exactly one parent.
-
-            If a document does not have a parent, then its layer must be
-            the topmost layer defined by the `layerOrder`.
-
-        :returns: Ordered list of documents that need to be layered. Each
-            document contains a "children" property in addition to original
-            data. List of documents returned is ordered from highest to lowest
-            layer.
-        :rtype: list of deckhand.engine.document.Document objects.
-        :raises IndeterminateDocumentParent: If more than one parent document
-            was found for a document.
-        :raises MissingDocumentParent: If the parent document could not be
-            found. Only applies documents with `layeringDefinition` property.
+        :returns: The list of rendered documents (does not include layering
+            policy document).
+        :rtype: list[dict]
         """
-        layered_docs = list(
-            filter(lambda x: 'layeringDefinition' in x['metadata'],
-                   self.documents))
+        # ``rendered_data_by_layer`` tracks the set of changes across all
+        # actions across each layer for a specific document.
+        rendered_data_by_layer = {}

-        # ``all_children`` is a counter utility for verifying that each
-        # document has exactly one parent.
-        all_children = collections.Counter()
+        # NOTE(fmontei): ``global_docs`` represents the topmost documents in
+        # the system. It should probably be impossible for more than 1
+        # top-level doc to exist, but handle multiple for now.
+        global_docs = [doc for doc in self.layered_docs
+                       if doc.get_layer() == self.layer_order[0]]

-        def _get_children(doc):
-            children = []
-            doc_layer = doc.get_layer()
-            try:
-                next_layer_idx = self.layer_order.index(doc_layer) + 1
-                children_doc_layer = self.layer_order[next_layer_idx]
-            except IndexError:
-                # The lowest layer has been reached, so no children. Return
-                # empty list.
-                return children
+        for doc in global_docs:
+            layer_idx = self.layer_order.index(doc.get_layer())
+            if doc.get_substitutions():
+                substituted_data = self._apply_substitutions(doc.to_dict())
+                rendered_data_by_layer[layer_idx] = substituted_data[0]
+            else:
+                rendered_data_by_layer[layer_idx] = doc.to_dict()

-            for other_doc in layered_docs:
-                # Documents with different schemas are never layered together,
-                # so consider only documents with same schema as candidates.
-                if (other_doc.get_layer() == children_doc_layer
-                    and other_doc.get_schema() == doc.get_schema()):
-                    # A document can have many labels but should only have one
-                    # explicit label for the parentSelector.
-                    parent_sel = other_doc.get_parent_selector()
-                    parent_sel_key = list(parent_sel.keys())[0]
-                    parent_sel_val = list(parent_sel.values())[0]
-                    doc_labels = doc.get_labels()
+            # Keep iterating as long as a child exists.
+            for child in doc.get_children(nested=True):
+                # Retrieve the most up-to-date rendered_data (by
+                # referencing the child's parent's data).
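+                # The parent sits exactly one layer above the child in
+                # ``layerOrder``, hence the ``child_layer_idx - 1`` lookup
+                # that follows.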
+                child_layer_idx = self.layer_order.index(child.get_layer())
+                rendered_data = rendered_data_by_layer[child_layer_idx - 1]

-                    if (parent_sel_key in doc_labels and
-                        parent_sel_val == doc_labels[parent_sel_key]):
-                        children.append(other_doc)
+                # Apply each action to the current document.
+                for action in child.get_actions():
+                    LOG.debug('Applying action %s to child document with '
+                              'name=%s, schema=%s, layer=%s.', action,
+                              child.get_name(), child.get_schema(),
+                              child.get_layer())
+                    rendered_data = self._apply_action(
+                        action, child.to_dict(), rendered_data)

-            return children
+                # Update the actual document data if concrete.
+                if not child.is_abstract():
+                    if child.get_substitutions():
+                        rendered_data['metadata'][
+                            'substitutions'] = child.get_substitutions()
+                        self._apply_substitutions(rendered_data)
+                    self.layered_docs[self.layered_docs.index(child)][
+                        'data'] = rendered_data['data']

-        for layer in self.layer_order:
-            docs_by_layer = list(filter(
-                (lambda x: x.get_layer() == layer), layered_docs))
+                # Update ``rendered_data_by_layer`` for this layer so that
+                # children in deeper layers can reference the most up-to-date
+                # changes.
+                rendered_data_by_layer[child_layer_idx] = rendered_data

-            for doc in docs_by_layer:
-                children = _get_children(doc)
+            if 'children' in doc:
+                del doc['children']

-                if children:
-                    all_children.update(children)
-                    doc.to_dict().setdefault('children', children)
-
-        all_children_elements = list(all_children.elements())
-        secondary_docs = list(
-            filter(lambda d: d.get_layer() != self.layer_order[0],
-                   layered_docs))
-        for doc in secondary_docs:
-            # Unless the document is the topmost document in the
-            # `layerOrder` of the LayeringPolicy, it should be a child document
-            # of another document.
-            if doc not in all_children_elements:
-                raise errors.MissingDocumentParent(document=doc)
-            # If the document is a child document of more than 1 parent, then
-            # the document has too many parents, which is a validation error.
-            elif all_children[doc] != 1:
-                raise errors.IndeterminateDocumentParent(document=doc)
-
-        return layered_docs
+        return [d.to_dict() for d in self.layered_docs]
diff --git a/deckhand/engine/secrets_manager.py b/deckhand/engine/secrets_manager.py
index 8519c820..f4b0e8a4 100644
--- a/deckhand/engine/secrets_manager.py
+++ b/deckhand/engine/secrets_manager.py
@@ -99,23 +99,21 @@ class SecretsSubstitution(object):
     def __init__(self, documents):
         """SecretSubstitution constructor.

-        :param documents: List of YAML documents in dictionary format that are
-            candidates for secret substitution. This class will automatically
-            detect documents that require substitution; documents need not be
-            filtered prior to being passed to the constructor.
+        :param documents: List of documents that are candidates for secret
+            substitution. This class will automatically detect documents that
+            require substitution; documents need not be filtered prior to being
+            passed to the constructor.
         """
         if not isinstance(documents, (list, tuple)):
             documents = [documents]

         self.docs_to_sub = []
-        self.other_docs = []

         for document in documents:
-            doc = document_wrapper.Document(document)
-            if doc.get_substitutions():
-                self.docs_to_sub.append(doc)
-            else:
-                self.other_docs.append(document)
+            # Wrap raw dicts; documents may already be Document instances.
+            if not isinstance(document, document_wrapper.Document):
+                document = document_wrapper.Document(document)
+            if document.get_substitutions():
+                self.docs_to_sub.append(document)

     def substitute_all(self):
         """Substitute all documents that have a `metadata.substitutions` field.
@@ -160,4 +158,4 @@ class SecretsSubstitution(object): doc['data'].update(substituted_data) substituted_docs.append(doc.to_dict()) - return substituted_docs + self.other_docs + return substituted_docs diff --git a/deckhand/errors.py b/deckhand/errors.py index a089157e..a55d0a69 100644 --- a/deckhand/errors.py +++ b/deckhand/errors.py @@ -196,12 +196,6 @@ class SingletonDocumentConflict(DeckhandException): code = 409 -class LayeringPolicyMalformed(DeckhandException): - msg_fmt = ("LayeringPolicy with schema %(schema)s is improperly formatted:" - " %(document)s.") - code = 400 - - class IndeterminateDocumentParent(DeckhandException): msg_fmt = ("Too many parent documents found for document %(document)s.") code = 400 @@ -217,12 +211,6 @@ class MissingDocumentKey(DeckhandException): "Parent: %(parent)s. Child: %(child)s.") -class MissingDocumentPattern(DeckhandException): - msg_fmt = ("Substitution pattern %(pattern)s could not be found for the " - "JSON path %(path)s in the destination document data %(data)s.") - code = 400 - - class UnsupportedActionMethod(DeckhandException): msg_fmt = ("Method in %(actions)s is invalid for document %(document)s.") code = 400 @@ -233,12 +221,6 @@ class DocumentNotFound(DeckhandException): code = 404 -class LayeringPolicyNotFound(DeckhandException): - msg_fmt = ("LayeringPolicy with schema %(schema)s not found in the " - "system.") - code = 404 - - class RevisionNotFound(DeckhandException): msg_fmt = "The requested revision %(revision)s was not found." code = 404 diff --git a/deckhand/factories.py b/deckhand/factories.py index 09231ea9..d9458425 100644 --- a/deckhand/factories.py +++ b/deckhand/factories.py @@ -237,7 +237,7 @@ class DocumentFactory(DeckhandFactory): # Set name. layer_template = copy.deepcopy(layer_template) layer_template['metadata']['name'] = "%s%d" % ( - layer_name, count + 1) + test_utils.rand_name(layer_name), count + 1) # Set layer. layer_template['metadata']['layeringDefinition'][ diff --git a/deckhand/tests/functional/gabbits/document-crud-success-single-bucket.yaml b/deckhand/tests/functional/gabbits/document-crud-success-single-bucket.yaml index 73ee08a7..ba11d139 100644 --- a/deckhand/tests/functional/gabbits/document-crud-success-single-bucket.yaml +++ b/deckhand/tests/functional/gabbits/document-crud-success-single-bucket.yaml @@ -49,7 +49,7 @@ tests: desc: Create initial documents PUT: /api/v1.0/buckets/mop/documents status: 200 - data: <@resources/design-doc-layering-sample.yaml + data: <@resources/design-doc-layering-sample-3-layers.yaml - name: verify_initial desc: Verify initial document count and revisions @@ -77,7 +77,7 @@ tests: desc: Push a duplicate bucket of documents PUT: /api/v1.0/buckets/mop/documents status: 200 - data: <@resources/design-doc-layering-sample.yaml + data: <@resources/design-doc-layering-sample-3-layers.yaml - name: verify_ignore desc: Verify duplicate documents were ignored diff --git a/deckhand/tests/functional/gabbits/document-render-error-layering-policy-conflict.yaml b/deckhand/tests/functional/gabbits/document-render-error-layering-policy-conflict.yaml new file mode 100644 index 00000000..9648feb1 --- /dev/null +++ b/deckhand/tests/functional/gabbits/document-render-error-layering-policy-conflict.yaml @@ -0,0 +1,29 @@ +# Tests failure paths for layering. +# +# 1. Purges existing data to ensure test isolation +# 2. Adds initial documents that do not include a layering policy +# 3. 
Verifies that 409 is raised when attempting to layer without a layering policy
+
+defaults:
+  request_headers:
+    content-type: application/x-yaml
+  response_headers:
+    content-type: application/x-yaml
+
+tests:
+  - name: purge
+    desc: Begin testing from known state.
+    DELETE: /api/v1.0/revisions
+    status: 204
+    response_headers: null
+
+  - name: initialize
+    desc: Create initial documents
+    PUT: /api/v1.0/buckets/mop/documents
+    status: 200
+    data: <@resources/passphrase.yaml
+
+  - name: verify_missing_layering_policy_raises_conflict
+    desc: Verify that attempting to render documents without a layering policy raises a 409
+    GET: /api/v1.0/revisions/$RESPONSE['$.[0].status.revision']/rendered-documents
+    status: 409
diff --git a/deckhand/tests/functional/gabbits/document-render-success-multiple-bucket-with-layering.yaml b/deckhand/tests/functional/gabbits/document-render-success-multiple-bucket-with-layering.yaml
new file mode 100644
index 00000000..0aba0633
--- /dev/null
+++ b/deckhand/tests/functional/gabbits/document-render-success-multiple-bucket-with-layering.yaml
@@ -0,0 +1,45 @@
+# Tests success path for layering involving multiple source buckets.
+#
+# 1. Purges existing data to ensure test isolation
+# 2. Adds documents to bucket a
+# 3. Adds documents to bucket b
+# 4. Verifies that the document data was layered correctly
+
+defaults:
+  request_headers:
+    content-type: application/x-yaml
+  response_headers:
+    content-type: application/x-yaml
+
+tests:
+  - name: purge
+    desc: Begin testing from known state.
+    DELETE: /api/v1.0/revisions
+    status: 204
+    response_headers: null
+
+  - name: add_bucket_a
+    desc: Create documents for bucket a
+    PUT: /api/v1.0/buckets/a/documents
+    status: 200
+    data: <@resources/design-doc-layering-sample-split-bucket-a.yaml
+
+  - name: add_bucket_b
+    desc: Create documents for bucket b
+    PUT: /api/v1.0/buckets/b/documents
+    status: 200
+    data: <@resources/design-doc-layering-sample-split-bucket-b.yaml
+
+  - name: verify_layering
+    desc: Check for expected layering
+    GET: /api/v1.0/revisions/$RESPONSE['$.[0].status.revision']/rendered-documents
+    status: 200
+    response_multidoc_jsonpaths:
+      $.`len`: 1
+      $.[*].schema: example/Kind/v1
+      $.[*].metadata.name: site-1234
+      $.[*].metadata.schema: metadata/Document/v1
+      $.[*].data:
+        a:
+          z: 3
+        b: 4
diff --git a/deckhand/tests/functional/gabbits/document-render-success-single-bucket-with-layering.yaml b/deckhand/tests/functional/gabbits/document-render-success-single-bucket-with-layering.yaml
new file mode 100644
index 00000000..92729fd8
--- /dev/null
+++ b/deckhand/tests/functional/gabbits/document-render-success-single-bucket-with-layering.yaml
@@ -0,0 +1,66 @@
+# Tests success path for basic layering.
+#
+# 1. Purges existing data to ensure test isolation
+# 2. Adds initial documents from layering sample of design doc
+# 3. Verifies document data layered correctly (2 layers)
+# 4. Verifies document data layered correctly (3 layers)
+
+defaults:
+  request_headers:
+    content-type: application/x-yaml
+  response_headers:
+    content-type: application/x-yaml
+
+tests:
+  - name: purge
+    desc: Begin testing from known state.
+ DELETE: /api/v1.0/revisions + status: 204 + response_headers: null + + - name: create_documents_for_validating_2_level_layering + desc: Create documents for validating 2 levels of layering (global, site) + PUT: /api/v1.0/buckets/mop/documents + status: 200 + data: <@resources/design-doc-layering-sample-2-layers.yaml + + - name: verify_layering_2_layers + desc: Check for expected layering with 2 layers + GET: /api/v1.0/revisions/$RESPONSE['$.[0].status.revision']/rendered-documents + status: 200 + response_multidoc_jsonpaths: + $.`len`: 1 + $.[*].schema: example/Kind/v1 + $.[*].metadata.name: site-1234 + $.[*].metadata.schema: metadata/Document/v1 + $.[*].data: + a: + x: 1 + y: 2 + b: 5 + + - name: purge_again + desc: Begin testing from known state. + DELETE: /api/v1.0/revisions + status: 204 + response_headers: null + + - name: create_documents_for_validating_3_level_layering + desc: Create documents for validating 3 levels of layering (global, region, site) + PUT: /api/v1.0/buckets/mop/documents + status: 200 + data: <@resources/design-doc-layering-sample-3-layers.yaml + + - name: verify_layering_3_layers + desc: Check for expected layering with 3 layers + GET: /api/v1.0/revisions/$RESPONSE['$.[0].status.revision']/rendered-documents + status: 200 + response_multidoc_jsonpaths: + $.`len`: 1 + $.[*].schema: example/Kind/v1 + $.[*].metadata.name: site-1234 + $.[*].metadata.schema: metadata/Document/v1 + $.[*].data: + a: + z: 3 + b: 4 diff --git a/deckhand/tests/functional/gabbits/resources/design-doc-layering-sample-2-layers.yaml b/deckhand/tests/functional/gabbits/resources/design-doc-layering-sample-2-layers.yaml new file mode 100644 index 00000000..7d9122a7 --- /dev/null +++ b/deckhand/tests/functional/gabbits/resources/design-doc-layering-sample-2-layers.yaml @@ -0,0 +1,37 @@ +--- +schema: deckhand/LayeringPolicy/v1 +metadata: + schema: metadata/Control/v1 + name: layering-policy +data: + layerOrder: + - global + - site +--- +schema: example/Kind/v1 +metadata: + schema: metadata/Document/v1 + name: global-1234 + labels: + key1: value1 + layeringDefinition: + abstract: true + layer: global +data: + a: + x: 1 + y: 2 +--- +schema: example/Kind/v1 +metadata: + schema: metadata/Document/v1 + name: site-1234 + layeringDefinition: + layer: site + parentSelector: + key1: value1 + actions: + - method: merge + path: . +data: + b: 5 diff --git a/deckhand/tests/functional/gabbits/resources/design-doc-layering-sample.yaml b/deckhand/tests/functional/gabbits/resources/design-doc-layering-sample-3-layers.yaml similarity index 100% rename from deckhand/tests/functional/gabbits/resources/design-doc-layering-sample.yaml rename to deckhand/tests/functional/gabbits/resources/design-doc-layering-sample-3-layers.yaml diff --git a/deckhand/tests/functional/gabbits/resources/design-doc-layering-sample-split-bucket-a.yaml b/deckhand/tests/functional/gabbits/resources/design-doc-layering-sample-split-bucket-a.yaml new file mode 100644 index 00000000..b7d421fa --- /dev/null +++ b/deckhand/tests/functional/gabbits/resources/design-doc-layering-sample-split-bucket-a.yaml @@ -0,0 +1,25 @@ +--- +schema: deckhand/LayeringPolicy/v1 +metadata: + schema: metadata/Control/v1 + name: layering-policy +data: + layerOrder: + - global + - region + - site +--- +schema: example/Kind/v1 +metadata: + schema: metadata/Document/v1 + name: global-1234 + labels: + key1: value1 + layeringDefinition: + abstract: true + layer: global +data: + a: + x: 1 + y: 2 +... 
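The split-bucket resources above and below exercise cross-bucket parent selection: the region and site documents in bucket b select their parents through ``parentSelector`` labels, even though the global document lives in bucket a. As a rough sketch of the matching rule that ``_calc_document_children`` applies (the helper name and plain-dict document representation below are illustrative only, not Deckhand's API):

    def is_parent_of(parent, child, layer_order):
        """Sketch: ``parent`` is ``child``'s parent when it sits exactly one
        layer above the child in ``layerOrder``, shares the child's
        ``schema`` and carries the child's ``parentSelector`` label."""
        child_def = child['metadata']['layeringDefinition']
        parent_layer = parent['metadata']['layeringDefinition']['layer']
        if (layer_order.index(child_def['layer']) -
                layer_order.index(parent_layer)) != 1:
            return False
        if parent['schema'] != child['schema']:
            return False
        # Deckhand assumes a single explicit parentSelector label per child.
        (sel_key, sel_val), = child_def['parentSelector'].items()
        return parent['metadata'].get('labels', {}).get(sel_key) == sel_val

With the documents in these two buckets, region-1234 matches global-1234 (both carry ``key1: value1``) and site-1234 in turn matches region-1234, which is what the multiple-bucket rendering test asserts.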
diff --git a/deckhand/tests/functional/gabbits/resources/design-doc-layering-sample-split-bucket-b.yaml b/deckhand/tests/functional/gabbits/resources/design-doc-layering-sample-split-bucket-b.yaml new file mode 100644 index 00000000..904316fb --- /dev/null +++ b/deckhand/tests/functional/gabbits/resources/design-doc-layering-sample-split-bucket-b.yaml @@ -0,0 +1,36 @@ +--- +schema: example/Kind/v1 +metadata: + schema: metadata/Document/v1 + name: region-1234 + labels: + key1: value1 + layeringDefinition: + abstract: true + layer: region + parentSelector: + key1: value1 + actions: + - method: replace + path: .a +data: + a: + z: 3 +--- +schema: example/Kind/v1 +metadata: + schema: metadata/Document/v1 + name: site-1234 + labels: + foo: bar + baz: qux + layeringDefinition: + layer: site + parentSelector: + key1: value1 + actions: + - method: merge + path: . +data: + b: 4 +... diff --git a/deckhand/tests/functional/gabbits/resources/design-doc-substitution-sample-split-bucket-b.yaml b/deckhand/tests/functional/gabbits/resources/design-doc-substitution-sample-split-bucket-b.yaml index 90e8f676..51ef71c0 100644 --- a/deckhand/tests/functional/gabbits/resources/design-doc-substitution-sample-split-bucket-b.yaml +++ b/deckhand/tests/functional/gabbits/resources/design-doc-substitution-sample-split-bucket-b.yaml @@ -1,4 +1,13 @@ --- +schema: deckhand/LayeringPolicy/v1 +metadata: + schema: metadata/Control/v1 + name: layering-policy +data: + layerOrder: + - region + - site +--- schema: deckhand/Certificate/v1 metadata: name: example-cert diff --git a/deckhand/tests/functional/gabbits/resources/design-doc-substitution-sample.yaml b/deckhand/tests/functional/gabbits/resources/design-doc-substitution-sample.yaml index 3492d2c8..66086a5b 100644 --- a/deckhand/tests/functional/gabbits/resources/design-doc-substitution-sample.yaml +++ b/deckhand/tests/functional/gabbits/resources/design-doc-substitution-sample.yaml @@ -1,4 +1,13 @@ --- +schema: deckhand/LayeringPolicy/v1 +metadata: + schema: metadata/Control/v1 + name: layering-policy +data: + layerOrder: + - region + - site +--- schema: deckhand/Certificate/v1 metadata: name: example-cert diff --git a/deckhand/tests/functional/gabbits/revision-crud-success-single-bucket.yaml b/deckhand/tests/functional/gabbits/revision-crud-success-single-bucket.yaml index cd4ac7bd..833ea39d 100644 --- a/deckhand/tests/functional/gabbits/revision-crud-success-single-bucket.yaml +++ b/deckhand/tests/functional/gabbits/revision-crud-success-single-bucket.yaml @@ -27,7 +27,7 @@ tests: desc: Create initial documents PUT: /api/v1.0/buckets/mop/documents status: 200 - data: <@resources/design-doc-layering-sample.yaml + data: <@resources/design-doc-layering-sample-3-layers.yaml # Validates whether revision was created. 
# Required parameters: diff --git a/deckhand/tests/functional/gabbits/revision-documents-filters-negative.yaml b/deckhand/tests/functional/gabbits/revision-documents-filters-negative.yaml index 6fd5d9ce..3cead92a 100644 --- a/deckhand/tests/functional/gabbits/revision-documents-filters-negative.yaml +++ b/deckhand/tests/functional/gabbits/revision-documents-filters-negative.yaml @@ -20,7 +20,7 @@ tests: desc: Create initial documents PUT: /api/v1.0/buckets/mop/documents status: 200 - data: <@resources/design-doc-layering-sample.yaml + data: <@resources/design-doc-layering-sample-3-layers.yaml - name: filter_by_schema_partial_namespace desc: Verify revision documents do not return results for partial namespace diff --git a/deckhand/tests/functional/gabbits/revision-documents-filters.yaml b/deckhand/tests/functional/gabbits/revision-documents-filters.yaml index c3998adb..0ca9b2ea 100644 --- a/deckhand/tests/functional/gabbits/revision-documents-filters.yaml +++ b/deckhand/tests/functional/gabbits/revision-documents-filters.yaml @@ -24,7 +24,7 @@ tests: desc: Create initial documents PUT: /api/v1.0/buckets/mop/documents status: 200 - data: <@resources/design-doc-layering-sample.yaml + data: <@resources/design-doc-layering-sample-3-layers.yaml - name: filter_by_schema desc: Verify revision documents filtered by schema diff --git a/deckhand/tests/functional/gabbits/revision-documents-multiple-filters.yaml b/deckhand/tests/functional/gabbits/revision-documents-multiple-filters.yaml index faf8c506..72e423e7 100644 --- a/deckhand/tests/functional/gabbits/revision-documents-multiple-filters.yaml +++ b/deckhand/tests/functional/gabbits/revision-documents-multiple-filters.yaml @@ -23,7 +23,7 @@ tests: desc: Create initial documents PUT: /api/v1.0/buckets/mop/documents status: 200 - data: <@resources/design-doc-layering-sample.yaml + data: <@resources/design-doc-layering-sample-3-layers.yaml - name: filter_by_multiple_different_filters_expect_site desc: Verify revision documents filtered by multiple repeated keys that are different diff --git a/deckhand/tests/functional/gabbits/revision-filters.yaml b/deckhand/tests/functional/gabbits/revision-filters.yaml index 5a44bc03..9b4bbde4 100644 --- a/deckhand/tests/functional/gabbits/revision-filters.yaml +++ b/deckhand/tests/functional/gabbits/revision-filters.yaml @@ -21,7 +21,7 @@ tests: desc: Create first revision for testing PUT: /api/v1.0/buckets/bucket_a/documents status: 200 - data: <@resources/design-doc-layering-sample.yaml + data: <@resources/design-doc-layering-sample-3-layers.yaml - name: initialize_again desc: Create second revision for testing diff --git a/deckhand/tests/functional/gabbits/revision-tag-success.yaml b/deckhand/tests/functional/gabbits/revision-tag-success.yaml index d90f8d81..0eae78e8 100644 --- a/deckhand/tests/functional/gabbits/revision-tag-success.yaml +++ b/deckhand/tests/functional/gabbits/revision-tag-success.yaml @@ -41,7 +41,7 @@ tests: desc: Create initial documents PUT: /api/v1.0/buckets/mop/documents status: 200 - data: <@resources/design-doc-layering-sample.yaml + data: <@resources/design-doc-layering-sample-3-layers.yaml - name: create_tag desc: Create a tag for the revision diff --git a/deckhand/tests/functional/gabbits/rollback-success-single-bucket.yaml b/deckhand/tests/functional/gabbits/rollback-success-single-bucket.yaml index c144da7e..25f3e2b3 100644 --- a/deckhand/tests/functional/gabbits/rollback-success-single-bucket.yaml +++ b/deckhand/tests/functional/gabbits/rollback-success-single-bucket.yaml @@ 
-29,7 +29,7 @@ tests: desc: Create initial documents PUT: /api/v1.0/buckets/mop/documents status: 200 - data: <@resources/design-doc-layering-sample.yaml + data: <@resources/design-doc-layering-sample-3-layers.yaml - name: update_single_document desc: Update a single document, ignore other documents in the bucket diff --git a/deckhand/tests/unit/control/test_buckets_controller.py b/deckhand/tests/unit/control/test_buckets_controller.py index b588f5b5..2a723cd9 100644 --- a/deckhand/tests/unit/control/test_buckets_controller.py +++ b/deckhand/tests/unit/control/test_buckets_controller.py @@ -28,6 +28,18 @@ CONF = cfg.CONF class TestBucketsController(test_base.BaseControllerTest): """Test suite for validating positive scenarios for buckets controller.""" + def test_put_empty_bucket(self): + rules = {'deckhand:create_cleartext_documents': '@'} + self.policy.set_rules(rules) + + resp = self.app.simulate_put( + '/api/v1.0/buckets/mop/documents', + headers={'Content-Type': 'application/x-yaml'}, + body=yaml.safe_dump_all([])) + self.assertEqual(200, resp.status_code) + created_documents = list(yaml.safe_load_all(resp.text)) + self.assertEmpty(created_documents) + def test_put_bucket(self): rules = {'deckhand:create_cleartext_documents': '@'} self.policy.set_rules(rules) diff --git a/deckhand/tests/unit/control/test_rendered_documents_controller.py b/deckhand/tests/unit/control/test_rendered_documents_controller.py index 7ea8e348..c092feba 100644 --- a/deckhand/tests/unit/control/test_rendered_documents_controller.py +++ b/deckhand/tests/unit/control/test_rendered_documents_controller.py @@ -35,8 +35,8 @@ class TestRenderedDocumentsController(test_base.BaseControllerTest): # Create 2 docs: one concrete, one abstract. documents_factory = factories.DocumentFactory(2, [1, 1]) payload = documents_factory.gen_test( - {}, global_abstract=False, region_abstract=True)[1:] - concrete_doc = payload[0] + {}, global_abstract=False, region_abstract=True) + concrete_doc = payload[1] resp = self.app.simulate_put( '/api/v1.0/buckets/mop/documents', @@ -78,23 +78,21 @@ class TestRenderedDocumentsController(test_base.BaseControllerTest): self.policy.set_rules(rules) # Create 1st document. - documents_factory = factories.DocumentFactory(2, [1, 1]) + documents_factory = factories.DocumentFactory(1, [1]) payload = documents_factory.gen_test({}, global_abstract=False)[1:] - payload[0]['metadata']['name'] = test_utils.rand_name('document') resp = self.app.simulate_put( '/api/v1.0/buckets/mop/documents', headers={'Content-Type': 'application/x-yaml'}, body=yaml.safe_dump_all(payload)) self.assertEqual(200, resp.status_code) - # Create 2nd document (exclude 1st document). - payload = documents_factory.gen_test({}, global_abstract=False)[1:] - second_name = test_utils.rand_name('document') - payload[0]['metadata']['name'] = second_name + # Create 2nd document (exclude 1st document in new payload). 
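+        # The factory now randomizes document names (see the factories.py
+        # change earlier in this diff), so the regenerated payload carries a
+        # name that differs from the 1st revision's document.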
+ payload = documents_factory.gen_test({}, global_abstract=False) + new_name = payload[-1]['metadata']['name'] resp = self.app.simulate_put( '/api/v1.0/buckets/mop/documents', headers={'Content-Type': 'application/x-yaml'}, - body=yaml.safe_dump_all([payload[0]])) + body=yaml.safe_dump_all(payload)) self.assertEqual(200, resp.status_code) revision_id = list(yaml.safe_load_all(resp.text))[0]['status'][ 'revision'] @@ -107,10 +105,37 @@ class TestRenderedDocumentsController(test_base.BaseControllerTest): rendered_documents = list(yaml.safe_load_all(resp.text)) self.assertEqual(1, len(rendered_documents)) - self.assertEqual(second_name, - rendered_documents[0]['metadata']['name']) + self.assertEqual(new_name, rendered_documents[0]['metadata']['name']) self.assertEqual(2, rendered_documents[0]['status']['revision']) + def test_list_rendered_documents_multiple_buckets(self): + rules = {'deckhand:list_cleartext_documents': '@', + 'deckhand:list_encrypted_documents': '@', + 'deckhand:create_cleartext_documents': '@'} + self.policy.set_rules(rules) + + documents_factory = factories.DocumentFactory(1, [1]) + + for idx in range(2): + payload = documents_factory.gen_test({}) + if idx == 0: + # Pop off the first entry so that a conflicting layering + # policy isn't created during the 1st iteration. + payload.pop(0) + resp = self.app.simulate_put( + '/api/v1.0/buckets/%s/documents' % test_utils.rand_name( + 'bucket'), + headers={'Content-Type': 'application/x-yaml'}, + body=yaml.safe_dump_all(payload)) + self.assertEqual(200, resp.status_code) + revision_id = list(yaml.safe_load_all(resp.text))[0]['status'][ + 'revision'] + + resp = self.app.simulate_get( + '/api/v1.0/revisions/%s/rendered-documents' % revision_id, + headers={'Content-Type': 'application/x-yaml'}) + self.assertEqual(200, resp.status_code) + class TestRenderedDocumentsControllerNegative( test_base.BaseControllerTest): @@ -125,8 +150,8 @@ class TestRenderedDocumentsControllerNegative( self.policy.set_rules(rules) # Create a document for a bucket. - secrets_factory = factories.DocumentSecretFactory() - payload = [secrets_factory.gen_test('Certificate', 'cleartext')] + documents_factory = factories.DocumentFactory(1, [1]) + payload = documents_factory.gen_test({}) resp = self.app.simulate_put( '/api/v1.0/buckets/mop/documents', headers={'Content-Type': 'application/x-yaml'}, @@ -161,8 +186,8 @@ class TestRenderedDocumentsControllerNegativeRBAC( self.policy.set_rules(rules) # Create a document for a bucket. - secrets_factory = factories.DocumentSecretFactory() - payload = [secrets_factory.gen_test('Certificate', 'cleartext')] + documents_factory = factories.DocumentFactory(1, [1]) + payload = [documents_factory.gen_test({})[0]] resp = self.app.simulate_put( '/api/v1.0/buckets/mop/documents', headers={'Content-Type': 'application/x-yaml'}, @@ -185,8 +210,13 @@ class TestRenderedDocumentsControllerNegativeRBAC( self.policy.set_rules(rules) # Create a document for a bucket. 
+ documents_factory = factories.DocumentFactory(1, [1]) + layering_policy = documents_factory.gen_test({})[0] secrets_factory = factories.DocumentSecretFactory() - payload = [secrets_factory.gen_test('Certificate', 'encrypted')] + encrypted_document = secrets_factory.gen_test('Certificate', + 'encrypted') + payload = [layering_policy, encrypted_document] + with mock.patch.object(buckets.BucketsResource, 'secrets_mgr', autospec=True) as mock_secrets_mgr: mock_secrets_mgr.create.return_value = { diff --git a/deckhand/tests/unit/db/test_documents.py b/deckhand/tests/unit/db/test_documents.py index b158ac55..740e6ac0 100644 --- a/deckhand/tests/unit/db/test_documents.py +++ b/deckhand/tests/unit/db/test_documents.py @@ -214,23 +214,25 @@ class TestDocuments(base.TestDbBase): def test_delete_all_documents(self): payload = self.documents_factory.gen_test(self.document_mapping) bucket_name = test_utils.rand_name('bucket') - documents = self.create_documents(bucket_name, payload) + created_documents = self.create_documents(bucket_name, payload) + self.assertIsInstance(created_documents, list) + self.assertEqual(3, len(created_documents)) - self.assertIsInstance(documents, list) - self.assertEqual(3, len(documents)) + deleted_documents = self.create_documents(bucket_name, []) - documents = self.create_documents(bucket_name, []) - documents = sorted( - documents, key=lambda d: d['name']) + # Verify that all the expected documents were deleted. + self.assertEqual( + sorted([(d['metadata']['name'], d['schema']) + for d in created_documents]), + sorted([(d['name'], d['schema']) for d in deleted_documents])) - for idx in range(3): - self.assertTrue(documents[idx]['deleted']) - self.assertTrue(documents[idx]['deleted_at']) - self.assertEqual(documents[idx]['schema'], payload[idx]['schema']) - self.assertEqual(documents[idx]['name'], - payload[idx]['metadata']['name']) - self.assertEmpty(documents[idx]['metadata']) - self.assertEmpty(documents[idx]['data']) + # Verify that all their attributes have been cleared and that the + # deleted/deleted_at attributes have been set to True. + for deleted_document in deleted_documents: + self.assertTrue(deleted_document['deleted']) + self.assertTrue(deleted_document['deleted_at']) + self.assertEmpty(deleted_document['metadata']) + self.assertEmpty(deleted_document['data']) def test_delete_and_create_document_in_same_payload(self): payload = self.documents_factory.gen_test(self.document_mapping) diff --git a/deckhand/tests/unit/engine/test_document_layering.py b/deckhand/tests/unit/engine/test_document_layering.py index 1886ec83..75f30059 100644 --- a/deckhand/tests/unit/engine/test_document_layering.py +++ b/deckhand/tests/unit/engine/test_document_layering.py @@ -12,18 +12,31 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import copy + from deckhand.engine import layering from deckhand import errors from deckhand import factories from deckhand.tests.unit import base as test_base +from deckhand import types class TestDocumentLayering(test_base.DeckhandTestCase): + def _extract_layering_policy(self, documents): + for doc in copy.copy(documents): + if doc['schema'].startswith(types.LAYERING_POLICY_SCHEMA): + layering_policy = doc + documents.remove(doc) + return layering_policy + return None + def _test_layering(self, documents, site_expected=None, region_expected=None, global_expected=None, exception_expected=None): - document_layering = layering.DocumentLayering(documents) + layering_policy = self._extract_layering_policy(documents) + document_layering = layering.DocumentLayering( + layering_policy, documents) if all([site_expected, region_expected, global_expected, exception_expected]): @@ -56,19 +69,22 @@ class TestDocumentLayering(test_base.DeckhandTestCase): site_expected = [site_expected] for idx, expected in enumerate(site_expected): - self.assertEqual(expected, site_docs[idx].get('data')) + self.assertEqual(expected, site_docs[idx].get('data'), + 'Actual site data does not match expected.') if region_expected: if not isinstance(region_expected, list): region_expected = [region_expected] for idx, expected in enumerate(region_expected): - self.assertEqual(expected, region_docs[idx].get('data')) + self.assertEqual(expected, region_docs[idx].get('data'), + 'Actual region data does not match expected.') if global_expected: if not isinstance(global_expected, list): global_expected = [global_expected] for idx, expected in enumerate(global_expected): - self.assertEqual(expected, global_docs[idx].get('data')) + self.assertEqual(expected, global_docs[idx].get('data'), + 'Actual global data does not match expected.') class TestDocumentLayering2Layers(TestDocumentLayering): @@ -138,6 +154,26 @@ class TestDocumentLayering2Layers(TestDocumentLayering): documents = doc_factory.gen_test(mapping, site_abstract=False) self._test_layering(documents, site_expected[idx]) + def test_layering_documents_with_different_schemas_do_not_layer(self): + """Validates that documents with different schemas are not layered + together. + """ + mapping = { + "_GLOBAL_DATA_1_": {"data": {"a": {"x": 1, "y": 2}}}, + "_SITE_DATA_1_": {"data": {"b": 4}}, + "_SITE_ACTIONS_1_": { + "actions": [{"method": "merge", "path": "."}]} + } + doc_factory = factories.DocumentFactory(2, [1, 1]) + documents = doc_factory.gen_test(mapping, site_abstract=False) + documents[1]['schema'] = 'deckhand/Document/v1' + documents[2]['schema'] = 'deckhand/Document/v2' + + global_expected = {"a": {"x": 1, "y": 2}} + site_expected = {'b': 4} + self._test_layering(documents, site_expected=site_expected, + global_expected=global_expected) + class TestDocumentLayering2LayersAbstractConcrete(TestDocumentLayering): """The the 2-layer payload with site/global layers concrete. diff --git a/deckhand/tests/unit/engine/test_document_layering_and_substitution.py b/deckhand/tests/unit/engine/test_document_layering_and_substitution.py new file mode 100644 index 00000000..02a28b2f --- /dev/null +++ b/deckhand/tests/unit/engine/test_document_layering_and_substitution.py @@ -0,0 +1,162 @@ +# Copyright 2017 AT&T Intellectual Property. All other rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import mock + +from deckhand.engine import secrets_manager +from deckhand import factories +from deckhand.tests.unit.engine import test_document_layering + + +class TestDocumentLayeringWithSubstitution( + test_document_layering.TestDocumentLayering): + + def test_layering_and_substitution_default_scenario(self): + mapping = { + "_GLOBAL_DATA_1_": {"data": {"a": {"x": 1, "y": 2}}}, + "_GLOBAL_SUBSTITUTIONS_1_": [{ + "dest": { + "path": ".c" + }, + "src": { + "schema": "deckhand/Certificate/v1", + "name": "global-cert", + "path": "." + } + + }], + "_SITE_DATA_1_": {"data": {"b": 4}}, + "_SITE_ACTIONS_1_": { + "actions": [{"method": "merge", "path": "."}]} + } + doc_factory = factories.DocumentFactory(2, [1, 1]) + documents = doc_factory.gen_test(mapping, site_abstract=False) + + secrets_factory = factories.DocumentSecretFactory() + certificate = secrets_factory.gen_test( + 'Certificate', 'cleartext', data={'secret': 'global-secret'}, + name='global-cert') + + global_expected = {'a': {'x': 1, 'y': 2}, 'c': 'global-secret'} + site_expected = {'a': {'x': 1, 'y': 2}, 'b': 4, 'c': 'global-secret'} + + with mock.patch.object( + secrets_manager.db_api, 'document_get', + return_value=certificate, autospec=True) as mock_document_get: + self._test_layering(documents, site_expected=site_expected, + global_expected=global_expected) + mock_document_get.assert_called_once_with( + schema=certificate['schema'], name=certificate['metadata']['name'], + is_secret=True, **{'metadata.layeringDefinition.abstract': False}) + + def test_layering_and_substitution_no_children(self): + mapping = { + "_GLOBAL_DATA_1_": {"data": {"a": {"x": 1, "y": 2}}}, + "_GLOBAL_SUBSTITUTIONS_1_": [{ + "dest": { + "path": ".c" + }, + "src": { + "schema": "deckhand/Certificate/v1", + "name": "global-cert", + "path": "." + } + + }], + "_SITE_DATA_1_": {"data": {"b": 4}}, + "_SITE_ACTIONS_1_": { + "actions": [{"method": "merge", "path": "."}]} + } + doc_factory = factories.DocumentFactory(2, [1, 1]) + documents = doc_factory.gen_test(mapping, site_abstract=False) + + documents[1]['metadata']['labels'] = {} + secrets_factory = factories.DocumentSecretFactory() + certificate = secrets_factory.gen_test( + 'Certificate', 'cleartext', data={'secret': 'global-secret'}, + name='global-cert') + + global_expected = {'a': {'x': 1, 'y': 2}, 'c': 'global-secret'} + site_expected = {'b': 4} + + with mock.patch.object( + secrets_manager.db_api, 'document_get', + return_value=certificate, autospec=True) as mock_document_get: + self._test_layering(documents, site_expected=site_expected, + global_expected=global_expected) + mock_document_get.assert_called_once_with( + schema=certificate['schema'], name=certificate['metadata']['name'], + is_secret=True, **{'metadata.layeringDefinition.abstract': False}) + + def test_layering_parent_and_child_undergo_substitution(self): + mapping = { + "_GLOBAL_DATA_1_": {"data": {"a": {"x": 1, "y": 2}}}, + "_GLOBAL_SUBSTITUTIONS_1_": [{ + "dest": { + "path": ".b" + }, + "src": { + "schema": "deckhand/Certificate/v1", + "name": "global-cert", + "path": "." 
+ } + + }], + "_SITE_DATA_1_": {"data": {"c": "need-site-secret"}}, + "_SITE_ACTIONS_1_": { + "actions": [{"method": "merge", "path": "."}]}, + "_SITE_SUBSTITUTIONS_1_": [{ + "dest": { + "path": ".c" + }, + "src": { + "schema": "deckhand/CertificateKey/v1", + "name": "site-cert", + "path": "." + } + + }], + } + doc_factory = factories.DocumentFactory(2, [1, 1]) + documents = doc_factory.gen_test(mapping, site_abstract=False) + secrets_factory = factories.DocumentSecretFactory() + + global_expected = {'a': {'x': 1, 'y': 2}, 'b': 'global-secret'} + site_expected = {'a': {'x': 1, 'y': 2}, 'b': 'global-secret', + 'c': 'site-secret'} + + def _get_secret_document(*args, **kwargs): + name = kwargs['name'] + prefix = name.split('-')[0] + return secrets_factory.gen_test( + 'Certificate', 'cleartext', + data={'secret': '%s-secret' % prefix}, + name='%s' % name) + + with mock.patch.object( + secrets_manager.db_api, 'document_get', + autospec=True) as mock_document_get: + mock_document_get.side_effect = _get_secret_document + self._test_layering(documents, site_expected=site_expected, + global_expected=global_expected) + mock_document_get.assert_has_calls([ + mock.call( + schema="deckhand/Certificate/v1", name='global-cert', + is_secret=True, + **{'metadata.layeringDefinition.abstract': False}), + mock.call( + schema="deckhand/CertificateKey/v1", name='site-cert', + is_secret=True, + **{'metadata.layeringDefinition.abstract': False}) + ]) diff --git a/deckhand/tests/unit/engine/test_document_layering_negative.py b/deckhand/tests/unit/engine/test_document_layering_negative.py index 65491866..a1d751ab 100644 --- a/deckhand/tests/unit/engine/test_document_layering_negative.py +++ b/deckhand/tests/unit/engine/test_document_layering_negative.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +import mock + from deckhand.engine import layering from deckhand import errors from deckhand import factories @@ -63,107 +65,115 @@ class TestDocumentLayeringNegative( self._test_layering( documents, exception_expected=errors.MissingDocumentKey) - def test_layering_without_layering_policy(self): - doc_factory = factories.DocumentFactory(2, [1, 1]) - documents = doc_factory.gen_test({}, site_abstract=False) - documents.pop(0) # First doc is layering policy. - - self.assertRaises(errors.LayeringPolicyNotFound, - layering.DocumentLayering, documents) - - def test_layering_with_broken_layer_order(self): + @mock.patch.object(layering, 'LOG', autospec=True) + def test_layering_with_broken_layer_order(self, mock_log): doc_factory = factories.DocumentFactory(2, [1, 1]) documents = doc_factory.gen_test({}, site_abstract=False) + layering_policy = self._extract_layering_policy(documents) broken_layer_orders = [ ['site', 'region', 'global'], ['broken', 'global'], ['broken'], ['site', 'broken']] for broken_layer_order in broken_layer_orders: - documents[0]['data']['layerOrder'] = broken_layer_order + layering_policy['data']['layerOrder'] = broken_layer_order # The site will not be able to find a correct parent. 
-            self.assertRaises(errors.MissingDocumentParent,
-                              layering.DocumentLayering, documents)
+            layering.DocumentLayering(layering_policy, documents)
+            self.assertRegexpMatches(mock_log.info.mock_calls[0][1][0],
+                                     'Could not find parent for document .*')
+            mock_log.info.reset_mock()

-    def test_layering_child_with_invalid_parent_selector(self):
+    @mock.patch.object(layering, 'LOG', autospec=True)
+    def test_layering_child_with_invalid_parent_selector(self, mock_log):
         doc_factory = factories.DocumentFactory(2, [1, 1])
         documents = doc_factory.gen_test({}, site_abstract=False)
+        layering_policy = self._extract_layering_policy(documents)

         for parent_selector in ({'key2': 'value2'}, {'key1': 'value2'}):
             documents[-1]['metadata']['layeringDefinition'][
                 'parentSelector'] = parent_selector

-            self.assertRaises(errors.MissingDocumentParent,
-                              layering.DocumentLayering, documents)
+            layering.DocumentLayering(layering_policy, documents)
+            self.assertRegexpMatches(mock_log.info.mock_calls[0][1][0],
+                                     'Could not find parent for document .*')
+            mock_log.info.reset_mock()

-    def test_layering_unreferenced_parent_label(self):
+    @mock.patch.object(layering, 'LOG', autospec=True)
+    def test_layering_unreferenced_parent_label(self, mock_log):
         doc_factory = factories.DocumentFactory(2, [1, 1])
         documents = doc_factory.gen_test({}, site_abstract=False)
+        layering_policy = self._extract_layering_policy(documents)

         for parent_label in ({'key2': 'value2'}, {'key1': 'value2'}):
-            # Second doc is the global doc, or parent.
-            documents[1]['metadata']['labels'] = [parent_label]
+            # First doc is the global doc, or parent.
+            documents[0]['metadata']['labels'] = [parent_label]

-            self.assertRaises(errors.MissingDocumentParent,
-                              layering.DocumentLayering, documents)
+            layering.DocumentLayering(layering_policy, documents)
+            self.assertRegexpMatches(mock_log.info.mock_calls[0][1][0],
+                                     'Could not find parent for document .*')
+            mock_log.info.reset_mock()

     def test_layering_duplicate_parent_selector_2_layer(self):
         # Validate that documents belonging to the same layer cannot have the
         # same unique parent identifier referenced by `parentSelector`.
         doc_factory = factories.DocumentFactory(2, [1, 1])
         documents = doc_factory.gen_test({}, site_abstract=False)
-        documents.append(documents[1])  # Copy global layer.
+        layering_policy = self._extract_layering_policy(documents)
+        documents.append(documents[0])  # Copy global layer.

         self.assertRaises(errors.IndeterminateDocumentParent,
-                          layering.DocumentLayering, documents)
+                          layering.DocumentLayering, layering_policy,
+                          documents)

     def test_layering_duplicate_parent_selector_3_layer(self):
         # Validate that documents belonging to the same layer cannot have the
         # same unique parent identifier referenced by `parentSelector`.
         doc_factory = factories.DocumentFactory(3, [1, 1, 1])
         documents = doc_factory.gen_test({}, site_abstract=False)
+        layering_policy = self._extract_layering_policy(documents)

-        # 1 is global layer, 2 is region layer.
-        for idx in (1, 2):
+        # 0 is global layer, 1 is region layer.
+        for idx in (0, 1):
             documents.append(documents[idx])
             self.assertRaises(errors.IndeterminateDocumentParent,
-                              layering.DocumentLayering, documents)
+                              layering.DocumentLayering, layering_policy,
+                              documents)
             documents.pop(-1)  # Remove the just-appended duplicate.

-    def test_layering_document_references_itself(self):
-        # Test that a parentSelector cannot reference the document itself
-        # without an error being raised.
+    @mock.patch.object(layering, 'LOG', autospec=True)
+    def test_layering_document_references_itself(self, mock_log):
+        # Test that a parentSelector referencing the document itself
+        # results in no parent being found (an info message is logged).
         doc_factory = factories.DocumentFactory(3, [1, 1, 1])
         documents = doc_factory.gen_test({}, site_abstract=False)
+        layering_policy = self._extract_layering_policy(documents)

         self_ref = {"self": "self"}
         documents[2]['metadata']['labels'] = self_ref
         documents[2]['metadata']['layeringDefinition'][
             'parentSelector'] = self_ref

-        # Escape '[' and ']' for regex to work.
-        expected_err = ("Missing parent document for document %s."
-                        % documents[2]).replace('[', '\[').replace(']', '\]')
-        self.assertRaisesRegex(errors.MissingDocumentParent, expected_err,
-                               layering.DocumentLayering, documents)
+        layering.DocumentLayering(layering_policy, documents)
+        self.assertRegexpMatches(mock_log.info.mock_calls[0][1][0],
+                                 'Could not find parent for document .*')

-    def test_layering_documents_with_different_schemas(self):
-        """Validate that attempting to layer documents with different
-        schemas results in errors.
-        """
+    @mock.patch.object(layering, 'LOG', autospec=True)
+    def test_layering_documents_with_different_schemas(self, mock_log):
+        """Validate that layering documents with different schemas results
+        in no parent being found for the mismatched documents.
+        """
         doc_factory = factories.DocumentFactory(3, [1, 1, 1])
         documents = doc_factory.gen_test({})
+        layering_policy = self._extract_layering_policy(documents)

         # Region and site documents should result in no parent being found
         # since their schemas will not match that of their parent's.
-        for idx in range(2, 4):  # Only region/site have parent.
+        for idx in range(1, 3):  # Only region/site have parent.
             prev_schema = documents[idx]['schema']
             documents[idx]['schema'] = test_utils.rand_name('schema')

-            # Escape '[' and ']' for regex to work.
-            expected_err = (
-                "Missing parent document for document %s."
-                % documents[idx]).replace('[', '\[').replace(']', '\]')
-            self.assertRaisesRegex(errors.MissingDocumentParent, expected_err,
-                                   layering.DocumentLayering, documents)
+            layering.DocumentLayering(layering_policy, documents)
+            self.assertRegexpMatches(mock_log.info.mock_calls[0][1][0],
+                                     'Could not find parent for document .*')
+            mock_log.info.reset_mock()

             # Restore schema for next test run.
             documents[idx]['schema'] = prev_schema
diff --git a/deckhand/utils.py b/deckhand/utils.py
index 8f7ae434..d202df16 100644
--- a/deckhand/utils.py
+++ b/deckhand/utils.py
@@ -12,10 +12,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import ast
 import re
 import string

 import jsonpath_ng
+import six

 from deckhand import errors

@@ -167,3 +169,101 @@ def multisort(data, sort_by=None, order_by=None):
     return sorted(data, key=lambda d: [
         jsonpath_parse(d, sort_key) for sort_key in sort_by],
         reverse=True if order_by == 'desc' else False)
+
+
+def _add_microversion(value):
+    """Hack for coercing all Deckhand schema fields (``schema`` and
+    ``metadata.schema``) into ending with a microversion, e.g. v1.0
+    rather than v1.
+    """
+    microversion_re = r'^.*/.*/v[0-9]+$'
+    if re.match(microversion_re, value):
+        return value + '.0'
+    return value
+
+
+def deepfilter(dct, **filters):
+    """Match ``dct`` against all the filters in ``filters``.
+
+    Check whether ``dct`` matches all the filters in ``filters``. The filters
+    can reference nested attributes, i.e. attributes contained within other
+    dictionaries inside ``dct``.
+
+    Useful for querying whether ``metadata.name`` or
+    ``metadata.layeringDefinition.layerOrder`` match specific values.
+
+    :param dct: The dictionary to check against all the ``filters``.
+    :type dct: dict
+    :param filters: Dictionary of key-value pairs used for filtering out
+        unwanted results.
+    :type filters: dict
+    :returns: True if the dictionary satisfies all the filters, else False.
+    """
+    def _transform_filter_bool(filter_val):
+        # Transform string literals like 'true'/'false' into booleans.
+        if isinstance(filter_val, six.string_types):
+            try:
+                filter_val = ast.literal_eval(filter_val.title())
+            except ValueError:
+                # If not True/False, set to None to avoid matching
+                # `actual_val`, which is always boolean here.
+                filter_val = None
+        return filter_val
+
+    for filter_key, filter_val in filters.items():
+        # If the filter is a list of possibilities, e.g. ['site', 'region']
+        # for metadata.layeringDefinition.layer, check whether the actual
+        # value is present.
+        if isinstance(filter_val, (list, tuple)):
+            actual_val = jsonpath_parse(dct, filter_key, match_all=True)
+            if not actual_val:
+                return False
+
+            if isinstance(actual_val[0], bool):
+                filter_val = [_transform_filter_bool(x) for x in filter_val]
+
+            if not set(actual_val).intersection(set(filter_val)):
+                return False
+        else:
+            actual_val = jsonpath_parse(dct, filter_key)
+
+            # If both the filter value and the actual value in the doc
+            # are dictionaries, check whether the filter dict is a subset
+            # of the actual dict.
+            if (isinstance(actual_val, dict)
+                and isinstance(filter_val, dict)):
+                is_subset = set(
+                    filter_val.items()).issubset(set(actual_val.items()))
+                if not is_subset:
+                    return False
+            # Else compare the filter value against the actual value
+            # directly.
+            else:
+                # Filtering by schema must support namespace matching
+                # (e.g. schema=promenade), such that all documents in the
+                # promenade namespace are returned, as well as
+                # namespace/kind matching (e.g. schema=promenade/Node),
+                # such that all versions of documents with namespace
+                # promenade and kind Node are returned.
+                if isinstance(actual_val, bool):
+                    filter_val = _transform_filter_bool(filter_val)
+
+                if filter_key in ['schema', 'metadata.schema']:
+                    actual_val = _add_microversion(actual_val)
+                    filter_val = _add_microversion(filter_val)
+                    parts = actual_val.split('/')[:2]
+                    if len(parts) == 2:
+                        actual_namespace, actual_kind = parts
+                    elif len(parts) == 1:
+                        actual_namespace = parts[0]
+                        actual_kind = ''
+                    else:
+                        actual_namespace = actual_kind = ''
+                    actual_minus_version = actual_namespace + '/' + actual_kind
+
+                    if not (filter_val == actual_val or
+                            actual_minus_version == filter_val or
+                            actual_namespace == filter_val):
+                        return False
+                else:
+                    if actual_val != filter_val:
+                        return False
+
+    return True
diff --git a/doc/source/validation.rst b/doc/source/validation.rst
index a7f50004..21ac7903 100644
--- a/doc/source/validation.rst
+++ b/doc/source/validation.rst
@@ -118,7 +118,7 @@ Validation Module
 Validation Schemas
 ==================

-Below are the schemas deckhand uses to validate documents.
+Below are the schemas Deckhand uses to validate documents.

 .. automodule:: deckhand.engine.schema.base_schema
    :members: schema
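
Reviewer note: below is a minimal, illustrative sketch of how the new ``deckhand.utils.deepfilter`` helper behaves; it is not part of the change itself. The sample document, its field values, and the filter keys are hypothetical, and the sketch assumes the patched ``deckhand`` package is importable.

# Illustrative only: a hypothetical document run through deepfilter.
from deckhand import utils

doc = {
    'schema': 'deckhand/Certificate/v1',
    'metadata': {
        'name': 'global-cert',
        'layeringDefinition': {'abstract': False, 'layer': 'global'},
    },
    'data': {'secret': 'global-secret'},
}

# Nested attributes are referenced with dotted filter keys.
assert utils.deepfilter(doc, **{'metadata.name': 'global-cert'})

# String literals such as 'false' are coerced to booleans before being
# compared against boolean document values.
assert utils.deepfilter(
    doc, **{'metadata.layeringDefinition.abstract': 'false'})

# A list filter matches if the actual value is among the possibilities.
assert utils.deepfilter(
    doc, **{'metadata.layeringDefinition.layer': ['site', 'global']})

# Schema filters match on namespace, namespace/kind, or the exact
# (microversion-normalized) schema.
assert utils.deepfilter(doc, schema='deckhand')
assert utils.deepfilter(doc, schema='deckhand/Certificate')
assert utils.deepfilter(doc, schema='deckhand/Certificate/v1')

# The first non-matching filter short-circuits the result to False.
assert not utils.deepfilter(doc, **{'metadata.name': 'site-cert'})

Because ``deepfilter`` short-circuits on the first non-matching filter, callers can pass an arbitrary set of query parameters in a single call per document.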