Document replacement: Update Document unique constraint
This updates the unique constraint for the Document model from schema/metadata.name to schema/metadata.name/layer, which is a prerequisite for the document replacement implementation. The remainder of the changes is taken care of in the child PS (particularly those related to the layering module): https://review.gerrithub.io/#/c/403888/

Change-Id: Icc4f4960b3a3951f649c7886dbe0bce77341a9f7
parent 9cae8f3b2b
commit 1264e5af6c
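The change is easiest to see as a document's identity growing from a 2-tuple to a 3-tuple. A minimal sketch of the before/after keys, assuming plain dictionary documents (the field values are illustrative, not taken from this change):

    # Before: two documents with the same schema and metadata.name could
    # not coexist in a revision, even when they lived on different layers.
    old_key = (doc['schema'], doc['metadata']['name'])

    # After: the layer participates in the identity, so the same
    # schema/name pair may appear once per layer.
    new_key = (doc['schema'],
               doc['metadata']['layeringDefinition']['layer'],
               doc['metadata']['name'])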
@@ -16,8 +16,8 @@ import barbicanclient

 from oslo_log import log as logging

 from deckhand.barbican import client_wrapper
+from deckhand.common import utils
 from deckhand import errors
-from deckhand import utils

 LOG = logging.getLogger(__name__)
@@ -12,9 +12,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import collections
+import functools
+import inspect
+
 from oslo_serialization import jsonutils as json

-from deckhand import utils
+from deckhand.common import utils


 class DocumentDict(dict):
@@ -28,6 +32,22 @@ class DocumentDict(dict):

     """

+    @classmethod
+    def from_dict(self, documents):
+        """Convert a list of documents or single document into an instance of
+        this class.
+
+        :param documents: Documents to wrap in this class.
+        :type documents: list or dict
+        """
+        if isinstance(documents, collections.Iterable):
+            return [DocumentDict(d) for d in documents]
+        return DocumentDict(documents)
+
+    @property
+    def meta(self):
+        return (self.schema, self.layer, self.name)
+
     @property
     def is_abstract(self):
         return utils.jsonpath_parse(
@@ -58,7 +78,7 @@ class DocumentDict(dict):

     @property
     def name(self):
-        return utils.jsonpath_parse(self, 'metadata.name') or ''
+        return utils.jsonpath_parse(self, 'metadata.name')

     @property
     def layering_definition(self):
@@ -105,3 +125,20 @@ class DocumentDict(dict):

     def __hash__(self):
         return hash(json.dumps(self, sort_keys=True))
+
+
+def wrap_documents(f):
+    """Decorator to wrap dictionary-formatted documents in instances of
+    ``DocumentDict``.
+    """
+    @functools.wraps(f)
+    def wrapper(*args, **kwargs):
+        fargs = inspect.getargspec(f)
+        if 'documents' in fargs[0]:
+            pos = fargs[0].index('documents')
+            new_args = list(args)
+            if new_args[pos] and not isinstance(
+                    new_args[pos][0], DocumentDict):
+                new_args[pos] = DocumentDict.from_dict(args[pos])
+        return f(*tuple(new_args), **kwargs)
+    return wrapper
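The hunks above carry the core plumbing: `DocumentDict.meta` exposes the new (schema, layer, name) identity, `from_dict` wraps raw dictionaries, and `wrap_documents` does that wrapping transparently for any function taking a `documents` argument. A usage sketch, assuming the existing schema/layer/name properties resolve the usual JSON paths (the sample document is hypothetical):

    from deckhand.common.document import DocumentDict

    doc = DocumentDict({
        'schema': 'example/Kind/v1',
        'metadata': {
            'name': 'doc-a',
            'layeringDefinition': {'layer': 'site'},
        },
        'data': {},
    })

    # meta is the 3-tuple identity used for comparisons and as dict keys.
    assert doc.meta == ('example/Kind/v1', 'site', 'doc-a')

    # from_dict wraps a list of raw dictionaries uniformly.
    wrapped = DocumentDict.from_dict([dict(doc)])
    assert wrapped[0].meta == doc.meta

Because `__hash__` serializes the whole document deterministically, wrapped documents can also be collected into sets or used as dictionary keys directly.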
@@ -16,6 +16,7 @@ import falcon
 from oslo_log import log as logging
 import six

+from deckhand.common import utils
 from deckhand.control import base as api_base
 from deckhand.control import common
 from deckhand.control.views import document as document_view
@@ -25,7 +26,6 @@ from deckhand.engine import layering
 from deckhand import errors
 from deckhand import policy
 from deckhand import types
-from deckhand import utils

 LOG = logging.getLogger(__name__)
@@ -14,13 +14,13 @@

 import falcon

+from deckhand.common import utils
 from deckhand.control import base as api_base
 from deckhand.control import common
 from deckhand.control.views import revision as revision_view
 from deckhand.db.sqlalchemy import api as db_api
 from deckhand import errors
 from deckhand import policy
-from deckhand import utils


 class RevisionsResource(api_base.BaseResource):
@@ -14,9 +14,9 @@

 import collections

+from deckhand.common import utils
 from deckhand.control import common
 from deckhand import types
-from deckhand import utils


 class ViewBuilder(common.ViewBuilder):
@@ -28,10 +28,11 @@ from oslo_serialization import jsonutils as json
 import sqlalchemy.orm as sa_orm
 from sqlalchemy import text

+from deckhand.common import document as document_wrapper
+from deckhand.common import utils
 from deckhand.db.sqlalchemy import models
 from deckhand import errors
 from deckhand import types
-from deckhand import utils

 LOG = logging.getLogger(__name__)
 CONF = cfg.CONF
@@ -133,20 +134,27 @@ def require_unique_document_schema(schema=None):
         def wrapper(bucket_name, documents, *args, **kwargs):
             existing_documents = revision_documents_get(
                 schema=schema, deleted=False, include_history=False)
-            existing_document_names = [x['name'] for x in existing_documents]
+            existing_document_names = [
+                x.meta for x in existing_documents
+            ]
             conflicting_names = [
-                x['metadata']['name'] for x in documents
-                if x['metadata']['name'] not in existing_document_names and
-                x['schema'].startswith(schema)]
+                x.meta for x in documents
+                if x.meta not in existing_document_names and
+                x.schema.startswith(schema)
+            ]
             if existing_document_names and conflicting_names:
                 raise errors.SingletonDocumentConflict(
-                    document=existing_document_names[0],
-                    conflict=conflicting_names)
+                    schema=existing_document_names[0][0],
+                    layer=existing_document_names[0][1],
+                    name=existing_document_names[0][2],
+                    conflict=', '.join(["[%s, %s] %s" % (x[0], x[1], x[2])
+                                        for x in conflicting_names]))
             return f(bucket_name, documents, *args, **kwargs)
         return wrapper
     return decorator


+@document_wrapper.wrap_documents
 @require_unique_document_schema(types.LAYERING_POLICY_SCHEMA)
 def documents_create(bucket_name, documents, validations=None,
                      session=None):
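With identities as tuples, the singleton check above reduces to set membership over `meta`. A standalone sketch of that comparison (hypothetical data; the helper is illustrative, not part of the change):

    import collections

    Doc = collections.namedtuple('Doc', ('schema', 'meta'))

    def find_singleton_conflicts(existing, incoming, schema_prefix):
        # A document conflicts when its (schema, layer, name) identity is
        # not already registered but its schema matches the singleton
        # schema prefix being enforced.
        existing_meta = {d.meta for d in existing}
        return [d.meta for d in incoming
                if d.meta not in existing_meta and
                d.schema.startswith(schema_prefix)]

    existing = [Doc('deckhand/LayeringPolicy/v1',
                    ('deckhand/LayeringPolicy/v1', 'site', 'lp-1'))]
    incoming = [Doc('deckhand/LayeringPolicy/v1',
                    ('deckhand/LayeringPolicy/v1', 'site', 'lp-2'))]
    print(find_singleton_conflicts(existing, incoming,
                                   'deckhand/LayeringPolicy'))
    # -> [('deckhand/LayeringPolicy/v1', 'site', 'lp-2')]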
@@ -162,8 +170,8 @@ def documents_create(bucket_name, documents, validations=None,
     :param validation_policies: List of validation policies to be created.
     :param session: Database session object.
     :returns: List of created documents in dictionary format.
-    :raises DocumentExists: If the (document.schema, document.metadata.name)
-        already exists in another bucket.
+    :raises DocumentExists: If the document already exists in the DB for any
+        bucket.
     """
     session = session or get_session()
     documents_to_create = _documents_create(bucket_name, documents, session)
@@ -173,12 +181,13 @@ def documents_create(bucket_name, documents, validations=None,
     # The documents to be deleted are computed by comparing the documents for
     # the previous revision (if it exists) that belong to `bucket_name` with
     # `documents`: the difference between the former and the latter.
-    document_history = [(d['schema'], d['name'])
-                        for d in revision_documents_get(
-                            bucket_name=bucket_name)]
+    document_history = [
+        d for d in revision_documents_get(bucket_name=bucket_name)
+    ]
     documents_to_delete = [
-        h for h in document_history if h not in
-        [(d['schema'], d['metadata']['name']) for d in documents]]
+        h for h in document_history if h.meta not in [
+            d.meta for d in documents]
+    ]

     # Only create a revision if any docs have been created, changed or deleted.
     if any([documents_to_create, documents_to_delete]):
@@ -197,10 +206,11 @@ def documents_create(bucket_name, documents, validations=None,
         doc = models.Document()
         with session.begin():
             # Store bare minimum information about the document.
-            doc['schema'] = d[0]
-            doc['name'] = d[1]
+            doc['schema'] = d.schema
+            doc['name'] = d.name
+            doc['layer'] = d.layer
             doc['data'] = {}
-            doc['_metadata'] = {}
+            doc['meta'] = d.metadata
             doc['data_hash'] = _make_hash({})
             doc['metadata_hash'] = _make_hash({})
             doc['bucket_id'] = bucket['id']
@@ -211,8 +221,8 @@ def documents_create(bucket_name, documents, validations=None,
                 doc.save(session=session)
             except db_exception.DBDuplicateEntry:
                 raise errors.DuplicateDocumentExists(
-                    schema=doc['schema'], name=doc['name'],
-                    bucket=bucket['name'])
+                    schema=doc['schema'], layer=doc['layer'],
+                    name=doc['name'], bucket=bucket['name'])
             doc.safe_delete(session=session)
             deleted_documents.append(doc)
             resp.append(doc.to_dict())
@@ -224,13 +234,15 @@ def documents_create(bucket_name, documents, validations=None,
         with session.begin():
             doc['bucket_id'] = bucket['id']
             doc['revision_id'] = revision['id']
+            if not doc.get('orig_revision_id'):
+                doc['orig_revision_id'] = doc['revision_id']

             try:
                 doc.save(session=session)
             except db_exception.DBDuplicateEntry:
                 raise errors.DuplicateDocumentExists(
-                    schema=doc['schema'], name=doc['name'],
-                    bucket=bucket['name'])
+                    schema=doc['schema'], layer=doc['layer'],
+                    name=doc['name'], bucket=bucket['name'])

             resp.append(doc.to_dict())
     # NOTE(fmontei): The orig_revision_id is not copied into the
@@ -241,31 +253,31 @@ def documents_create(bucket_name, documents, validations=None,
     return resp


-def _documents_create(bucket_name, values_list, session=None):
-    values_list = copy.deepcopy(values_list)
+def _documents_create(bucket_name, documents, session=None):
+    documents = copy.deepcopy(documents)
     session = session or get_session()
-    filters = ('name', 'schema')
+    filters = ('name', 'schema', 'layer')
     changed_documents = []

-    def _document_create(values):
-        document = models.Document()
+    def _document_create(document):
+        model = models.Document()
         with session.begin():
-            document.update(values)
-        return document
+            model.update(document)
+        return model

-    for values in values_list:
-        values.setdefault('data', {})
-        values = _fill_in_metadata_defaults(values)
+    for document in documents:
+        document.setdefault('data', {})
+        document = _fill_in_metadata_defaults(document)

         # Hash the document's metadata and data to later efficiently check
         # whether those data have changed.
-        values['data_hash'] = _make_hash(values['data'])
-        values['metadata_hash'] = _make_hash(values['_metadata'])
+        document['data_hash'] = _make_hash(document['data'])
+        document['metadata_hash'] = _make_hash(document['meta'])

         try:
             existing_document = document_get(
                 raw_dict=True, deleted=False, revision_id='latest',
-                **{x: values[x] for x in filters})
+                **{x: document[x] for x in filters})
         except errors.DocumentNotFound:
             # Ignore bad data at this point. Allow creation to bubble up the
             # error related to bad data.
@@ -273,49 +285,51 @@ def _documents_create(bucket_name, documents, session=None):

         if existing_document:
             # If the document already exists in another bucket, raise an error.
-            # Ignore redundant validation policies as they are allowed to exist
-            # in multiple buckets.
-            if (existing_document['bucket_name'] != bucket_name and
-                    not existing_document['schema'].startswith(
-                        types.VALIDATION_POLICY_SCHEMA)):
+            if existing_document['bucket_name'] != bucket_name:
                 raise errors.DuplicateDocumentExists(
                     schema=existing_document['schema'],
                     name=existing_document['name'],
+                    layer=existing_document['layer'],
                     bucket=existing_document['bucket_name'])

-            if (existing_document['data_hash'] == values['data_hash'] and
-                    existing_document['metadata_hash'] == values['metadata_hash']):
+            # By this point we know existing_document and document have the
+            # same name, schema and layer due to the filters passed to the DB
+            # query. But still want to check whether the document is precisely
+            # the same one by comparing metadata/data hashes.
+            if (existing_document['data_hash'] == document['data_hash'] and
+                    existing_document['metadata_hash'] == document[
+                        'metadata_hash']):
                 # Since the document has not changed, reference the original
                 # revision in which it was created. This is necessary so that
                 # the correct revision history is maintained.
                 if existing_document['orig_revision_id']:
-                    values['orig_revision_id'] = existing_document[
+                    document['orig_revision_id'] = existing_document[
                         'orig_revision_id']
                 else:
-                    values['orig_revision_id'] = existing_document[
+                    document['orig_revision_id'] = existing_document[
                         'revision_id']

     # Create all documents, even unchanged ones, for the current revision. This
     # makes the generation of the revision diff a lot easier.
-    for values in values_list:
-        doc = _document_create(values)
+    for document in documents:
+        doc = _document_create(document)
         changed_documents.append(doc)

     return changed_documents


 def _fill_in_metadata_defaults(values):
-    values['_metadata'] = values.pop('metadata')
-    values['name'] = values['_metadata']['name']
+    values['meta'] = values.pop('metadata')
+    values['name'] = values['meta']['name']

-    if not values['_metadata'].get('storagePolicy', None):
-        values['_metadata']['storagePolicy'] = 'cleartext'
+    if not values['meta'].get('storagePolicy', None):
+        values['meta']['storagePolicy'] = 'cleartext'

-    if 'layeringDefinition' not in values['_metadata']:
-        values['_metadata'].setdefault('layeringDefinition', {})
+    values['meta'].setdefault('layeringDefinition', {})
+    values['layer'] = values['meta']['layeringDefinition'].get('layer')

-    if 'abstract' not in values['_metadata']['layeringDefinition']:
-        values['_metadata']['layeringDefinition']['abstract'] = False
+    if 'abstract' not in values['meta']['layeringDefinition']:
+        values['meta']['layeringDefinition']['abstract'] = False

     return values
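The metadata/data hashes computed in `_documents_create` are what make the unchanged-document detection above cheap: two documents are "the same" when their (name, schema, layer) filters match and both hashes agree. A minimal illustration of the idea; `_make_hash` is named in the diff, but the sha256-over-sorted-JSON scheme shown here is an assumption:

    import hashlib
    import json

    def make_hash(obj):
        # Deterministic serialization before hashing, so logically equal
        # dicts produce equal digests regardless of key order.
        return hashlib.sha256(
            json.dumps(obj, sort_keys=True).encode('utf-8')).hexdigest()

    old = {'data': {'a': 1}, 'meta': {'name': 'doc-a'}}
    new = {'data': {'a': 1}, 'meta': {'name': 'doc-a'}}

    unchanged = (make_hash(old['data']) == make_hash(new['data']) and
                 make_hash(old['meta']) == make_hash(new['meta']))
    print(unchanged)  # True -> reuse orig_revision_id rather than fork history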
@@ -371,7 +385,7 @@ def document_get(session=None, raw_dict=False, revision_id=None, **filters):
             return d

     filters.update(nested_filters)
-    raise errors.DocumentNotFound(document=filters)
+    raise errors.DocumentNotFound(filters=filters)


 def document_get_all(session=None, raw_dict=False, revision_id=None,
@@ -420,7 +434,7 @@ def document_get_all(session=None, raw_dict=False, revision_id=None,
         if utils.deepfilter(d, **nested_filters):
             final_documents.append(d)

-    return final_documents
+    return document_wrapper.DocumentDict.from_dict(final_documents)


 ####################
@@ -486,7 +500,7 @@ def revision_get(revision_id=None, session=None):
             .one()\
             .to_dict()
     except sa_orm.exc.NoResultFound:
-        raise errors.RevisionNotFound(revision=revision_id)
+        raise errors.RevisionNotFound(revision_id=revision_id)

     revision['documents'] = _update_revision_history(revision['documents'])
@@ -506,7 +520,7 @@ def revision_get_latest(session=None):
         .order_by(models.Revision.created_at.desc())\
         .first()
     if not latest_revision:
-        raise errors.RevisionNotFound(revision='latest')
+        raise errors.RevisionNotFound(revision_id='latest')

     latest_revision = latest_revision.to_dict()
@@ -586,23 +600,24 @@ def revision_delete_all():
     raw_query("DELETE FROM revisions;")


+@document_wrapper.wrap_documents
 def _exclude_deleted_documents(documents):
     """Excludes all documents that have been deleted including all documents
     earlier in the revision history with the same ``metadata.name`` and
     ``schema`` from ``documents``.
     """
-    _documents_map = {}  # (schema, metadata.name) => should be included?
+    documents_map = {}  # (document.meta) => should be included?

     for doc in sorted(documents, key=lambda x: x['created_at']):
         if doc['deleted'] is True:
-            previous_doc = _documents_map.get((doc['schema'], doc['name']))
+            previous_doc = documents_map.get(doc.meta)
             if previous_doc:
                 if doc['deleted_at'] >= previous_doc['created_at']:
-                    _documents_map[(doc['schema'], doc['name'])] = None
+                    documents_map[doc.meta] = None
         else:
-            _documents_map[(doc['schema'], doc['name'])] = doc
+            documents_map[doc.meta] = doc

-    return [d for d in _documents_map.values() if d is not None]
+    return [d for d in documents_map.values() if d is not None]


 def _filter_revision_documents(documents, unique_only, **filters):
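Keying `documents_map` by `doc.meta` rather than by (schema, name) means a deletion on one layer can no longer shadow a same-named document on another layer. The effect in miniature (plain tuples stand in for `DocumentDict.meta`):

    documents_map = {}

    # Same schema and name, different layers: two distinct map slots.
    documents_map[('example/Kind/v1', 'global', 'doc-a')] = {'deleted': False}
    documents_map[('example/Kind/v1', 'site', 'doc-a')] = None  # deleted

    survivors = [d for d in documents_map.values() if d is not None]
    print(len(survivors))  # 1 -> the global-layer document survives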
@@ -616,7 +631,7 @@ def _filter_revision_documents(documents, unique_only, **filters):
     """
     # TODO(fmontei): Implement this as an sqlalchemy query.
     filtered_documents = {}
-    unique_filters = ('schema', 'name')
+    unique_filters = ('schema', 'name', 'layer')
     exclude_deleted = filters.pop('deleted', None) is False

     if exclude_deleted:
@@ -680,14 +695,14 @@ def revision_documents_get(revision_id=None, include_history=True,
             revision_documents.extend(
                 relevant_revision.to_dict()['documents'])
     except sa_orm.exc.NoResultFound:
-        raise errors.RevisionNotFound(revision=revision_id)
+        raise errors.RevisionNotFound(revision_id=revision_id)

     revision_documents = _update_revision_history(revision_documents)

     filtered_documents = _filter_revision_documents(
         revision_documents, unique_only, **filters)

-    return filtered_documents
+    return document_wrapper.DocumentDict.from_dict(filtered_documents)


 # NOTE(fmontei): No need to include `@require_revision_exists` decorator as
@@ -991,11 +1006,11 @@ def revision_rollback(revision_id, latest_revision, session=None):
     # Create the documents for the revision.
     for orig_document in orig_revision['documents']:
         orig_document['revision_id'] = new_revision['id']
-        orig_document['_metadata'] = orig_document.pop('metadata')
+        orig_document['meta'] = orig_document.pop('metadata')

         new_document = models.Document()
         new_document.update({x: orig_document[x] for x in (
-            'name', '_metadata', 'data', 'data_hash', 'metadata_hash',
+            'name', 'meta', 'layer', 'data', 'data_hash', 'metadata_hash',
             'schema', 'bucket_id')})
         new_document['revision_id'] = new_revision['id']
@@ -1092,7 +1107,7 @@ def validation_get_all(revision_id, session=None):
     expected_validations = set()
     for vp in validation_policies:
         expected_validations = expected_validations.union(
-            list(v['name'] for v in vp['data'].get('validations', [])))
+            list(v['name'] for v in vp.data.get('validations', [])))

     missing_validations = expected_validations - actual_validations
     extra_validations = actual_validations - expected_validations
@@ -1135,7 +1150,7 @@ def validation_get_all_entries(revision_id, val_name, session=None):
     expected_validations = set()
     for vp in validation_policies:
         expected_validations |= set(
-            v['name'] for v in vp['data'].get('validations', []))
+            v['name'] for v in vp.data.get('validations', []))

     missing_validations = expected_validations - actual_validations
     extra_validations = actual_validations - expected_validations
@@ -1169,9 +1184,9 @@ def validation_get_all_entries(revision_id, val_name, session=None):
                 'message': (
                     'The result for this validation was externally '
                     'registered but has been ignored because it is not '
-                    'found in the validations for ValidationPolicy [%s]'
-                    ' %s: %s.' % (
-                        vp['schema'], vp['metadata']['name'],
+                    'found in the validations for ValidationPolicy '
+                    '[%s, %s] %s: %s.' % (
+                        vp.schema, vp.layer, vp.name,
                         ', '.join(v['name'] for v in vp['data'].get(
                             'validations', []))
                     )
@@ -132,7 +132,7 @@ def __build_tables(blob_type_obj, blob_type_list):
             return d

     class Document(BASE, DeckhandBase):
-        UNIQUE_CONSTRAINTS = ('schema', 'name', 'revision_id')
+        UNIQUE_CONSTRAINTS = ('schema', 'layer', 'name', 'revision_id')

         __tablename__ = 'documents'
@@ -144,9 +144,10 @@ def __build_tables(blob_type_obj, blob_type_list):
         id = Column(Integer, primary_key=True)
         name = Column(String(64), nullable=False)
         schema = Column(String(64), nullable=False)
-        # NOTE(fmontei): ``metadata`` is reserved by the DB, so ``_metadata``
-        # must be used to store document metadata information in the DB.
-        _metadata = Column(blob_type_obj, nullable=False)
+        layer = Column(String(64), nullable=True)
+        # NOTE(fmontei): ``metadata`` is reserved by the DB, so ``meta`` must
+        # be used to store document metadata information in the DB.
+        meta = Column(blob_type_obj, nullable=False)
         data = Column(blob_type_obj, nullable=True)
         data_hash = Column(String, nullable=False)
         metadata_hash = Column(String, nullable=False)
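`UNIQUE_CONSTRAINTS` feeds a table-level constraint elsewhere in the model factory. Expressed directly in SQLAlchemy, the new composite key would look roughly like the sketch below (the constraint name and column sizes are assumptions, not Deckhand's actual table definition). One caveat worth noting: because `layer` is nullable, rows with a NULL layer typically bypass uniqueness enforcement in most SQL backends.

    from sqlalchemy import (Column, Integer, MetaData, String, Table,
                            UniqueConstraint)

    metadata = MetaData()

    documents = Table(
        'documents', metadata,
        Column('id', Integer, primary_key=True),
        Column('schema', String(64), nullable=False),
        Column('layer', String(64), nullable=True),
        Column('name', String(64), nullable=False),
        Column('revision_id', Integer, nullable=False),
        # layer now joins the composite key, so the same schema/name pair
        # may recur once per layer within a revision.
        UniqueConstraint('schema', 'layer', 'name', 'revision_id',
                         name='uq_documents_identity'),
    )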
@@ -178,13 +179,13 @@ def __build_tables(blob_type_obj, blob_type_list):
         def to_dict(self, raw_dict=False):
             """Convert the object into dictionary format.

-            :param raw_dict: Renames the key "_metadata" to "metadata".
+            :param raw_dict: Renames the key "meta" to "metadata".
             """
             d = super(Document, self).to_dict()
             d['bucket_name'] = self.bucket_name

             if not raw_dict:
-                d['metadata'] = d.pop('_metadata')
+                d['metadata'] = d.pop('meta')

             if 'bucket' in d:
                 d.pop('bucket')
@@ -23,11 +23,11 @@ import jsonschema
 from oslo_log import log as logging
 import six

-from deckhand.engine import document_wrapper
+from deckhand.common import document as document_wrapper
+from deckhand.common import utils
 from deckhand.engine.secrets_manager import SecretsSubstitution
 from deckhand import errors
 from deckhand import types
-from deckhand import utils

 LOG = logging.getLogger(__name__)
@@ -145,13 +145,12 @@ class GenericValidator(BaseValidator):
         else:
             if error_messages:
                 LOG.error(
-                    'Failed sanity-check validation for document [%s] %s. '
-                    'Details: %s', document.get('schema', 'N/A'),
-                    document.metadata.get('name'), error_messages)
+                    'Failed sanity-check validation for document [%s, %s] %s. '
+                    'Details: %s', document.schema, document.layer,
+                    document.name, error_messages)
                 raise errors.InvalidDocumentFormat(
-                    document_schema=document.schema,
-                    document_name=document.name,
-                    errors=', '.join(error_messages))
+                    schema=document.schema, name=document.name,
+                    layer=document.layer, errors=', '.join(error_messages))


 class DataSchemaValidator(GenericValidator):
@@ -244,6 +243,7 @@ class DataSchemaValidator(GenericValidator):
                 'schema_path': path_to_error_in_schema,
                 'name': document.name,
                 'schema': document.schema,
+                'layer': document.layer,
                 'path': path_to_error_in_document,
                 'error_section': parent_error_section,
                 'message': error.message
@@ -373,7 +373,7 @@ class DocumentValidation(object):
         self._documents = []
         self._external_data_schemas = [document_wrapper.DocumentDict(d)
                                        for d in existing_data_schemas or []]
-        data_schema_map = {d.name: d for d in self._external_data_schemas}
+        data_schema_map = {d.meta: d for d in self._external_data_schemas}

         raw_properties = ('data', 'metadata', 'schema')
@@ -391,9 +391,9 @@ class DocumentValidation(object):
                 self._external_data_schemas.append(document)
                 # If a newer version of the same DataSchema was passed in,
                 # only use the new one and discard the old one.
-                if document.name in data_schema_map:
+                if document.meta in data_schema_map:
                     self._external_data_schemas.remove(
-                        data_schema_map.pop(document.name))
+                        data_schema_map.pop(document.meta))

             self._documents.append(document)
@@ -21,13 +21,13 @@ from networkx.algorithms.dag import topological_sort
 from oslo_log import log as logging
 from oslo_log import versionutils

+from deckhand.common import document as document_wrapper
+from deckhand.common import utils
 from deckhand.engine import document_validation
-from deckhand.engine import document_wrapper
 from deckhand.engine import secrets_manager
 from deckhand.engine import utils as engine_utils
 from deckhand import errors
 from deckhand import types
-from deckhand import utils

 LOG = logging.getLogger(__name__)
@@ -191,7 +191,9 @@ class DocumentLayering(object):
                 '. Each document must have exactly 1 parent.',
                 all_children[doc], doc.name, doc.schema, doc.layer,
                 doc.parent_selector)
-            raise errors.IndeterminateDocumentParent(document=doc)
+            raise errors.IndeterminateDocumentParent(
+                name=doc.name, schema=doc.schema, layer=doc.layer,
+                found=all_children[doc])

     def _get_layering_order(self, layering_policy):
         # Pre-processing stage that removes empty layers from the
@@ -260,17 +262,18 @@ class DocumentLayering(object):
         val_errors = []
         for result in results:
             val_errors.extend(
-                [(e['schema'], e['name'], e['message'])
+                [(e['schema'], e['layer'], e['name'], e['message'])
                  for e in result['errors']])
         if val_errors:
             for error in val_errors:
                 LOG.error(
-                    'Document [%s] %s failed with pre-validation error: %s.',
-                    *error)
+                    'Document [%s, %s] %s failed with pre-validation error: '
+                    '%s.', *error)
             raise errors.InvalidDocumentFormat(
-                document_schema=', '.join(v[0] for v in val_errors),
-                document_name=', '.join(v[1] for v in val_errors),
-                errors=', '.join(v[2] for v in val_errors))
+                schema=', '.join(v[0] for v in val_errors),
+                layer=', '.join(v[1] for v in val_errors),
+                name=', '.join(v[2] for v in val_errors),
+                errors=', '.join(v[3] for v in val_errors))

     def __init__(self, documents, substitution_sources=None, validate=True,
                  fail_on_missing_sub_src=True):
@@ -433,8 +436,10 @@ class DocumentLayering(object):
             if from_child is None:
                 raise errors.MissingDocumentKey(
                     child_schema=child_data.schema,
+                    child_layer=child_data.layer,
                     child_name=child_data.name,
                     parent_schema=overall_data.schema,
+                    parent_layer=overall_data.layer,
                     parent_name=overall_data.name,
                     action=action)
@@ -454,8 +459,10 @@ class DocumentLayering(object):
             if from_child is None:
                 raise errors.MissingDocumentKey(
                     child_schema=child_data.schema,
+                    child_layer=child_data.layer,
                     child_name=child_data.name,
                     parent_schema=overall_data.schema,
+                    parent_layer=overall_data.layer,
                     parent_name=overall_data.name,
                     action=action)
@@ -21,9 +21,9 @@ from oslo_utils import uuidutils
 import six

 from deckhand.barbican import driver
-from deckhand.engine import document_wrapper
+from deckhand.common import document as document_wrapper
+from deckhand.common import utils
 from deckhand import errors
-from deckhand import utils

 CONF = cfg.CONF
 LOG = logging.getLogger(__name__)
@@ -69,6 +69,10 @@ class SecretsManager(object):
         :returns: Dictionary representation of
             ``deckhand.db.sqlalchemy.models.DocumentSecret``.
         """
+        # TODO(fmontei): Look into POSTing Deckhand metadata into Barbican's
+        # Secrets Metadata API to make it easier to track stale secrets from
+        # prior revisions that need to be deleted.
+
         encryption_type = secret_doc['metadata']['storagePolicy']
         secret_type = cls._get_secret_type(secret_doc['schema'])
@@ -216,6 +220,10 @@ class SecretsSubstitution(object):
             substitution source. Default is True.
         """

+        # This maps a 2-tuple of (schema, name) to a document from which the
+        # document.meta can be extracted which is a 3-tuple of (schema, layer,
+        # name). This is necessary since the substitution format in the
+        # document itself only provides a 2-tuple of (schema, name).
         self._substitution_sources = {}
         self._fail_on_missing_sub_src = fail_on_missing_sub_src
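The comment above pins down an asymmetry: the substitution block in a document's YAML can only name a (schema, name) pair, while the stored source document carries the full 3-tuple. A sketch of that lookup, with hypothetical helper names:

    substitution_sources = {}

    def register_source(document):
        # Index by the 2-tuple the YAML substitution block can express...
        substitution_sources[(document.schema, document.name)] = document

    def lookup_source_meta(src_schema, src_name):
        # ...and recover (schema, layer, name) from the document itself.
        document = substitution_sources.get((src_schema, src_name))
        return document.meta if document else None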
@@ -257,12 +265,12 @@ class SecretsSubstitution(object):
             documents_to_substitute.append(document)

         LOG.debug('Performing substitution on following documents: %s',
-                  ', '.join(['[%s] %s' % (d.schema, d.name)
+                  ', '.join(['[%s, %s] %s' % d.meta
                              for d in documents_to_substitute]))

         for document in documents_to_substitute:
-            LOG.debug('Checking for substitutions for document [%s] %s.',
-                      document.schema, document.name)
+            LOG.debug('Checking for substitutions for document [%s, %s] %s.',
+                      *document.meta)
             for sub in document.substitutions:
                 src_schema = sub['src']['schema']
                 src_name = sub['src']['name']
@@ -273,8 +281,8 @@ class SecretsSubstitution(object):
                         (src_schema, src_name)]
                 else:
                     message = ('Could not find substitution source document '
-                               '[%s] %s among the provided '
-                               '`substitution_sources`.', src_schema, src_name)
+                               '[%s] %s among the provided substitution '
+                               'sources.', src_schema, src_name)
                     if self._fail_on_missing_sub_src:
                         LOG.error(message)
                         raise errors.SubstitutionSourceNotFound(
@@ -302,14 +310,15 @@ class SecretsSubstitution(object):
                     except errors.BarbicanException as e:
                         LOG.error(
                             'Failed to resolve a Barbican reference for '
-                            'substitution source document [%s] %s referenced '
-                            'in document [%s] %s. Details: %s', src_schema,
-                            src_name, document.schema, document.name,
+                            'substitution source document [%s, %s] %s '
+                            'referenced in document [%s, %s] %s. Details: %s',
+                            src_schema, src_doc.layer, src_name,
+                            document.schema, document.layer, document.name,
                             e.format_message())
                         raise errors.UnknownSubstitutionError(
-                            src_schema=src_schema, src_name=src_name,
-                            document_name=document.name,
-                            document_schema=document.schema,
+                            src_schema=src_schema, src_layer=src_doc.layer,
+                            src_name=src_name, schema=document.schema,
+                            layer=document.layer, name=document.name,
                             details=e.format_message())

                 dest_path = sub['dest']['path']
@@ -330,23 +339,24 @@ class SecretsSubstitution(object):
                     else:
                         exc_message = (
                             'Failed to create JSON path "%s" in the '
-                            'destination document [%s] %s. No data was '
+                            'destination document [%s, %s] %s. No data was '
                             'substituted.', dest_path, document.schema,
-                            document.name)
-                        LOG.error(exc_message)
+                            document.layer, document.name)
                 except Exception as e:
                     LOG.error('Unexpected exception occurred while attempting '
-                              'substitution using source document [%s] %s '
-                              'referenced in [%s] %s. Details: %s', src_schema,
-                              src_name, document.schema, document.name,
+                              'substitution using source document [%s, %s] %s '
+                              'referenced in [%s, %s] %s. Details: %s',
+                              src_schema, src_name, src_doc.layer,
+                              document.schema, document.layer, document.name,
                               six.text_type(e))
                     exc_message = six.text_type(e)
                 finally:
                     if exc_message:
+                        LOG.error(exc_message)
                         raise errors.UnknownSubstitutionError(
-                            src_schema=src_schema, src_name=src_name,
-                            document_name=document.name,
-                            document_schema=document.schema,
+                            src_schema=src_schema, src_layer=src_doc.layer,
+                            src_name=src_name, schema=document.schema,
+                            layer=document.layer, name=document.name,
                             details=exc_message)

         yield document
@@ -175,8 +175,8 @@ class InvalidDocumentFormat(DeckhandException):

    **Troubleshoot:**
    """
-    msg_fmt = ("The provided document [%(document_schema)s] %(document_name)s "
-               "failed schema validation. Errors: %(errors)s")
+    msg_fmt = ("The provided document(s) schema=%(schema)s, layer=%(layer)s, "
+               "name=%(name)s failed schema validation. Errors: %(errors)s")
    code = 400
@@ -213,7 +213,8 @@ class IndeterminateDocumentParent(DeckhandException):

    **Troubleshoot:**
    """
-    msg_fmt = "Too many parent documents found for document %(document)s."
+    msg_fmt = ("Too many parent documents found for document [%(schema)s, "
+               "%(layer)s] %(name)s. Found: %(found)s. Expected: 1.")
    code = 400
@@ -243,8 +244,9 @@ class MissingDocumentKey(DeckhandException):
    rendered data for a document can also complicate debugging this issue.
    """
    msg_fmt = ("Missing action path in %(action)s needed for layering from "
-               "either the data section of the parent [%(parent_schema)s] "
-               "%(parent_name)s or child [%(child_schema)s] %(child_name)s "
+               "either the data section of the parent [%(parent_schema)s, "
+               "%(parent_layer)s] %(parent_name)s or child [%(child_schema)s, "
+               "%(child_layer)s] %(child_name)s "
               "document.")
    code = 400
@@ -283,7 +285,8 @@ class DocumentNotFound(DeckhandException):

    **Troubleshoot:**
    """
-    msg_fmt = ("The requested document %(document)s was not found.")
+    msg_fmt = ("The requested document using filters: %(filters)s was not "
+               "found.")
    code = 404
@@ -292,7 +295,7 @@ class RevisionNotFound(DeckhandException):

    **Troubleshoot:**
    """
-    msg_fmt = "The requested revision %(revision)s was not found."
+    msg_fmt = "The requested revision=%(revision_id)s was not found."
    code = 404
@@ -323,8 +326,8 @@ class DuplicateDocumentExists(DeckhandException):

    **Troubleshoot:**
    """
-    msg_fmt = ("Document with schema %(schema)s and metadata.name "
-               "%(name)s already exists in bucket: %(bucket)s.")
+    msg_fmt = ("Document [%(schema)s, %(layer)s] %(name)s already exists in "
+               "bucket: %(bucket)s.")
    code = 409
@@ -334,9 +337,9 @@ class SingletonDocumentConflict(DeckhandException):
    **Troubleshoot:**
    """

-    msg_fmt = ("A singleton document by the name %(document)s already "
-               "exists in the system. The new document %(conflict)s cannot be "
-               "created. To create a document with a new name, delete the "
+    msg_fmt = ("A singleton document [%(schema)s, %(layer)s] %(name)s already "
+               "exists in the system. The new document(s) %(conflict)s cannot "
+               "be created. To create a document with a new name, delete the "
               "current one first.")
    code = 409
@@ -395,7 +398,7 @@ class UnknownSubstitutionError(DeckhandException):
    **Troubleshoot:**
    """
    msg_fmt = ('An unknown exception occurred while trying to perform '
-               'substitution using source document [%(src_schema)s] '
-               '%(src_name)s contained in document [%(document_schema)s] '
-               '%(document_name)s. Details: %(details)s')
+               'substitution using source document [%(src_schema)s, '
+               '%(src_layer)s] %(src_name)s contained in document ['
+               '%(schema)s, %(layer)s] %(name)s. Details: %(details)s')
    code = 500
@@ -93,7 +93,7 @@ class DocumentFactory(DeckhandFactory):
                "schema": "metadata/Control/v1",
                "layeringDefinition": {
                    "abstract": False,
-                    "layer": ""
+                    "layer": "layer"
                }
            },
            "schema": "deckhand/LayeringPolicy/v1"
@@ -105,7 +105,7 @@ class DocumentFactory(DeckhandFactory):
                "labels": {"": ""},
                "layeringDefinition": {
                    "abstract": False,
-                    "layer": ""
+                    "layer": "layer"
                },
                "name": "",
                "schema": "metadata/Document/v1"
@@ -168,6 +168,7 @@ tests:
              schema_path: ".properties.b.maximum"
              name: bad
              schema: example/Doc/v1
+             layer: site
              path: ".data.b"
              message: 177 is greater than the maximum of 100
        name: deckhand-schema-validation
@@ -219,6 +220,7 @@ tests:
        $.[0].message:
          - errors:
            - name: bad
+              layer: site
              schema: example/Doc/v1
              path: .data.b
              schema_path: .properties.b.maximum
@@ -12,8 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+from deckhand.common import utils
 from deckhand.tests.unit import base as test_base
-from deckhand import utils


 class TestUtils(test_base.DeckhandTestCase):
@@ -17,6 +17,7 @@ import os

 import mock

+from deckhand.common import utils
 from deckhand.control import api
 from deckhand.control import buckets
 from deckhand.control import health
@@ -28,7 +29,6 @@ from deckhand.control import rollback
 from deckhand.control import validations
 from deckhand.control import versions
 from deckhand.tests.unit import base as test_base
-from deckhand import utils


 class TestApi(test_base.DeckhandTestCase):
@@ -184,8 +184,11 @@ class TestBucketsControllerNegative(test_base.BaseControllerTest):

        # Validate that a layering policy with a different, conflicting name
        # raises the expected exception.
-        error_re = ('.*A singleton document by the name %s already exists in '
-                    'the system.' % payload['metadata']['name'])
+        error_re = (r'.*A singleton document \[%s, %s\] %s already exists in '
+                    'the system.' % (payload['schema'],
+                                     payload['metadata']['layeringDefinition'][
+                                         'layer'],
+                                     payload['metadata']['name']))
        payload['metadata']['name'] = test_utils.rand_name('layering-policy')
        resp = self.app.simulate_put(
            '/api/v1.0/buckets/mop/documents',
@@ -599,6 +599,7 @@ class TestValidationsControllerPostValidate(ValidationsControllerBaseTest):
                'schema': doc_to_test['schema']
            },
            'name': 'test_doc',
+            'layer': 'global',
            'path': '.data',
            'schema': 'example/foo/v1',
            'message': "'b' is a required property",
@@ -607,6 +608,7 @@ class TestValidationsControllerPostValidate(ValidationsControllerBaseTest):
        }, {
            'error_section': {'a': 'fail'},
            'name': 'test_doc',
+            'layer': 'global',
            'path': '.data.a',
            'schema': 'example/foo/v1',
            'message': "'fail' is not of type 'integer'",
@@ -644,15 +646,18 @@ class TestValidationsControllerPostValidate(ValidationsControllerBaseTest):
                },
                'required': ['a']
            }

        expected_errors = [{
            'error_section': {'a': 'fail'},
-            'name': 'test_doc',
+            'name': 'fail_doc',
+            'layer': 'global',
            'path': '.data.a',
            'schema': 'example/foo/v1',
            'message': "'fail' is not of type 'integer'",
+            'validation_schema': schema_to_use,
+            'schema_path': '.properties.a.type'
        }]

        data_schema = data_schema_factory.gen_test(
            metadata_name, data=schema_to_use)
@@ -707,15 +712,6 @@ class TestValidationsControllerPostValidate(ValidationsControllerBaseTest):
            headers={'Content-Type': 'application/x-yaml'})
        self.assertEqual(200, resp.status_code)
        body = yaml.safe_load(resp.text)
-        expected_errors = [{
-            'error_section': {'a': 'fail'},
-            'name': 'fail_doc',
-            'path': '.data.a',
-            'schema': 'example/foo/v1',
-            'message': "'fail' is not of type 'integer'",
-            'validation_schema': schema_to_use,
-            'schema_path': '.properties.a.type'
-        }]

        self.assertIn('errors', body)
        self.assertEqual(expected_errors, body['errors'])
@@ -842,6 +838,7 @@ metadata:
  name: site-deploy-ready
  layeringDefinition:
    abstract: true
+    layer: site
data:
  validations:
    - name: deckhand-schema-validation
@@ -881,6 +878,7 @@ metadata:
  name: site-deploy-ready
  layeringDefinition:
    abstract: true
+    layer: site
data:
  validations:
    - name: deckhand-schema-validation
@@ -931,6 +929,7 @@ metadata:
  name: vp-1
  layeringDefinition:
    abstract: true
+    layer: site
data:
  validations:
    - name: deckhand-schema-validation
@@ -941,6 +940,7 @@ metadata:
  name: vp-2
  layeringDefinition:
    abstract: true
+    layer: site
data:
  validations:
    - name: promenade-schema-validation
@@ -990,6 +990,7 @@ metadata:
  name: site-deploy-ready
  layeringDefinition:
    abstract: true
+    layer: site
data:
  validations:
    - name: deckhand-schema-validation
@@ -1070,6 +1071,7 @@ metadata:
  name: site-deploy-ready
  layeringDefinition:
    abstract: true
+    layer: site
data:
  validations:
    - name: deckhand-schema-validation
@@ -1125,8 +1127,11 @@ data:

        expected_msg = ('The result for this validation was externally '
                        'registered but has been ignored because it is not '
-                        'found in the validations for ValidationPolicy [%s] '
-                        '%s: %s.' % (validation_policy['schema'],
+                        'found in the validations for ValidationPolicy '
+                        '[%s, %s] %s: %s.' % (
+                            validation_policy['schema'],
+                            validation_policy['metadata'][
+                                'layeringDefinition']['layer'],
                            validation_policy['metadata']['name'],
                            types.DECKHAND_SCHEMA_VALIDATION))
        expected_errors = yaml.safe_load(VALIDATION_FAILURE_RESULT)['errors']
@@ -20,8 +20,8 @@ from deckhand.tests.unit import base

 BASE_EXPECTED_FIELDS = ("created_at", "updated_at", "deleted_at", "deleted")
 DOCUMENT_EXPECTED_FIELDS = BASE_EXPECTED_FIELDS + (
-    "id", "schema", "name", "metadata", "data", "data_hash", "metadata_hash",
-    "revision_id", "bucket_id")
+    "id", "schema", "name", "layer", "metadata", "data", "data_hash",
+    "metadata_hash", "revision_id", "bucket_id")
 REVISION_EXPECTED_FIELDS = ("id", "documents", "tags")
@@ -42,7 +42,6 @@ class TestDocuments(base.TestDbBase):

        for idx in range(len(documents)):
            retrieved_document = self.show_document(id=documents[idx]['id'])
-            self.assertIsNone(retrieved_document.pop('orig_revision_id'))
            self.assertEqual(documents[idx], retrieved_document)

    def test_create_and_get_multiple_document(self):
@@ -61,7 +60,6 @@ class TestDocuments(base.TestDbBase):
        documents = self.create_documents(bucket_name, payload)

        revision = self.show_revision(documents[0]['revision_id'])
-        self.assertIsNone(revision['documents'][0].pop('orig_revision_id'))
        self.assertEqual(3, len(revision['documents']))
        self.assertEqual(documents[0], revision['documents'][0])
@@ -97,7 +95,6 @@ class TestDocuments(base.TestDbBase):
            document['revision_id'], **filters)

        self.assertEqual(1, len(documents))
-        self.assertIsNone(documents[0].pop('orig_revision_id'))
        self.assertEqual(document, documents[0])

    def test_create_multiple_documents_and_get_revision(self):
@@ -115,7 +112,6 @@ class TestDocuments(base.TestDbBase):

        # Validate that the revision is valid.
        for document in created_documents:
-            document['orig_revision_id'] = None
            revision = self.show_revision(document['revision_id'])
            self.assertEqual(3, len(revision['documents']))
            self.assertIn(document, revision['documents'])
@@ -140,7 +136,6 @@ class TestDocuments(base.TestDbBase):
            document['revision_id'], **filters)

        self.assertEqual(1, len(filtered_documents))
-        self.assertIsNone(filtered_documents[0].pop('orig_revision_id'))
        self.assertEqual(document, filtered_documents[0])

    def test_create_certificate(self):
@@ -205,7 +200,6 @@ class TestDocuments(base.TestDbBase):
        self.assertTrue(documents[0]['deleted_at'])
        self.assertEqual(documents[0]['schema'], payload['schema'])
        self.assertEqual(documents[0]['name'], payload['metadata']['name'])
-        self.assertEmpty(documents[0]['metadata'])
        self.assertEmpty(documents[0]['data'])

    def test_delete_all_documents(self):
@@ -228,7 +222,6 @@ class TestDocuments(base.TestDbBase):
        for deleted_document in deleted_documents:
            self.assertTrue(deleted_document['deleted'])
            self.assertTrue(deleted_document['deleted_at'])
-            self.assertEmpty(deleted_document['metadata'])
            self.assertEmpty(deleted_document['data'])

    def test_delete_and_create_document_in_same_payload(self):
@@ -247,7 +240,6 @@ class TestDocuments(base.TestDbBase):
        # Check that deleted doc is formatted correctly.
        self.assertTrue(documents[0]['deleted'])
        self.assertTrue(documents[0]['deleted_at'])
-        self.assertEmpty(documents[0]['metadata'])
        self.assertEmpty(documents[0]['data'])
        # Check that created doc isn't deleted.
        self.assertFalse(documents[1]['deleted'])
@@ -283,5 +275,27 @@ class TestDocuments(base.TestDbBase):
            self.assertEqual(documents[idx]['schema'], payload[idx]['schema'])
            self.assertEqual(documents[idx]['name'],
                             payload[idx]['metadata']['name'])
-            self.assertEmpty(documents[idx]['metadata'])
            self.assertEmpty(documents[idx]['data'])
+
+    def test_create_duplicate_bucket(self):
+        """Validates that creating a bucket with the exact same documents
+        references the last revision via orig_revision_id, as in reality
+        the bucket is merely a revision history placeholder for the first
+        bucket, since nothing was changed.
+
+        Note that this is different from creating a duplicate document or
+        creating a duplicate document in a separate bucket.
+
+        """
+        bucket_name = test_utils.rand_name('bucket')
+        payload = base.DocumentFixture.get_minimal_fixture()
+
+        orig_documents = self.create_documents(bucket_name, [payload])
+        duplicate_documents = self.create_documents(bucket_name, [payload])
+
+        self.assertEqual(orig_documents[0]['revision_id'],
+                         duplicate_documents[0]['orig_revision_id'])
+        self.assertDictItemsAlmostEqual(
+            sorted(orig_documents, key=lambda d: d['created_at']),
+            sorted(duplicate_documents, key=lambda d: d['created_at']),
+            ignore=['created_at', 'updated_at', 'revision_id', 'id'])
@@ -61,10 +61,7 @@ class TestDocumentsNegative(base.TestDbBase):

        # Verify that the document cannot be created in another bucket.
        alt_bucket_name = test_utils.rand_name('bucket')
-        error_re = ("Document with schema %s and metadata.name "
-                    "%s already exists in bucket: %s." % (
-                        payload['schema'], payload['metadata']['name'],
-                        bucket_name))
+        error_re = r"^Document .* already exists in bucket: %s.$" % bucket_name
        self.assertRaisesRegex(
            errors.DuplicateDocumentExists, error_re, self.create_documents,
            alt_bucket_name, payload)
@@ -126,14 +126,16 @@ class TestRevisions(base.TestDbBase):

        # Validate that all revisions were deleted.
        for revision_id in all_revision_ids:
-            error_re = 'The requested revision %s was not found.' % revision_id
+            error_re = (r'^The requested revision=%s was not found.$'
+                        % revision_id)
            self.assertRaisesRegex(errors.RevisionNotFound, error_re,
                                   self.show_revision, revision_id)

        # Validate that the documents (children) were deleted.
        for doc in created_documents:
            filters = {'id': doc['id']}
-            error_re = 'The requested document %s was not found.' % filters
+            error_re = (r'^The requested document using filters: %s was not '
+                        'found.$' % filters)
            self.assertRaisesRegex(errors.DocumentNotFound, error_re,
                                   self.show_document, **filters)
@@ -266,9 +266,12 @@ class TestDocumentLayeringValidationNegative(

        layering_policy = copy.deepcopy(lp_template)
        del layering_policy['data']['layerOrder']
-        error_re = ("The provided document \[%s\] %s failed schema validation."
-                    " Errors: 'layerOrder' is a required property" % (
+        error_re = ("The provided document\(s\) schema=%s, layer=%s, name=%s "
+                    "failed schema validation. Errors: 'layerOrder' is a "
+                    "required property" % (
                        layering_policy['schema'],
+                        layering_policy['metadata']['layeringDefinition'][
+                            'layer'],
                        layering_policy['metadata']['name']))
        self.assertRaisesRegexp(
            errors.InvalidDocumentFormat, error_re, self._test_layering,
@@ -14,10 +14,10 @@

 import mock

+from deckhand.common import utils
 from deckhand.engine import document_validation
 from deckhand import factories
 from deckhand.tests.unit.engine import base as engine_test_base
-from deckhand import utils


 class TestDocumentValidation(engine_test_base.TestDocumentValidationBase):
@@ -17,9 +17,10 @@ import yaml

 import mock
 from oslo_utils import uuidutils
+import testtools

-from deckhand.db.sqlalchemy import api as db_api
 from deckhand.engine import secrets_manager
+from deckhand import errors
 from deckhand import factories
 from deckhand.tests import test_utils
 from deckhand.tests.unit.db import base as test_base
@@ -129,20 +130,17 @@ class TestSecretsSubstitution(test_base.TestDbBase):
        self.document_factory = factories.DocumentFactory(1, [1])
        self.secrets_factory = factories.DocumentSecretFactory()

-    def _test_doc_substitution(self, document_mapping, secret_documents,
+    def _test_doc_substitution(self, document_mapping, substitution_sources,
                               expected_data):
        payload = self.document_factory.gen_test(document_mapping,
                                                 global_abstract=False)
        bucket_name = test_utils.rand_name('bucket')
        documents = self.create_documents(
-            bucket_name, secret_documents + [payload[-1]])
+            bucket_name, substitution_sources + [payload[-1]])

        expected_document = copy.deepcopy(documents[-1])
        expected_document['data'] = expected_data

-        substitution_sources = db_api.document_get_all(
-            **{'metadata.layeringDefinition.abstract': False})
-
        secret_substitution = secrets_manager.SecretsSubstitution(
            substitution_sources)
        substituted_docs = list(secret_substitution.substitute_all(documents))
@@ -704,3 +702,56 @@ data:
        secret_substitution = secrets_manager.SecretsSubstitution(documents)
        substituted_docs = list(secret_substitution.substitute_all(documents))
        self.assertEqual(expected, substituted_docs[0])
+
+
+class TestSecretsSubstitutionNegative(test_base.TestDbBase):
+
+    def setUp(self):
+        super(TestSecretsSubstitutionNegative, self).setUp()
+        self.document_factory = factories.DocumentFactory(1, [1])
+        self.secrets_factory = factories.DocumentSecretFactory()
+
+    def _test_secrets_substitution(self, secret_type, expected_exception):
+        secret_ref = ("http://127.0.0.1/key-manager/v1/secrets/%s"
+                      % test_utils.rand_uuid_hex())
+        certificate = self.secrets_factory.gen_test(
+            'Certificate', secret_type, data=secret_ref)
+        certificate['metadata']['name'] = 'example-cert'
+
+        document_mapping = {
+            "_GLOBAL_SUBSTITUTIONS_1_": [{
+                "dest": {
+                    "path": ".chart.values.tls.certificate"
+                },
+                "src": {
+                    "schema": "deckhand/Certificate/v1",
+                    "name": "example-cert",
+                    "path": "."
+                }
+
+            }]
+        }
+        payload = self.document_factory.gen_test(document_mapping,
+                                                 global_abstract=False)
+        bucket_name = test_utils.rand_name('bucket')
+        documents = self.create_documents(
+            bucket_name, [certificate] + [payload[-1]])
+
+        secrets_substitution = secrets_manager.SecretsSubstitution(
+            [certificate])
+        with testtools.ExpectedException(expected_exception):
+            next(secrets_substitution.substitute_all(documents))
+
+    @mock.patch.object(secrets_manager, 'SecretsManager', autospec=True)
+    def test_barbican_exception_raises_unknown_error(
+            self, mock_secrets_manager):
+        mock_secrets_manager.get.side_effect = errors.BarbicanException
+        self._test_secrets_substitution(
+            'encrypted', errors.UnknownSubstitutionError)
+
+    @mock.patch('deckhand.engine.secrets_manager.utils', autospec=True)
+    def test_generic_exception_raises_unknown_error(
+            self, mock_utils):
+        mock_utils.jsonpath_replace.side_effect = Exception('test')
+        self._test_secrets_substitution(
+            'cleartext', errors.UnknownSubstitutionError)