Move DB calls out of engine module into controllers
This PS moves DB calls out of the engine module and into the appropriate
controllers so that a "production" set of documents can leverage Deckhand
layering and substitution after importing the Deckhand engine module directly.
These operations will be carried out offline, meaning that DB calls are not
possible.

Unit tests were refactored to work with the changes. Some testing
documentation was also updated.

Closes 16

Change-Id: I6e0757746cd949985d57102d1c85acfbbed86078
parent 453927facf
commit 67d46531f6
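A minimal sketch of the offline flow this change enables: the caller, not the engine, now supplies substitution sources, so layering and substitution can run with no database behind them. The input file name, variable names, and the concrete-document filter below are illustrative assumptions, not part of this patch; only `DocumentLayering(documents, substitution_sources)` and `render()` come from the diff.

    # Offline rendering sketch (no deckhand.db imports): the caller hands the
    # engine everything it previously fetched from the DB itself.
    import yaml

    from deckhand.engine import layering

    # Hypothetical input file holding a LayeringPolicy plus the other documents.
    with open('documents.yaml') as f:
        documents = list(yaml.safe_load_all(f))

    # Substitution sources are now injected by the caller; concrete
    # (non-abstract) documents are the usual candidates, mirroring what the
    # controller gathers from the DB in this patch.
    substitution_sources = [
        d for d in documents
        if not d.get('metadata', {}).get('layeringDefinition', {}).get('abstract')]

    rendered = layering.DocumentLayering(documents, substitution_sources).render()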
@@ -243,12 +243,12 @@ class Client(object):
     @property
     def projectid(self):
-        self.logger.warning(_("Property 'projectid' is deprecated since "
-                              "Ocata. Use 'project_name' instead."))
+        self.logger.warning("Property 'projectid' is deprecated since "
+                            "Ocata. Use 'project_name' instead.")
         return self.project_name

     @property
     def tenant_id(self):
-        self.logger.warning(_("Property 'tenant_id' is deprecated since "
-                              "Ocata. Use 'project_id' instead."))
+        self.logger.warning("Property 'tenant_id' is deprecated since "
+                            "Ocata. Use 'project_id' instead.")
         return self.project_id
@@ -25,6 +25,7 @@ from deckhand.engine import document_validation
 from deckhand.engine import secrets_manager
 from deckhand import errors as deckhand_errors
 from deckhand import policy
+from deckhand import types

 LOG = logging.getLogger(__name__)

@@ -50,8 +51,11 @@ class BucketsResource(api_base.BaseResource):
         # because we expect certain formatting of the documents while doing
         # policy enforcement. If any documents fail basic schema validaiton
         # raise an exception immediately.
+        data_schemas = db_api.revision_documents_get(
+            schema=types.DATA_SCHEMA_SCHEMA, deleted=False)
         try:
-            doc_validator = document_validation.DocumentValidation(documents)
+            doc_validator = document_validation.DocumentValidation(
+                documents, data_schemas)
             validations = doc_validator.validate_all()
         except deckhand_errors.InvalidDocumentFormat as e:
             LOG.exception(e.format_message())
@@ -106,8 +106,11 @@ class RenderedDocumentsResource(api_base.BaseResource):

         documents = self._retrieve_documents_for_rendering(revision_id,
                                                            **filters)
+        substitution_sources = self._retrieve_substitution_sources()
+
         try:
-            document_layering = layering.DocumentLayering(documents)
+            document_layering = layering.DocumentLayering(
+                documents, substitution_sources)
             rendered_documents = document_layering.render()
         except (errors.IndeterminateDocumentParent,
                 errors.UnsupportedActionMethod,
@@ -164,10 +167,18 @@ class RenderedDocumentsResource(api_base.BaseResource):

         return documents

+    def _retrieve_substitution_sources(self):
+        # Return all concrete documents as potential substitution sources.
+        return db_api.document_get_all(
+            **{'metadata.layeringDefinition.abstract': False})
+
     def _post_validate(self, documents):
         # Perform schema validation post-rendering to ensure that rendering
         # and substitution didn't break anything.
-        doc_validator = document_validation.DocumentValidation(documents)
+        data_schemas = db_api.revision_documents_get(
+            schema=types.DATA_SCHEMA_SCHEMA, deleted=False)
+        doc_validator = document_validation.DocumentValidation(
+            documents, data_schemas)
         try:
             doc_validator.validate_all()
         except errors.InvalidDocumentFormat as e:
@@ -19,7 +19,6 @@ import jsonschema
 from oslo_log import log as logging
 import six

-from deckhand.db.sqlalchemy import api as db_api
 from deckhand.engine.schema import base_schema
 from deckhand.engine.schema import v1_0
 from deckhand import errors
@@ -243,7 +242,7 @@ class DataSchemaValidator(SchemaValidator):

 class DocumentValidation(object):

-    def __init__(self, documents):
+    def __init__(self, documents, existing_data_schemas=None):
         """Class for document validation logic for documents.

         This class is responsible for validating documents according to their
@@ -254,12 +253,16 @@ class DocumentValidation(object):

         :param documents: Documents to be validated.
         :type documents: List[dict]
+        :param existing_data_schemas: ``DataSchema`` documents created in prior
+            revisions to be used the "data" section of each document in
+            ``documents``. Additional ``DataSchema`` documents in ``documents``
+            are combined with these.
+        :type existing_data_schemas: List[dict]
         """

         self.documents = []
-        data_schemas = db_api.revision_documents_get(
-            schema=types.DATA_SCHEMA_SCHEMA, deleted=False)
+        existing_data_schemas = existing_data_schemas or []
+        data_schemas = existing_data_schemas[:]
         db_data_schemas = {d['metadata']['name']: d for d in data_schemas}

         if not isinstance(documents, (list, tuple)):
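A small sketch of the new ``DocumentValidation`` signature from both kinds of callers; the ``documents`` placeholder and variable names are illustrative, and only the two-argument constructor itself comes from this patch.

    from deckhand.db.sqlalchemy import api as db_api
    from deckhand.engine import document_validation
    from deckhand import types

    documents = []  # documents submitted by the caller (placeholder)

    # DB-backed caller (e.g. a controller): pre-fetch DataSchema documents
    # created in prior revisions and hand them to the engine.
    data_schemas = db_api.revision_documents_get(
        schema=types.DATA_SCHEMA_SCHEMA, deleted=False)
    document_validation.DocumentValidation(documents, data_schemas).validate_all()

    # Offline caller: no DB available, so pass whatever DataSchema documents
    # are already in hand; the argument defaults to None.
    document_validation.DocumentValidation(
        documents, existing_data_schemas=[]).validate_all()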
@@ -154,7 +154,7 @@ class DocumentLayering(object):
         )
         return None, [document.Document(d) for d in documents]

-    def __init__(self, documents):
+    def __init__(self, documents, substitution_sources=None):
         """Contructor for ``DocumentLayering``.

         :param layering_policy: The document with schema
@@ -162,6 +162,10 @@ class DocumentLayering(object):
         :param documents: List of all other documents to be layered together
             in accordance with the ``layerOrder`` defined by the
             LayeringPolicy document.
         :type documents: List[dict]
+        :param substitution_sources: List of documents that are potential
+            sources for substitution. Should only include concrete documents.
+        :type substitution_sources: List[dict]
         """
         self.layering_policy, self.documents = self._extract_layering_policy(
             documents)
@@ -173,6 +177,7 @@ class DocumentLayering(object):
             raise errors.LayeringPolicyNotFound()
         self.layer_order = list(self.layering_policy['data']['layerOrder'])
         self.layered_docs = self._calc_document_children()
+        self.substitution_sources = substitution_sources or []

     def _apply_action(self, action, child_data, overall_data):
         """Apply actions to each layer that is rendered.
@@ -249,9 +254,10 @@ class DocumentLayering(object):

         return overall_data

-    def _apply_substitutions(self, data):
+    def _apply_substitutions(self, document):
         try:
-            secrets_substitution = secrets_manager.SecretsSubstitution(data)
+            secrets_substitution = secrets_manager.SecretsSubstitution(
+                document, self.substitution_sources)
             return secrets_substitution.substitute_all()
         except errors.DocumentNotFound as e:
             LOG.error('Failed to render the documents because a secret '
@@ -16,7 +16,6 @@ from oslo_log import log as logging
 import six

 from deckhand.barbican import driver
-from deckhand.db.sqlalchemy import api as db_api
 from deckhand.engine import document as document_wrapper
 from deckhand import errors
 from deckhand import utils
@@ -98,24 +97,32 @@ class SecretsManager(object):
 class SecretsSubstitution(object):
     """Class for document substitution logic for YAML files."""

-    def __init__(self, documents):
+    def __init__(self, documents, substitution_sources=None):
         """SecretSubstitution constructor.

-        :param documents: List of documents that are candidates for secret
-            substitution. This class will automatically detect documents that
-            require substitution; documents need not be filtered prior to being
-            passed to the constructor.
+        This class will automatically detect documents that require
+        substitution; documents need not be filtered prior to being passed to
+        the constructor.
+
+        :param documents: List of documents that are candidates for
+            substitution.
         :type documents: List[dict]
+        :param substitution_sources: List of documents that are potential
+            sources for substitution. Should only include concrete documents.
+        :type substitution_sources: List[dict]
         """
         if not isinstance(documents, (list, tuple)):
             documents = [documents]

-        self.docs_to_sub = []
+        self.substitution_documents = []
+        self.substitution_sources = substitution_sources or []

         for document in documents:
             if not isinstance(document, document_wrapper.Document):
                 document_obj = document_wrapper.Document(document)
             # If the document has substitutions include it.
             if document_obj.get_substitutions():
-                self.docs_to_sub.append(document_obj)
+                self.substitution_documents.append(document_obj)

     def substitute_all(self):
         """Substitute all documents that have a `metadata.substitutions` field.
@@ -128,10 +135,10 @@ class SecretsSubstitution(object):
         :returns: List of fully substituted documents.
         """
         LOG.debug('Substituting secrets for documents: %s',
-                  self.docs_to_sub)
+                  self.substitution_documents)
         substituted_docs = []

-        for doc in self.docs_to_sub:
+        for doc in self.substitution_documents:
             LOG.debug(
                 'Checking for substitutions in schema=%s, metadata.name=%s',
                 doc.get_name(), doc.get_schema())
@@ -140,11 +147,13 @@ class SecretsSubstitution(object):
                 src_name = sub['src']['name']
                 src_path = sub['src']['path']

-                # TODO(fmontei): Use SecretsManager for this logic. Need to
-                # check Barbican for the secret if it has been encrypted.
-                src_doc = db_api.document_get(
-                    schema=src_schema, name=src_name,
-                    **{'metadata.layeringDefinition.abstract': False})
+                is_match = (lambda x: x['schema'] == src_schema and
+                            x['metadata']['name'] == src_name)
+                try:
+                    src_doc = next(
+                        iter(filter(is_match, self.substitution_sources)))
+                except StopIteration:
+                    src_doc = {}

                 # If the data is a dictionary, retrieve the nested secret
                 # via jsonpath_parse, else the secret is the primitive/string
@@ -153,7 +162,7 @@ class SecretsSubstitution(object):
                     src_secret = utils.jsonpath_parse(src_doc.get('data', {}),
                                                       src_path)
                 else:
-                    src_secret = src_doc['data']
+                    src_secret = src_doc.get('data')

                 dest_path = sub['dest']['path']
                 dest_pattern = sub['dest'].get('pattern', None)
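The same injection applies when ``SecretsSubstitution`` is used on its own, as the refactored ``test_secrets_manager`` test does. A sketch with trimmed, hypothetical document bodies; only the two-argument constructor and ``substitute_all()`` come from this patch, and the exact path handling follows the engine's jsonpath helpers.

    from deckhand.engine import secrets_manager

    # Hypothetical source document: a cleartext certificate to substitute from.
    certificate = {
        'schema': 'deckhand/Certificate/v1',
        'metadata': {'schema': 'metadata/Document/v1', 'name': 'example-cert'},
        'data': '-----BEGIN CERTIFICATE----- ...',
    }

    # Hypothetical destination document whose metadata.substitutions pulls the
    # certificate into an existing path under its data section.
    chart = {
        'schema': 'armada/Chart/v1',
        'metadata': {
            'schema': 'metadata/Document/v1', 'name': 'example-chart',
            'substitutions': [{
                'src': {'schema': 'deckhand/Certificate/v1',
                        'name': 'example-cert', 'path': '.'},
                'dest': {'path': '.chart.values.tls.certificate'},
            }],
        },
        'data': {'chart': {'values': {'tls': {'certificate': ''}}}},
    }

    # Sources are matched in memory by schema and metadata.name; no db_api call.
    substituted = secrets_manager.SecretsSubstitution(
        [chart], substitution_sources=[certificate]).substitute_all()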
@@ -23,8 +23,9 @@ class TestDocumentLayering(test_base.DeckhandTestCase):

     def _test_layering(self, documents, site_expected=None,
                        region_expected=None, global_expected=None,
-                       exception_expected=None):
-        document_layering = layering.DocumentLayering(documents)
+                       exception_expected=None, substitution_sources=None):
+        document_layering = layering.DocumentLayering(
+            documents, substitution_sources)

         if all([site_expected, region_expected, global_expected,
                 exception_expected]):
@@ -12,9 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-import mock
-
-from deckhand.engine import secrets_manager
 from deckhand import factories
 from deckhand.tests.unit.engine import test_document_layering

@@ -51,14 +48,9 @@ class TestDocumentLayeringWithSubstitution(
         global_expected = {'a': {'x': 1, 'y': 2}, 'c': 'global-secret'}
         site_expected = {'a': {'x': 1, 'y': 2}, 'b': 4, 'c': 'global-secret'}

-        with mock.patch.object(
-                secrets_manager.db_api, 'document_get',
-                return_value=certificate, autospec=True) as mock_document_get:
-            self._test_layering(documents, site_expected=site_expected,
-                                global_expected=global_expected)
-        mock_document_get.assert_called_once_with(
-            schema=certificate['schema'], name=certificate['metadata']['name'],
-            **{'metadata.layeringDefinition.abstract': False})
+        self._test_layering(documents, site_expected=site_expected,
+                            global_expected=global_expected,
+                            substitution_sources=[certificate])

     def test_layering_and_substitution_no_children(self):
         mapping = {
@@ -90,14 +82,9 @@ class TestDocumentLayeringWithSubstitution(
         global_expected = {'a': {'x': 1, 'y': 2}, 'c': 'global-secret'}
         site_expected = {'b': 4}

-        with mock.patch.object(
-                secrets_manager.db_api, 'document_get',
-                return_value=certificate, autospec=True) as mock_document_get:
-            self._test_layering(documents, site_expected=site_expected,
-                                global_expected=global_expected)
-        mock_document_get.assert_called_once_with(
-            schema=certificate['schema'], name=certificate['metadata']['name'],
-            **{'metadata.layeringDefinition.abstract': False})
+        self._test_layering(documents, site_expected=site_expected,
+                            global_expected=global_expected,
+                            substitution_sources=[certificate])

     def test_layering_parent_and_child_undergo_substitution(self):
         mapping = {
@@ -136,24 +123,14 @@ class TestDocumentLayeringWithSubstitution(
         site_expected = {'a': {'x': 1, 'y': 2}, 'b': 'global-secret',
                          'c': 'site-secret'}

-        def _get_secret_document(*args, **kwargs):
-            name = kwargs['name']
-            prefix = name.split('-')[0]
-            return secrets_factory.gen_test(
-                'Certificate', 'cleartext', data='%s-secret' % prefix,
-                name='%s' % name)
+        certificate = secrets_factory.gen_test(
+            'Certificate', 'cleartext', data='global-secret',
+            name='global-cert')
+        certificate_key = secrets_factory.gen_test(
+            'CertificateKey', 'cleartext', data='site-secret',
+            name='site-cert')

-        with mock.patch.object(
-                secrets_manager.db_api, 'document_get',
-                autospec=True) as mock_document_get:
-            mock_document_get.side_effect = _get_secret_document
-            self._test_layering(documents, site_expected=site_expected,
-                                global_expected=global_expected)
-        mock_document_get.assert_has_calls([
-            mock.call(
-                schema="deckhand/Certificate/v1", name='global-cert',
-                **{'metadata.layeringDefinition.abstract': False}),
-            mock.call(
-                schema="deckhand/CertificateKey/v1", name='site-cert',
-                **{'metadata.layeringDefinition.abstract': False})
-        ])
+        self._test_layering(
+            documents, site_expected=site_expected,
+            global_expected=global_expected,
+            substitution_sources=[certificate, certificate_key])
@@ -30,10 +30,6 @@ class TestDocumentValidation(engine_test_base.TestDocumentValidationBase):
         self.dataschema = dataschema_factory.gen_test(
             self.test_document['schema'], {})

-        # Stub out the DB call for retrieving DataSchema documents.
-        self.patchobject(document_validation.db_api, 'revision_documents_get',
-                         lambda *a, **k: [])
-
     def test_data_schema_missing_optional_sections(self):
         optional_missing_data = [
             self._corrupt_data(self.test_document, 'metadata.labels'),
@@ -32,12 +32,6 @@ class TestDocumentValidationNegative(test_base.TestDocumentValidationBase):
         'schema': errors.InvalidDocumentFormat,
     }

-    def setUp(self):
-        super(TestDocumentValidationNegative, self).setUp()
-        # Stub out the DB call for retrieving DataSchema documents.
-        self.patchobject(document_validation.db_api, 'revision_documents_get',
-                         lambda *a, **k: [])
-
     def _do_validations(self, document_validator, expected, expected_err):
         validations = document_validator.validate_all()
         self.assertEqual(2, len(validations))
@@ -14,6 +14,7 @@

 import copy

+from deckhand.db.sqlalchemy import api as db_api
 from deckhand.engine import secrets_manager
 from deckhand import factories
 from deckhand.tests import test_utils
@@ -90,7 +91,11 @@ class TestSecretsSubstitution(test_base.TestDbBase):
         expected_document = copy.deepcopy(documents[-1])
         expected_document['data'] = expected_data

-        secret_substitution = secrets_manager.SecretsSubstitution(documents)
+        substitution_sources = db_api.document_get_all(
+            **{'metadata.layeringDefinition.abstract': False})
+
+        secret_substitution = secrets_manager.SecretsSubstitution(
+            documents, substitution_sources)
         substituted_docs = secret_substitution.substitute_all()

         self.assertIn(expected_document, substituted_docs)
@@ -17,6 +17,11 @@
 Testing
 =======

+.. warning::
+
+    Deckhand has only been tested against a Ubuntu 16.04 environment. The guide
+    below assumes the user is using Ubuntu.
+
 Unit testing
 ============

@@ -27,13 +32,18 @@ Prerequisites
 postgresql database for unit tests. The DB URL is set up as an environment
 variable via ``PIFPAF_URL`` which is referenced by Deckhand's unit test suite.

-When running `pifpaf run postgresql` (implicitly called by unit tests below),
-pifpaf uses `pg_config` which can be installed on Ubuntu via::
+#. PostgreSQL must be installed. To do so, run::

-    sudo apt-get install libpq-dev -y
+    $ sudo apt-get update
+    $ sudo apt-get install postgresql postgresql-contrib -y
+
+Guide
+-----
+
+#. When running ``pifpaf run postgresql`` (implicitly called by unit tests below),
+   pifpaf uses ``pg_config`` which can be installed by running::
+
+    $ sudo apt-get install libpq-dev -y

 Overview
 --------

 Unit testing currently uses an in-memory sqlite database. Since Deckhand's
 primary function is to serve as the back-end storage for UCP, the majority
@@ -143,3 +153,24 @@ Which will result in the following script output:
 For testing dev changes, it is **not** recommended to follow this approach,
 as the most up-to-date code is located in the repository itself. Running tests
 against a remote image will likely result in false positives.
+
+Troubleshooting
+===============
+
+* For any errors related to ``tox``:
+
+  Ensure that ``tox`` is installed::
+
+    $ sudo apt-get install tox -y
+
+* For any errors related to running ``tox -e py27``:
+
+  Ensure that ``python-dev`` is installed::
+
+    $ sudo apt-get install python-dev -y
+
+* For any errors related to running ``tox -e py35``:
+
+  Ensure that ``python3-dev`` is installed::
+
+    $ sudo apt-get install python3-dev -y