Extending components DSL model with additional predicates
* Add ability to handle new predicates for a component's 'requires' property
* Refactor the existing-components validation function
* Update release components according to the new DSL model

Implements: blueprint component-registry-improvements
Change-Id: I6175e28118dac494d48968e3f8c51e89ab74a34b
Depends-On: Iab03bf5e36800c8aea0e054719d40ca42a542b73
This commit is contained in:
parent
052082bd71
commit
cea0523ff5
|
@ -14,7 +14,6 @@
|
|||
# under the License.
|
||||
|
||||
from distutils import version
|
||||
from itertools import groupby
|
||||
|
||||
import six
|
||||
import sqlalchemy as sa
|
||||
|
@ -28,6 +27,7 @@ from nailgun.db.sqlalchemy.models import Node
|
|||
from nailgun import errors
|
||||
from nailgun import objects
|
||||
from nailgun.plugins.manager import PluginManager
|
||||
from nailgun.utils.restrictions import ComponentsRestrictions
|
||||
|
||||
|
||||
class ClusterValidator(base.BasicValidator):
|
||||
|
@ -60,70 +60,10 @@ class ClusterValidator(base.BasicValidator):
|
|||
def _validate_components(cls, release_id, components_list):
|
||||
release = objects.Release.get_by_uid(release_id)
|
||||
release_components = objects.Release.get_all_components(release)
|
||||
components_set = set(components_list)
|
||||
found_release_components = [
|
||||
c for c in release_components if c['name'] in components_set]
|
||||
found_release_components_names_set = set(
|
||||
c['name'] for c in found_release_components)
|
||||
|
||||
if found_release_components_names_set != components_set:
|
||||
raise errors.InvalidData(
|
||||
u'{0} components are not related to release "{1}".'.format(
|
||||
sorted(
|
||||
components_set - found_release_components_names_set),
|
||||
release.name
|
||||
),
|
||||
log_message=True
|
||||
)
|
||||
|
||||
mandatory_component_types = set(['hypervisor', 'network', 'storage'])
|
||||
for component in found_release_components:
|
||||
component_name = component['name']
|
||||
for incompatible in component.get('incompatible', []):
|
||||
incompatible_component_names = list(
|
||||
cls._resolve_names_for_dependency(
|
||||
components_set, incompatible))
|
||||
if incompatible_component_names:
|
||||
raise errors.InvalidData(
|
||||
u"Incompatible components were found: "
|
||||
u"'{0}' incompatible with {1}.".format(
|
||||
component_name, incompatible_component_names),
|
||||
log_message=True
|
||||
)
|
||||
|
||||
component_type = lambda x: x['name'].split(':', 1)[0]
|
||||
for c_type, group in groupby(
|
||||
sorted(component.get('requires', []), key=component_type),
|
||||
component_type):
|
||||
group_components = list(group)
|
||||
for require in group_components:
|
||||
component_exist = any(
|
||||
cls._resolve_names_for_dependency(
|
||||
components_set, require))
|
||||
if component_exist:
|
||||
break
|
||||
else:
|
||||
raise errors.InvalidData(
|
||||
u"Requires {0} for '{1}' components were not "
|
||||
u"satisfied.".format(
|
||||
[c['name'] for c in group_components],
|
||||
component_name),
|
||||
log_message=True
|
||||
)
|
||||
if component_type(component) in mandatory_component_types:
|
||||
mandatory_component_types.remove(component_type(component))
|
||||
|
||||
if mandatory_component_types:
|
||||
raise errors.InvalidData(
|
||||
"Components with {0} types required but wasn't found in data"
|
||||
.format(sorted(mandatory_component_types)),
|
||||
log_message=True
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _resolve_names_for_dependency(components_set, dependency):
|
||||
prefix = dependency['name'].split('*', 1)[0]
|
||||
return (name for name in components_set if name.startswith(prefix))
|
||||
ComponentsRestrictions.validate_components(
|
||||
components_list,
|
||||
release_components,
|
||||
release.required_component_types)
|
||||
|
||||
@classmethod
|
||||
def validate(cls, data):
|
||||
|
|
|
@ -37,9 +37,11 @@ def upgrade():
|
|||
upgrade_plugin_links_constraints()
|
||||
upgrade_plugin_with_nics_and_nodes_attributes()
|
||||
upgrade_node_deployment_info()
|
||||
upgrade_release_required_component_types()
|
||||
|
||||
|
||||
def downgrade():
|
||||
downgrade_release_required_component_types()
|
||||
downgrade_node_deployment_info()
|
||||
downgrade_plugin_with_nics_and_nodes_attributes()
|
||||
downgrade_plugin_links_constraints()
|
||||
|
@ -226,6 +228,24 @@ def upgrade_plugin_with_nics_and_nodes_attributes():
|
|||
)
|
||||
|
||||
|
||||
def upgrade_release_required_component_types():
    """Add the non-nullable 'required_component_types' JSON column.

    Existing release rows are backfilled with the default set of
    mandatory component types; new rows rely on the server default.
    """
    required_types_column = sa.Column(
        'required_component_types',
        fields.JSON(),
        nullable=False,
        server_default='[]'
    )
    op.add_column('releases', required_types_column)

    # Backfill rows that existed before the column was added.
    default_types = jsonutils.dumps(['hypervisor', 'network', 'storage'])
    op.get_bind().execute(
        sa.sql.text(
            "UPDATE releases SET required_component_types = :required_types"),
        required_types=default_types
    )
|
||||
|
||||
|
||||
def downgrade_plugin_with_nics_and_nodes_attributes():
|
||||
op.drop_table('node_cluster_plugins')
|
||||
op.drop_table('node_bond_interface_cluster_plugins')
|
||||
|
@ -281,3 +301,7 @@ def upgrade_node_deployment_info():
|
|||
|
||||
def downgrade_node_deployment_info():
    # Reverse of upgrade_node_deployment_info: drop the table holding
    # per-node deployment information.
    op.drop_table('node_deployment_info')
|
||||
|
||||
|
||||
def downgrade_release_required_component_types():
    # Reverse of upgrade_release_required_component_types: drop the
    # 'required_component_types' column added by the upgrade.
    op.drop_column('releases', 'required_component_types')
|
||||
|
|
|
@ -60,6 +60,8 @@ class Release(Base):
|
|||
MutableDict.as_mutable(JSON), default={})
|
||||
components_metadata = Column(
|
||||
MutableList.as_mutable(JSON), default=[], server_default='[]')
|
||||
required_component_types = Column(MutableList.as_mutable(JSON), default=[],
|
||||
server_default='[]', nullable=False)
|
||||
modes = Column(MutableList.as_mutable(JSON), default=[])
|
||||
clusters = relationship(
|
||||
"Cluster",
|
||||
|
|
|
@ -1731,6 +1731,12 @@
|
|||
description: "dialog.create_cluster_wizard.compute.qemu_description"
|
||||
compatible:
|
||||
- name: hypervisor:vmware
|
||||
requires:
|
||||
- one_of:
|
||||
items:
|
||||
- network:neutron:ml2:vlan
|
||||
- network:neutron:ml2:tun
|
||||
message: "dialog.create_cluster_wizard.compute.qemu_requires_network_backend"
|
||||
- name: hypervisor:vmware
|
||||
weight: 15
|
||||
bind:
|
||||
|
@ -1740,8 +1746,16 @@
|
|||
compatible:
|
||||
- name: hypervisor:qemu
|
||||
requires:
|
||||
- name: hypervisor:qemu
|
||||
message: "dialog.create_cluster_wizard.compute.vcenter_warning"
|
||||
- one_of:
|
||||
items:
|
||||
- hypervisor:qemu
|
||||
message: "dialog.create_cluster_wizard.compute.vcenter_warning"
|
||||
- one_of:
|
||||
items:
|
||||
- network:neutron:ml2:dvs
|
||||
- network:neutron:ml2:nsx
|
||||
message: "dialog.create_cluster_wizard.compute.vcenter_requires_network_backend"
|
||||
message_invalid: "dialog.create_cluster_wizard.compute.vcenter_requires_network_plugins"
|
||||
- name: network:neutron:core:ml2
|
||||
default: true
|
||||
weight: 1000
|
||||
|
@ -1766,7 +1780,9 @@
|
|||
- name: hypervisor:qemu
|
||||
- name: hypervisor:vmware
|
||||
requires:
|
||||
- name: network:neutron:core:ml2
|
||||
- one_of:
|
||||
items:
|
||||
- network:neutron:core:ml2
|
||||
- name: network:neutron:ml2:tun
|
||||
bind: !!pairs
|
||||
- "cluster:net_provider": "neutron"
|
||||
|
@ -1785,7 +1801,9 @@
|
|||
- name: additional_service:ironic
|
||||
message: "dialog.create_cluster_wizard.additional.ironic_network_mode_alert"
|
||||
requires:
|
||||
- name: network:neutron:core:ml2
|
||||
- one_of:
|
||||
items:
|
||||
- network:neutron:core:ml2
|
||||
- name: storage:block:lvm
|
||||
label: "dialog.create_cluster_wizard.storage.lvm"
|
||||
description: "dialog.create_cluster_wizard.storage.default_provider"
|
||||
|
@ -1951,6 +1969,10 @@
|
|||
incompatible:
|
||||
- name: network:neutron:ml2:tun
|
||||
message: "dialog.create_cluster_wizard.additional.ironic_network_mode_alert"
|
||||
required_component_types:
|
||||
- hypervisor
|
||||
- network
|
||||
- storage
|
||||
modes: ['ha_compact']
|
||||
extensions: ['volume_manager']
|
||||
- pk: 1
|
||||
|
|
|
@ -14,6 +14,7 @@
|
|||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import mock
|
||||
from oslo_serialization import jsonutils
|
||||
|
||||
from nailgun import consts
|
||||
|
@ -21,6 +22,7 @@ from nailgun.db.sqlalchemy.models import Cluster
|
|||
from nailgun.db.sqlalchemy.models import DeploymentGraph
|
||||
from nailgun.db.sqlalchemy.models import NetworkGroup
|
||||
from nailgun.db.sqlalchemy.models import Node
|
||||
from nailgun import errors
|
||||
from nailgun.test.base import BaseIntegrationTest
|
||||
from nailgun.test.base import fake_tasks
|
||||
from nailgun.utils import reverse
|
||||
|
@ -338,6 +340,17 @@ class TestClusterComponents(BaseIntegrationTest):
|
|||
'mode': consts.CLUSTER_MODES.ha_compact
|
||||
}
|
||||
|
||||
def test_component_validation_failed(self):
    """Cluster creation must return 400 with the validator's message
    when ComponentsRestrictions.validate_components raises InvalidData.
    """
    error_msg = "Component validation error"
    self.cluster_data.update(
        {'components': ['hypervisor:test_hypervisor']})
    # Force the components validator to fail so that only the error
    # propagation through the handler is exercised here.
    with mock.patch('nailgun.utils.restrictions.ComponentsRestrictions.'
                    'validate_components') as validate_mock:
        validate_mock.side_effect = errors.InvalidData(error_msg)
        resp = self._create_cluster_with_expected_errors(self.cluster_data)
        self.assertEqual(resp.status_code, 400)
        self.assertEqual(error_msg, resp.json_body['message'])
|
||||
|
||||
def test_components_not_in_release(self):
|
||||
self.cluster_data.update(
|
||||
{'components': ['storage:not_existing_component']})
|
||||
|
@ -345,7 +358,7 @@ class TestClusterComponents(BaseIntegrationTest):
|
|||
self.assertEqual(resp.status_code, 400)
|
||||
self.assertEqual(
|
||||
u"[u'storage:not_existing_component'] components are not "
|
||||
"related to release \"release_name_2015.1-8.0\".",
|
||||
"related to used release.",
|
||||
resp.json_body['message']
|
||||
)
|
||||
|
||||
|
@ -369,8 +382,8 @@ class TestClusterComponents(BaseIntegrationTest):
|
|||
resp = self._create_cluster_with_expected_errors(self.cluster_data)
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
self.assertEqual(
|
||||
u"Requires [u'hypervisors:test_hypervisor'] for "
|
||||
"'storage:test_storage' components were not satisfied.",
|
||||
u"Component 'storage:test_storage' requires any of components "
|
||||
"from [u'hypervisors:test_hypervisor'] set.",
|
||||
resp.json_body['message']
|
||||
)
|
||||
|
||||
|
|
|
@ -51,6 +51,7 @@ def prepare():
|
|||
'roles': '{}',
|
||||
'roles_metadata': '{}',
|
||||
'is_deployable': True,
|
||||
'required_component_types': ['network', 'storage']
|
||||
}]
|
||||
)
|
||||
|
||||
|
@ -101,3 +102,10 @@ class TestPluginLinksConstraints(base.BaseAlembicMigrationTest):
|
|||
[sa.func.count(self.meta.tables['cluster_plugin_links'].c.id)]
|
||||
)).fetchone()[0]
|
||||
self.assertEqual(links_count, 2)
|
||||
|
||||
|
||||
class TestRequiredComponentTypesField(base.BaseAlembicMigrationTest):
    """Checks the downgrade path for 'required_component_types'."""

    def test_downgrade_release_required_component_types(self):
        # After the downgrade the column added by the upgrade migration
        # must no longer exist on the 'releases' table.
        releases_table = self.meta.tables['releases']
        self.assertNotIn('required_component_types', releases_table.c)
|
||||
|
|
|
@ -17,6 +17,7 @@ import datetime
|
|||
import alembic
|
||||
from oslo_serialization import jsonutils
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
|
||||
from nailgun.db import db
|
||||
from nailgun.db import dropdb
|
||||
|
@ -373,6 +374,7 @@ def prepare():
|
|||
}
|
||||
]
|
||||
)
|
||||
TestRequiredComponentTypesField.prepare(meta)
|
||||
db.commit()
|
||||
|
||||
|
||||
|
@ -486,3 +488,49 @@ class TestSplitDeploymentInfo(base.BaseAlembicMigrationTest):
|
|||
res = db.execute(sa.select([tasks_table]))
|
||||
for data in res:
|
||||
self.assertIsNone(data.deployment_info)
|
||||
|
||||
|
||||
class TestRequiredComponentTypesField(base.BaseAlembicMigrationTest):
    """Checks the upgrade migration adding 'required_component_types'."""

    # Identifying values of the fixture release inserted by prepare().
    release_name = 'test_release'
    version = '2015.1-10.0'

    @classmethod
    def prepare(cls, meta):
        # Insert a minimal release row *without* required_component_types
        # so the migration's backfill behaviour can be observed.
        db.execute(
            meta.tables['releases'].insert(),
            [{
                'name': cls.release_name,
                'version': cls.version,
                'operating_system': 'ubuntu',
                'state': 'available',
                'roles_metadata': '{}',
                'is_deployable': True
            }])

    def test_upgrade_release_required_component_types(self):
        # The upgrade must backfill pre-existing releases with the default
        # list of mandatory component types.
        releases_table = self.meta.tables['releases']
        result = db.execute(
            sa.select([releases_table.c.required_component_types]).
            where(releases_table.c.name == self.release_name).
            where(releases_table.c.version == self.version)).fetchone()
        self.assertEqual(jsonutils.loads(result['required_component_types']),
                         ['hypervisor', 'network', 'storage'])

    def test_not_nullable_required_component_types(self):
        # Inserting an explicit NULL must violate the NOT NULL constraint
        # created by the upgrade migration.
        with self.assertRaisesRegexp(
            IntegrityError,
            'null value in column "required_component_types" '
            'violates not-null constraint'
        ):
            db.execute(
                self.meta.tables['releases'].insert(),
                {
                    'name': 'test_release',
                    'version': '2015.1-10.0',
                    'operating_system': 'ubuntu',
                    'state': 'available',
                    'roles_metadata': '{}',
                    'is_deployable': True,
                    'required_component_types': None
                })
        # The failed insert leaves the session dirty; reset it.
        db.rollback()
|
||||
|
|
|
@ -22,6 +22,7 @@ from nailgun import objects
|
|||
from nailgun.settings import settings
|
||||
from nailgun.test import base
|
||||
from nailgun.utils.restrictions import AttributesRestriction
|
||||
from nailgun.utils.restrictions import ComponentsRestrictions
|
||||
from nailgun.utils.restrictions import LimitsMixin
|
||||
from nailgun.utils.restrictions import RestrictionBase
|
||||
from nailgun.utils.restrictions import VmwareAttributesRestriction
|
||||
|
@ -464,3 +465,231 @@ class TestVmwareAttributesRestriction(base.BaseTestCase):
|
|||
metadata=vmware_attributes['editable']['metadata'],
|
||||
data=vmware_attributes['editable']['value'])
|
||||
self.assertItemsEqual(errs, [])
|
||||
|
||||
|
||||
class TestComponentsRestrictions(base.BaseTestCase):
    """Unit tests for ComponentsRestrictions.validate_components."""

    def setUp(self):
        super(TestComponentsRestrictions, self).setUp()
        # Component types every valid selection must cover.
        self.required_components_types = ['hypervisor', 'network', 'storage']
        # Minimal components metadata exercising 'incompatible',
        # 'compatible' and name-style 'requires' relations.
        self.components_metadata = [
            {
                'name': 'hypervisor:test_hypervisor'
            },
            {
                'name': 'network:core:test_network_1',
                'incompatible': [
                    {'name': 'hypervisor:test_hypervisor'}
                ]
            },
            {
                'name': 'network:core:test_network_2'
            },
            {
                'name': 'network:ml2:test_network_3'
            },
            {
                'name': 'storage:test_storage',
                'compatible': [
                    {'name': 'hypervisor:test_hypervisor'}
                ],
                'requires': [
                    {'name': 'hypervisor:test_hypervisor'}
                ]
            },
            {
                'name': 'storage:test_storage_2'
            }
        ]

    def test_components_not_in_available_components(self):
        # Unknown component names must be reported as unrelated to release.
        self._validate_with_expected_errors(
            ['storage:not_existing_component'],
            "['storage:not_existing_component'] components are not related to "
            "used release."
        )

    def test_not_all_required_types_components(self):
        # A selection covering all required types validates; removing
        # components one by one must report the missing types.
        selected_components_list = [
            'hypervisor:test_hypervisor',
            'network:core:test_network_2',
            'storage:test_storage_2'
        ]
        ComponentsRestrictions.validate_components(
            selected_components_list, self.components_metadata,
            self.required_components_types)

        while selected_components_list:
            selected_components_list.pop()
            self._validate_with_expected_errors(
                selected_components_list,
                "Components with {0} types are required but wasn't found "
                "in data.".format(sorted(
                    set(self.required_components_types) - set(
                        [x.split(':')[0] for x in selected_components_list])
                ))
            )

    def test_incompatible_components_found(self):
        # test_network_1 declares test_hypervisor incompatible.
        self._validate_with_expected_errors(
            ['hypervisor:test_hypervisor', 'network:core:test_network_1'],
            "Incompatible components were found: 'network:core:test_network_1'"
            " incompatible with ['hypervisor:test_hypervisor']."
        )

    def test_requires_components_not_found(self):
        # test_storage's name-style requires is not satisfied.
        self._validate_with_expected_errors(
            ['storage:test_storage'],
            "Component 'storage:test_storage' requires any of components from "
            "['hypervisor:test_hypervisor'] set."
        )

    def test_requires_mixed_format(self):
        # Mixing predicate-style and name-style requires is invalid.
        self.components_metadata.append({
            'name': 'storage:wrong_storage',
            'requires': [
                {'any_of': {
                    'items': ['network:core:*']
                }},
                {'name': 'hypervisor:test_hypervisor'}
            ]
        })
        self._validate_with_expected_errors(
            ['storage:wrong_storage'],
            "Component 'storage:wrong_storage' has mixed format of requires."
        )

    def test_requires_any_of_predicate(self):
        # any_of: at least one item must match the selected components.
        self.components_metadata.append({
            'name': 'additional_service:test_service',
            'requires': [
                {'any_of': {
                    'items': ['network:core:*']
                }},
                {'any_of': {
                    'items': [
                        'storage:test_storage_2', 'hypervisor:test_hypervisor'
                    ],
                }}
            ]
        })
        self._validate_with_expected_errors(
            ['additional_service:test_service', 'network:ml2:test_network_3'],
            "Requirements was not satisfied for component "
            "'additional_service:test_service': any_of(['network:core:*'])"
        )

        self._validate_with_expected_errors(
            ['additional_service:test_service', 'network:core:test_network_2'],
            "Requirements was not satisfied for component "
            "'additional_service:test_service': "
            "any_of(['hypervisor:test_hypervisor', 'storage:test_storage_2'])"
        )

        # Both any_of conditions satisfied -- no error expected.
        ComponentsRestrictions.validate_components(
            ['additional_service:test_service', 'network:core:test_network_2',
             'hypervisor:test_hypervisor', 'storage:test_storage_2'],
            self.components_metadata,
            self.required_components_types
        )

    def test_requires_one_of_predicate(self):
        # one_of: exactly one item must match the selected components.
        self.components_metadata.append({
            'name': 'additional_service:test_service',
            'requires': [
                {'one_of': {
                    'items': ['network:core:*']
                }},
                {'one_of': {
                    'items': [
                        'storage:test_storage_2', 'hypervisor:test_hypervisor'
                    ]
                }}
            ]
        })
        # Two matching network:core components violate one_of.
        selected_components_list = ['additional_service:test_service',
                                    'network:core:test_network_1',
                                    'network:core:test_network_2',
                                    'storage:test_storage_2']
        self._validate_with_expected_errors(
            selected_components_list,
            "Requirements was not satisfied for component "
            "'additional_service:test_service': one_of(['network:core:*'])"
        )

        self._validate_with_expected_errors(
            ['additional_service:test_service', 'network:core:test_network_1'],
            "Requirements was not satisfied for component "
            "'additional_service:test_service': "
            "one_of(['hypervisor:test_hypervisor', 'storage:test_storage_2'])"
        )

        # Exactly one match per one_of group -- no error expected.
        ComponentsRestrictions.validate_components(
            ['additional_service:test_service', 'network:core:test_network_2',
             'hypervisor:test_hypervisor', 'storage:test_storage'],
            self.components_metadata,
            self.required_components_types
        )

    def test_requires_none_of_predicate(self):
        # none_of: no item may match the selected components.
        self.components_metadata.append({
            'name': 'additional_service:test_service',
            'requires': [{
                'none_of': {
                    'items': ['network:core:*', 'storage:test_storage']
                }
            }]
        })
        selected_components_list = ['additional_service:test_service',
                                    'network:core:test_network_1']
        self._validate_with_expected_errors(
            selected_components_list,
            "Requirements was not satisfied for component "
            "'additional_service:test_service': "
            "none_of(['network:core:*', 'storage:test_storage'])"
        )

        # No forbidden components selected -- no error expected.
        ComponentsRestrictions.validate_components(
            ['additional_service:test_service', 'network:ml2:test_network_3',
             'storage:test_storage_2', 'hypervisor:test_hypervisor'],
            self.components_metadata,
            self.required_components_types
        )

    def test_requires_all_of_predicate(self):
        # all_of: every item (incl. wildcard expansion over available
        # components) must match the selected components.
        self.components_metadata.append({
            'name': 'additional_service:test_service',
            'requires': [{
                'all_of': {
                    'items': [
                        'network:core:test_network_2',
                        'storage:*',
                        'hypervisor:test_hypervisor'
                    ]
                }
            }]
        })
        # storage:* expands to both storages; only one selected -> error.
        selected_components_list = ['additional_service:test_service',
                                    'network:core:test_network_2',
                                    'storage:test_storage_2',
                                    'hypervisor:test_hypervisor']

        self._validate_with_expected_errors(
            selected_components_list,
            "Requirements was not satisfied for component "
            "'additional_service:test_service': all_of(["
            "'hypervisor:test_hypervisor', 'network:core:test_network_2', "
            "'storage:*'])"
        )

        selected_components_list.append('storage:test_storage')
        ComponentsRestrictions.validate_components(
            selected_components_list, self.components_metadata,
            self.required_components_types)

    def _validate_with_expected_errors(self, components_list, error_msg):
        # Helper: run validation and assert it fails with exactly error_msg.
        with self.assertRaises(errors.InvalidData) as exc_cm:
            ComponentsRestrictions.validate_components(
                components_list, self.components_metadata,
                self.required_components_types)
        self.assertEqual(exc_cm.exception.message, error_msg)
|
||||
|
|
|
@ -19,6 +19,7 @@ Classes for checking data restrictions and limits
|
|||
"""
|
||||
|
||||
from functools import partial
|
||||
from itertools import groupby
|
||||
import re
|
||||
import six
|
||||
|
||||
|
@ -29,6 +30,14 @@ from nailgun.utils import compact
|
|||
from nailgun.utils import flatten
|
||||
|
||||
|
||||
# Maps a predicate keyword from a component's 'requires' DSL section to the
# function that decides whether the predicate is satisfied.  Each function
# receives two sets (see ComponentsRestrictions._check_predicates):
#   x -- predicate items resolved against the *selected* components
#   y -- predicate items resolved against *all available* components
PREDICATE_FUNCTION_MAP = {
    'one_of': lambda x, y: len(x) == 1,
    'any_of': lambda x, y: len(x) > 0,
    'none_of': lambda x, y: len(x) == 0,
    'all_of': lambda x, y: x == y
}
|
||||
|
||||
|
||||
class LimitsMixin(object):
|
||||
"""Mixin with limits processing functionality"""
|
||||
|
||||
|
@ -393,7 +402,7 @@ class VmwareAttributesRestriction(RestrictionBase):
|
|||
:type metadata: list|dict
|
||||
:param data: vmware attributes data(value) object
|
||||
:type data: list|dict
|
||||
:retruns: func -- generator which produces errors
|
||||
:returns: func -- generator which produces errors
|
||||
"""
|
||||
root_key = camel_to_snake_case(cls.__name__)
|
||||
|
||||
|
@ -459,3 +468,184 @@ class VmwareAttributesRestriction(RestrictionBase):
|
|||
yield result
|
||||
|
||||
return find
|
||||
|
||||
|
||||
class ComponentsRestrictions(object):
    """Validation of release components selected for a cluster."""

    @classmethod
    def validate_components(cls, components_names, available_components,
                            required_component_types):
        """Check if selected components are valid and suitable for each other.

        :param components_names: list of component names for validation
        :type components_names: list
        :param available_components: list of all available components
        :type available_components: list of dict
        :param required_component_types: list of all required components types
        :type required_component_types: list
        :raises: errors.InvalidData
        """
        components_names = set(components_names)
        available_components_names = set()
        found_components = []

        # Single pass: collect all available names and the metadata of the
        # components that were actually selected.
        for component in available_components:
            available_components_names.add(component['name'])
            if component['name'] in components_names:
                found_components.append(component)

        if len(components_names) != len(found_components):
            raise errors.InvalidData(
                '{0} components are not related to used release.'.format(
                    sorted(components_names - available_components_names)
                ),
                log_message=True
            )

        components_types_set = set()
        for component in found_components:
            cls._check_component_incompatibles(component, components_names)
            cls._check_component_requires(
                component, components_names, available_components_names)
            components_types_set.add(cls._get_component_type(component))

        cls._check_required_component_types(components_types_set,
                                            set(required_component_types))

    @classmethod
    def _check_component_incompatibles(cls, component, components_names):
        """Check if component has incompatible components.

        :param component: target component for checking
        :type component: dict
        :param components_names: set of selected components names
        :type components_names: set
        :raises: errors.InvalidData
        """
        for incompatible in component.get('incompatible', []):
            # Incompatible names may contain a '*' wildcard.
            incompatible_component_names = list(
                cls._resolve_names_for_dependency(components_names,
                                                  incompatible['name'])
            )
            if incompatible_component_names:
                raise errors.InvalidData(
                    "Incompatible components were found: "
                    "'{0}' incompatible with {1}.".format(
                        component['name'],
                        incompatible_component_names),
                    log_message=True
                )

    @classmethod
    def _check_component_requires(cls, component, components_names,
                                  available_components_names):
        """Check if all component's requires are satisfied.

        Two formats of 'requires' are supported and must not be mixed:
        the legacy name format ({'name': ...} entries, grouped by component
        type, any name in a group satisfies it) and the predicate format
        ({'one_of'|'any_of'|'none_of'|'all_of': {'items': [...]}} entries).

        :param component: target component for checking
        :type component: dict
        :param components_names: set of selected components names
        :type components_names: set
        :param available_components_names: names of all available components
        :type available_components_names: set
        :raises: errors.InvalidData
        """
        component_requires = component.get('requires', [])
        requires_without_predicates = [
            r.get('name') for r in component_requires]
        if all(requires_without_predicates):
            # Legacy format (or no requires at all): every entry is a plain
            # component name.  groupby needs its input pre-sorted by the
            # same key.
            for c_type, group in groupby(sorted(component_requires,
                                                key=cls._get_component_type),
                                         cls._get_component_type):
                group_components = list(group)
                for require in group_components:
                    if cls._resolve_names_for_dependency(
                            components_names, require['name']):
                        break
                else:
                    raise errors.InvalidData(
                        "Component '{0}' requires any of components from {1} "
                        "set.".format(
                            component['name'],
                            sorted([c['name'] for c in group_components])),
                        log_message=True
                    )
        elif any(requires_without_predicates):
            # Some entries have 'name' and some don't: mixed format.
            # log_message added for consistency with the sibling raises.
            raise errors.InvalidData(
                "Component '{0}' has mixed format of "
                "requires.".format(component['name']),
                log_message=True
            )
        else:
            check_result = cls._check_predicates(component_requires,
                                                 components_names,
                                                 available_components_names)
            if check_result:
                raise errors.InvalidData(
                    "Requirements was not satisfied for component '{0}': "
                    "{1}({2})".format(
                        component['name'],
                        check_result['failed_predicate'],
                        sorted(check_result['items'])),
                    log_message=True
                )

    @classmethod
    def _check_predicates(cls, requires, components_names_set,
                          available_components_names):
        """Check that all predicate conditions are satisfied.

        :param requires: list of predicates condition data
        :type requires: list of dicts
        :param components_names_set: set of selected components names
        :type components_names_set: set
        :param available_components_names: names of all available components
        :type available_components_names: set
        :returns: None when all predicates pass, otherwise the first failed
            predicate in format:
            {
                'failed_predicate': 'one_of|any_of|none_of|all_of'
                'items': <list of predicate items names>
            }
        """
        for require in requires:
            for predicate in require:
                predicate_func = PREDICATE_FUNCTION_MAP[predicate]
                matched_components = set()
                matched_available_components = set()
                predicate_items = require[predicate]['items']
                for item in predicate_items:
                    # Resolve each item (possibly wildcarded) against both
                    # the selected and the available component name sets.
                    matched_components.update(
                        cls._resolve_names_for_dependency(
                            components_names_set, item)
                    )
                    matched_available_components.update(
                        cls._resolve_names_for_dependency(
                            available_components_names, item)
                    )
                predicate_result = predicate_func(
                    matched_components,
                    matched_available_components
                )
                if not predicate_result:
                    return {'failed_predicate': predicate,
                            'items': predicate_items}

    @staticmethod
    def _check_required_component_types(components_types_set,
                                        required_components_types_set):
        """Raise InvalidData if any required component type is missing."""
        missed_types_set = required_components_types_set - components_types_set
        if missed_types_set:
            raise errors.InvalidData(
                "Components with {0} types are required but wasn't found "
                "in data.".format(sorted(missed_types_set)),
                log_message=True
            )

    @staticmethod
    def _resolve_names_for_dependency(components_names, dependency_name):
        """Return the subset of components_names matching dependency_name.

        A '*' in dependency_name turns it into a prefix match on the part
        before the wildcard; otherwise an exact-name match is used.
        """
        if '*' in dependency_name:
            prefix = dependency_name.split('*', 1)[0]
            return {name for name in components_names
                    if name.startswith(prefix)}
        return {name for name in components_names
                if name == dependency_name}

    @staticmethod
    def _get_component_type(component):
        """Component type is the first colon-separated segment of the name."""
        return component['name'].split(':', 1)[0]
|
||||
|
|
Loading…
Reference in New Issue