Add plugin deployment tasks to deployment graph

`get_deployment_tasks` for the nailgun cluster object is modified so that
by default (if no deployment graph was defined for the cluster) it now
returns a combined list of deployment tasks for the release and for all
plugins enabled for the cluster.

Change-Id: I4e0b063a80a6d94bfb792b80cd6880a2a471f135
Implements: blueprint role-as-a-plugin
This commit is contained in:
Artem Roma 2015-07-06 11:57:26 +03:00
parent bb362555db
commit 86e90616dc
11 changed files with 403 additions and 13 deletions

View File

@ -91,6 +91,8 @@ default_messages = {
# Plugin errors
"PackageVersionIsNotCompatible": "Package version is not compatible",
"PluginsTasksOverlapping":
"There is task with same id supplied by another plugin",
# unknown
"UnknownError": "Unknown error"

View File

@ -788,14 +788,19 @@ class Cluster(NailgunObject):
- if there is deployment_graph defined by user - use it instead of
defined
- if instance assigned for patching - return custom patching graph
- else return default for release deployment graph
- else return default for release and enabled plugins
deployment graph
"""
if instance.deployment_tasks:
return instance.deployment_tasks
elif instance.pending_release_id:
return yaml.load(graph_configuration.PATCHING)
else:
return Release.get_deployment_tasks(instance.release)
release_deployment_tasks = \
Release.get_deployment_tasks(instance.release)
plugin_deployment_tasks = \
PluginManager.get_plugins_deployment_tasks(instance)
return release_deployment_tasks + plugin_deployment_tasks
@classmethod
def create_vmware_attributes(cls, instance):

View File

@ -177,6 +177,8 @@ class Release(NailgunObject):
elif env_version.startswith('5.1') or env_version.startswith('6.0'):
return yaml.load(graph_configuration.DEPLOYMENT_51_60)
return []
@classmethod
def get_min_controller_count(cls, instance):
    """Return the minimal number of controller nodes for the release.

    The value is taken from the release's roles metadata, under
    'controller' -> 'limits' -> 'min'.
    """
    controller_limits = instance.roles_metadata['controller']['limits']
    return controller_limits['min']

View File

@ -134,7 +134,11 @@ class PluginsPreDeploymentHooksSerializer(BasePluginDeploymentHooksSerializer):
repo_tasks = []
for plugin in plugins:
uids = get_uids_for_tasks(self.nodes, plugin.tasks)
# TODO(aroma): remove this concatenation when unified way of
# processing will be introduced for deployment tasks and existing
# plugin tasks
tasks_of_plugin = plugin.tasks + plugin.deployment_tasks
uids = get_uids_for_tasks(self.nodes, tasks_of_plugin)
# If there are no nodes for tasks execution
# or if there are no files in repository
@ -177,7 +181,11 @@ class PluginsPreDeploymentHooksSerializer(BasePluginDeploymentHooksSerializer):
def sync_scripts(self, plugins):
tasks = []
for plugin in plugins:
uids = get_uids_for_tasks(self.nodes, plugin.tasks)
# TODO(aroma): remove this concatenation when unified way of
# processing will be introduced for deployment tasks and existing
# plugin tasks
tasks_of_plugin = plugin.tasks + plugin.deployment_tasks
uids = get_uids_for_tasks(self.nodes, tasks_of_plugin)
if not uids:
continue
tasks.append(

View File

@ -37,15 +37,22 @@ def get_uids_for_tasks(nodes, tasks):
"""
roles = []
for task in tasks:
if task['role'] == consts.ALL_ROLES:
# plugin tasks may store information about node
# role not only in `role` key but also in `groups`
task_role = task.get('role', task.get('groups'))
if task_role == consts.ALL_ROLES:
return get_uids_for_roles(nodes, consts.ALL_ROLES)
elif task['role'] == consts.MASTER_ROLE:
elif task_role == consts.MASTER_ROLE:
return ['master']
elif isinstance(task['role'], list):
roles.extend(task['role'])
else:
elif isinstance(task_role, list):
roles.extend(task_role)
# if task has 'skipped' status it is allowed that 'roles' and
# 'groups' are not be specified
elif task['type'] != consts.ORCHESTRATOR_TASK_TYPES.skipped:
logger.warn(
'Wrong roles format, `roles` should be a list or "*" in %s',
'Wrong roles format in task %s: either '
'`roles` or `groups` must be specified and contain '
'a list of roles or "*"',
task)
return get_uids_for_roles(nodes, roles)

View File

@ -160,7 +160,7 @@ class PluginAdapterBase(object):
u'weight': 70, u'label': self.plugin.title,
'plugin_id': self.plugin.id}
def set_cluster_tasks(self, cluster):
def set_cluster_tasks(self):
"""Loads plugins provided tasks from tasks config file and
sets them to instance tasks variable.
"""
@ -192,6 +192,14 @@ class PluginAdapterBase(object):
return settings.PLUGINS_SLAVES_SCRIPTS_PATH.format(
plugin_name=self.path_name)
@property
def deployment_tasks(self):
    """Plugin deployment tasks with a defaulted `cwd` parameter.

    Each task that does not already define `cwd` in its parameters
    gets the plugin's slave scripts path; explicitly set values are
    left untouched.
    """
    def _ensure_cwd(task):
        task['parameters'].setdefault('cwd', self.slaves_scripts_path)
        return task

    return [_ensure_cwd(task) for task in self.plugin.deployment_tasks]
def get_release_info(self, release):
"""Returns plugin release information which corresponds to
a provided release.

View File

@ -13,7 +13,9 @@
# under the License.
import six
from six.moves import map
from nailgun.errors import errors
from nailgun.logger import logger
from nailgun.objects.plugin import Plugin
from nailgun.objects.plugin import PluginCollection
@ -76,7 +78,7 @@ class PluginManager(object):
cluster_plugins = []
for plugin_db in cluster.plugins:
plugin_adapter = wrap_plugin(plugin_db)
plugin_adapter.set_cluster_tasks(cluster)
plugin_adapter.set_cluster_tasks()
cluster_plugins.append(plugin_adapter)
return cluster_plugins
@ -93,3 +95,29 @@ class PluginManager(object):
for plugin in plugins:
plugin_adapter = wrap_plugin(plugin)
plugin_adapter.sync_metadata_to_db()
@classmethod
def get_plugins_deployment_tasks(cls, cluster):
    """Gather deployment tasks supplied by all plugins of the cluster.

    :param cluster: cluster DB object whose plugins are inspected
    :returns: list of deployment task dicts collected from every plugin
    :raises errors.PluginsTasksOverlapping: if two plugins declare
        deployment tasks with the same id
    """
    collected_tasks = []
    # task id -> full name of the plugin that introduced it first
    task_owner = {}
    for adapter in map(wrap_plugin, cluster.plugins):
        adapter_tasks = adapter.deployment_tasks
        for task in adapter_tasks:
            task_id = task['id']
            if task_id in task_owner:
                raise errors.PluginsTasksOverlapping(
                    'Plugin {0} is overlapping with plugin {1} '
                    'by introducing the same deployment task with '
                    'id {2}'
                    .format(adapter.full_name,
                            task_owner[task_id],
                            task_id)
                )
            task_owner[task_id] = adapter.full_name
        collected_tasks.extend(adapter_tasks)
    return collected_tasks

View File

@ -24,6 +24,8 @@ from nailgun import objects
from nailgun.orchestrator.deployment_graph import AstuteGraph
from nailgun.orchestrator.deployment_serializers import \
get_serializer_for_cluster
from nailgun.orchestrator import stages
from nailgun.test import base
from nailgun.test.integration.test_orchestrator_serializer import \
BaseDeploymentSerializer
@ -309,3 +311,264 @@ class TestDeploymentSerializationForNovaNetwork70(BaseDeploymentSerializer):
def test_generate_vmware_attributes_data(self):
self.check_generate_vmware_attributes_data()
class TestPluginDeploymentTasksInjection(base.BaseIntegrationTest):
    """Integration tests checking that deployment tasks supplied by
    plugins are merged into the cluster deployment graph and appear
    in the serialized output at the expected stages.
    """

    # Minimal release graph: the six standard deployment stages, a
    # 'primary-controller' group, and two fake puppet tasks executed
    # between deploy_start and deploy_end.
    release_deployment_tasks = [
        {'id': 'pre_deployment_start',
         'type': 'stage'},
        {'id': 'pre_deployment_end',
         'type': 'stage',
         'requires': ['pre_deployment_start']},
        {'id': 'deploy_start',
         'type': 'stage'},
        {'id': 'deploy_end',
         'requires': ['deploy_start'],
         'type': 'stage'},
        {'id': 'post_deployment_start',
         'type': 'stage',
         'requires': ['deploy_end']},
        {'id': 'post_deployment_end',
         'type': 'stage',
         'requires': ['post_deployment_start']},
        {'id': 'primary-controller',
         'parameters': {'strategy': {'type': 'one_by_one'}},
         'required_for': ['deploy_end'],
         'requires': ['deploy_start'],
         'role': ['primary-controller'],
         'type': 'group'},
        {'id': 'first-fake-depl-task',
         'required_for': ['deploy_end'],
         'requires': ['deploy_start'],
         'type': 'puppet',
         'parameters': {'puppet_manifest': 'first-fake-depl-task',
                        'puppet_modules': 'test',
                        'timeout': 0},
         'groups': ['primary-controller']},
        {'id': 'second-fake-depl-task',
         'required_for': ['deploy_end'],
         'requires': ['deploy_start'],
         'type': 'puppet',
         'parameters': {'puppet_manifest': 'second-fake-depl-task',
                        'puppet_modules': 'test',
                        'timeout': 0},
         'groups': ['primary-controller']},
    ]

    def setUp(self):
        super(TestPluginDeploymentTasksInjection, self).setUp()
        # Every test works against a freshly created cluster.
        self.cluster = self._prepare_cluster()

    def _prepare_cluster(self):
        """Create an ha_compact neutron/vlan cluster with one pending
        primary controller node and the custom release graph above.
        """
        self.env.create(
            release_kwargs={
                'version': '2015.1.0-7.0',
                'deployment_tasks': self.release_deployment_tasks,
            },
            cluster_kwargs={
                'mode': 'ha_compact',
                'net_provider': 'neutron',
                'net_segment_type': 'vlan',
            },
            nodes_kwargs=[
                {'roles': ['controller'], 'primary_roles': ['controller'],
                 'pending_addition': True}
            ]
        )
        return self.env.clusters[0]

    def prepare_plugins_for_cluster(self, cluster, plugins_kw_list):
        """Create one plugin per metadata dict and attach them all
        to the given cluster.
        """
        plugins = [
            self._create_plugin(**kw)
            for kw in plugins_kw_list
        ]
        cluster.plugins.extend(plugins)
        self.db.flush()

    def _create_plugin(self, **plugin_kwargs):
        """Build and persist one plugin whose release info matches the
        cluster's release (version, OS, both ha and multinode modes).
        """
        plugin_kwargs.update(
            {
                'releases': [
                    {
                        'repository_path': 'plugin_test',
                        'version': self.cluster.release.version,
                        'os':
                        self.cluster.release.operating_system.lower(),
                        'mode': ['ha', 'multinode'],
                        'deployment_scripts_path': 'plugin_test/'
                    },
                ],
            }
        )
        plugin_data = self.env.get_default_plugin_metadata(
            **plugin_kwargs
        )
        return objects.Plugin.create(plugin_data)

    def _check_pre_deployment_tasks(self, serialized, task_type):
        """Assert that a task of ``task_type`` exists in the serialized
        pre-deployment output and targets all cluster nodes.
        """
        self.assertTrue(serialized)
        needed_task = next(
            t for t in serialized
            if t['type'] == task_type)
        self.assertIsNotNone(needed_task)
        self.assertIsNotNone(needed_task.get('parameters'))
        self.assertItemsEqual(
            (n.uid for n in self.cluster.nodes),
            needed_task['uids']
        )

    def test_plugin_depl_tasks_proper_injections(self):
        """A plugin task wired between two release tasks must be
        serialized with a priority between theirs.
        """
        self.prepare_plugins_for_cluster(
            self.cluster,
            [
                {
                    'name': 'between_rel_tasks',
                    'deployment_tasks': [
                        {
                            'id': 'between-rel-tasks',
                            'type': 'puppet',
                            'groups': ['primary-controller'],
                            'requires': ['first-fake-depl-task'],
                            'required_for': ['second-fake-depl-task'],
                            'parameters': {
                                'puppet_manifest': 'between-rel-tasks',
                                'puppet_modules': 'test',
                                'timeout': 0,
                            }
                        },
                    ],
                },
            ]
        )
        graph = AstuteGraph(self.cluster)
        objects.NodeCollection.prepare_for_deployment(self.cluster.nodes)
        serializer = \
            get_serializer_for_cluster(self.cluster)(graph)
        serialized = serializer.serialize(self.cluster, self.cluster.nodes)
        serialized_tasks = serialized[0]['tasks']
        # Priorities are expected in execution order with a step of 100.
        expected_priority = {
            100: 'first-fake-depl-task',
            200: 'between-rel-tasks',
            300: 'second-fake-depl-task',
        }
        for task in serialized_tasks:
            task_identificator = task['parameters']['puppet_manifest']
            self.assertEqual(
                task_identificator, expected_priority[task['priority']]
            )

    def test_plugin_depl_task_overwrite_from_rel(self):
        """A plugin task reusing a release task's id takes that task's
        place in the graph (it ends up in the first slot, priority 100).
        """
        self.prepare_plugins_for_cluster(
            self.cluster,
            [
                {
                    'name': 'between_rel_tasks',
                    'deployment_tasks': [
                        {
                            'id': 'first-fake-depl-task',
                            'type': 'puppet',
                            'groups': ['primary-controller'],
                            'requires': ['deploy_start'],
                            'required_for': ['second-fake-depl-task'],
                            'parameters': {
                                'puppet_manifest': 'plugin_task',
                                'puppet_modules': 'test',
                                'timeout': 0,
                            }
                        },
                    ],
                },
            ]
        )
        graph = AstuteGraph(self.cluster)
        objects.NodeCollection.prepare_for_deployment(self.cluster.nodes)
        serializer = \
            get_serializer_for_cluster(self.cluster)(graph)
        serialized = serializer.serialize(self.cluster, self.cluster.nodes)
        serialized_tasks = serialized[0]['tasks']
        needed_task_priority = next(
            t['priority'] for t in serialized_tasks
            if t['parameters']['puppet_manifest'] == 'plugin_task'
        )
        # first task in graph has priority equal 100
        self.assertEqual(needed_task_priority, 100)

    def test_plugin_depl_task_in_pre_depl(self):
        """A plugin task attached to the pre-deployment stage produces
        sync and upload_file tasks in the pre-deployment serialization.
        """
        self.prepare_plugins_for_cluster(
            self.cluster,
            [
                {
                    'name': 'pre_depl_plugin_task',
                    'deployment_tasks': [
                        {
                            'id': 'pre-depl-plugin-task',
                            'type': 'puppet',
                            'role': ['primary-controller'],
                            'requires': ['pre_deployment_start'],
                            'required_for': ['pre_deployment_end'],
                            'parameters': {
                                'puppet_manifest': 'pre_depl_plugin_task',
                                'puppet_modules': 'test',
                                'timeout': 0,
                            }
                        },
                    ],
                },
            ]
        )
        graph = AstuteGraph(self.cluster)
        objects.NodeCollection.prepare_for_deployment(self.cluster.nodes)
        # glob is mocked so the plugin repository appears to contain files
        with mock.patch('nailgun.plugins.adapters.glob.glob',
                        mock.Mock(return_value='path/to/test/repos')):
            pre_deployment = stages.pre_deployment_serialize(
                graph, self.cluster, self.cluster.nodes)
        for task_type in (consts.ORCHESTRATOR_TASK_TYPES.sync,
                          consts.ORCHESTRATOR_TASK_TYPES.upload_file):
            self._check_pre_deployment_tasks(pre_deployment, task_type)

    def test_plugin_depl_task_in_post_depl(self):
        """A plugin task attached to the post-deployment stage shows up
        in the post-deployment serialization.
        """
        self.prepare_plugins_for_cluster(
            self.cluster,
            [
                {
                    'name': 'post-depl-plugin-task',
                    'deployment_tasks': [
                        {
                            'id': 'post-depl-plugin-task',
                            'type': 'puppet',
                            'role': ['primary-controller'],
                            'requires': ['post_deployment_start'],
                            'required_for': ['post_deployment_end'],
                            'parameters': {
                                'puppet_manifest': 'post_depl_plugin_task',
                                'puppet_modules': 'test',
                                'timeout': 0,
                            }
                        },
                    ],
                },
            ]
        )
        graph = AstuteGraph(self.cluster)
        objects.NodeCollection.prepare_for_deployment(self.cluster.nodes)
        post_deployment = stages.post_deployment_serialize(
            graph, self.cluster, self.cluster.nodes)
        self.assertEqual(
            post_deployment[0]['parameters']['puppet_manifest'],
            'post_depl_plugin_task'
        )

View File

@ -743,6 +743,14 @@ class TestClusterObject(BaseTestCase):
{'roles': ['compute']},
{'roles': ['cinder']}])
def _create_cluster_with_plugins(self, plugins_kw_list):
    """Create a cluster and attach one plugin per metadata dict."""
    cluster = self.env.create_cluster(api=False)
    plugins = [objects.Plugin.create(kw) for kw in plugins_kw_list]
    cluster.plugins.extend(plugins)
    return cluster
def test_all_controllers(self):
    """The default environment must contain two controller nodes."""
    controllers = objects.Cluster.get_nodes_by_role(
        self.env.clusters[0], 'controller')
    self.assertEqual(len(controllers), 2)
@ -780,6 +788,46 @@ class TestClusterObject(BaseTestCase):
objects.Cluster.get_network_roles(cluster),
cluster.release.network_roles_metadata)
def test_get_deployment_tasks(self):
    """Cluster tasks must include both release and plugin tasks."""
    plugin_tasks = self.env.get_default_plugin_deployment_tasks()
    metadata = self.env.get_default_plugin_metadata(
        deployment_tasks=plugin_tasks)
    cluster = self._create_cluster_with_plugins([metadata])

    result = objects.Cluster.get_deployment_tasks(cluster)

    # the plugin's task must be present among the cluster tasks
    self.assertIn(plugin_tasks[0]['id'], [t['id'] for t in result])
    # and the total count is release tasks plus plugin tasks
    expected_count = (len(cluster.release.deployment_tasks) +
                      len(cluster.plugins[0].deployment_tasks))
    self.assertEqual(len(result), expected_count)
def test_get_deployment_tasks_overlapping_error(self):
    """Two plugins declaring an identical task id must raise an error."""
    shared_tasks = self.env.get_default_plugin_deployment_tasks()
    metadata_list = []
    for plugin_name in ('test_plugin_first', 'test_plugin_second'):
        metadata_list.append(
            self.env.get_default_plugin_metadata(
                name=plugin_name,
                deployment_tasks=shared_tasks))
    cluster = self._create_cluster_with_plugins(metadata_list)

    expected_message = (
        'Plugin test_plugin_second-0.1.0 is overlapping with plugin '
        'test_plugin_first-0.1.0 by introducing the same '
        'deployment task with id role-name'
    )
    with self.assertRaisesRegexp(errors.PluginsTasksOverlapping,
                                 expected_message):
        objects.Cluster.get_deployment_tasks(cluster)
class TestClusterObjectGetNetworkManager(BaseTestCase):
def setUp(self):

View File

@ -136,6 +136,23 @@ class TestPluginBase(base.BaseTestCase):
self.assertEqual(
getattr(self.plugin, key), val)
def test_get_deployment_tasks(self):
    """Adapter defaults each task's `cwd` to the slave scripts path."""
    self.plugin.deployment_tasks = \
        self.env.get_default_plugin_deployment_tasks()
    first_task = self.plugin_adapter.deployment_tasks[0]
    self.assertEqual(
        first_task['parameters'].get('cwd'),
        self.plugin_adapter.slaves_scripts_path)
def test_get_deployment_tasks_params_not_changed(self):
    """An explicitly set `cwd` must not be overwritten by the adapter."""
    custom_cwd = 'path/to/some/dir'
    self.plugin.deployment_tasks = \
        self.env.get_default_plugin_deployment_tasks(
            parameters={'cwd': custom_cwd}
        )
    first_task = self.plugin_adapter.deployment_tasks[0]
    self.assertEqual(first_task['parameters'].get('cwd'), custom_cwd)
class TestPluginV1(TestPluginBase):

View File

@ -197,7 +197,9 @@ class TestPluginsPreDeploymentHooksSerializer(base.BaseTestCase):
self.hook = PluginsPreDeploymentHooksSerializer(
self.cluster,
self.nodes)
self.plugins = [mock.Mock()]
plugin = mock.Mock(tasks=[], deployment_tasks=[])
self.plugins = [plugin]
@mock.patch(
'nailgun.orchestrator.plugins_serializers.get_uids_for_tasks',