  1. # -*- coding: utf-8 -*-
  2. # Copyright 2014 Mirantis, Inc.
  3. #
  4. # Licensed under the Apache License, Version 2.0 (the "License"); you may
  5. # not use this file except in compliance with the License. You may obtain
  6. # a copy of the License at
  7. #
  8. # http://www.apache.org/licenses/LICENSE-2.0
  9. #
  10. # Unless required by applicable law or agreed to in writing, software
  11. # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
  12. # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
  13. # License for the specific language governing permissions and limitations
  14. # under the License.
  15. import mock
  16. import yaml
  17. from nailgun import consts
  18. from nailgun import errors
  19. from nailgun.extensions.network_manager.serializers.base \
  20. import NetworkDeploymentSerializer
  21. from nailgun import objects
  22. from nailgun.orchestrator import orchestrator_graph
  23. from nailgun.orchestrator import tasks_serializer
  24. from nailgun.test import base
  25. def update_nodes_net_info(cluster, nodes):
  26. return nodes
  27. class BaseTaskSerializationTest(base.BaseTestCase):
  28. TASKS = """"""
  29. def setUp(self):
  30. super(BaseTaskSerializationTest, self).setUp()
  31. self.release = self.env.create_release(
  32. api=False)
  33. self.cluster = self.env.create_cluster(
  34. api=False, release_id=self.release.id)
  35. self.nodes = [
  36. self.env.create_node(
  37. roles=['controller'], cluster_id=self.cluster.id),
  38. self.env.create_node(
  39. roles=['controller'], primary_tags=['controller'],
  40. cluster_id=self.cluster.id),
  41. self.env.create_node(
  42. roles=['cinder', 'compute'], cluster_id=self.cluster.id)]
  43. self.all_uids = [n.uid for n in self.nodes]
  44. # imitate behaviour of old-style tasks merge where cluster-level
  45. # deployment graph is overriding all other graphs.
  46. dg = objects.DeploymentGraph.get_for_model(self.cluster.release)
  47. objects.DeploymentGraph.update(dg, {'tasks': []})
  48. dg = objects.DeploymentGraph.get_for_model(self.cluster)
  49. objects.DeploymentGraph.update(dg, {'tasks': yaml.load(self.TASKS)})
  50. class BaseTaskSerializationTestUbuntu(base.BaseTestCase):
  51. TASKS = """"""
  52. def setUp(self):
  53. super(BaseTaskSerializationTestUbuntu, self).setUp()
  54. self._requests_mock = mock.patch(
  55. 'nailgun.utils.debian.requests.get',
  56. return_value=mock.Mock(text='Archive: test'))
  57. self._requests_mock.start()
  58. self.release = self.env.create_release(
  59. api=False, attributes_metadata=self.env.read_fixtures(
  60. ['openstack'])[1]['fields']['attributes_metadata'])
  61. self.cluster = self.env.create_cluster(
  62. api=False, release_id=self.release.id)
  63. self.nodes = [
  64. self.env.create_node(
  65. roles=['controller'], cluster_id=self.cluster.id),
  66. self.env.create_node(
  67. roles=['controller'], cluster_id=self.cluster.id),
  68. self.env.create_node(
  69. roles=['cinder', 'compute'], cluster_id=self.cluster.id)]
  70. self.all_uids = [n.uid for n in self.nodes]
  71. dg = objects.DeploymentGraph.get_for_model(self.cluster)
  72. objects.DeploymentGraph.update(dg, {'tasks': yaml.load(self.TASKS)})
  73. def tearDown(self):
  74. self._requests_mock.stop()
  75. super(BaseTaskSerializationTestUbuntu, self).tearDown()
  76. class TestHooksSerializersUbuntu(BaseTaskSerializationTestUbuntu):
  77. def test_create_repo_ubuntu(self):
  78. task_config = {'id': 'upload_mos_repos',
  79. 'type': 'upload_file',
  80. 'role': '*'}
  81. self.cluster.release.operating_system = consts.RELEASE_OS.ubuntu
  82. task = tasks_serializer.UploadMOSRepo(
  83. task_config, self.cluster, self.nodes)
  84. serialized = list(task.serialize())
  85. self.assertEqual(len(serialized), 17)
  86. self.assertEqual(serialized[0]['type'], 'shell')
  87. self.assertEqual(
  88. serialized[0]['parameters']['cmd'], '> /etc/apt/sources.list')
  89. self.assertEqual(serialized[1]['type'], 'upload_file')
  90. self.assertEqual(serialized[2]['type'], 'upload_file')
  91. self.assertEqual(serialized[3]['type'], 'upload_file')
  92. self.assertEqual(serialized[4]['type'], 'upload_file')
  93. self.assertEqual(serialized[5]['type'], 'upload_file')
  94. self.assertEqual(serialized[6]['type'], 'upload_file')
  95. self.assertEqual(serialized[7]['type'], 'upload_file')
  96. self.assertEqual(serialized[8]['type'], 'upload_file')
  97. self.assertEqual(serialized[9]['type'], 'upload_file')
  98. self.assertEqual(serialized[10]['type'], 'upload_file')
  99. self.assertEqual(serialized[11]['type'], 'upload_file')
  100. self.assertEqual(serialized[12]['type'], 'upload_file')
  101. self.assertEqual(serialized[13]['type'], 'upload_file')
  102. self.assertEqual(serialized[14]['type'], 'upload_file')
  103. self.assertEqual(serialized[15]['type'], 'upload_file')
  104. self.assertEqual(serialized[16]['type'], 'shell')
  105. self.assertEqual(serialized[16]['parameters']['cmd'], 'apt-get update')
  106. self.assertItemsEqual(serialized[3]['uids'], self.all_uids)
class TestHooksSerializers(BaseTaskSerializationTest):
    """Per-serializer tests for the pre/post deployment hook classes in
    ``tasks_serializer`` (rsync, repo upload, key generation/copying,
    node info upload, configuration upload, etc.).
    """

    def test_sync_puppet(self):
        # RsyncPuppet must substitute {OPENSTACK_VERSION} in 'src' with
        # the cluster's release version.
        task_config = {'id': 'rsync_mos_puppet',
                       'type': 'sync',
                       'role': '*',
                       'parameters': {'src': '/etc/puppet/{OPENSTACK_VERSION}',
                                      'dst': '/etc/puppet'}}
        task = tasks_serializer.RsyncPuppet(
            task_config, self.cluster, self.nodes)
        serialized = next(task.serialize())
        self.assertEqual(serialized['type'], 'sync')
        self.assertIn(
            self.cluster.release.version,
            serialized['parameters']['src'])

    def test_create_repo_centos(self):
        """Verify that repository is created with correct metadata."""
        task_config = {'id': 'upload_mos_repos',
                       'type': 'upload_file',
                       'role': '*'}
        self.cluster.release.operating_system = consts.RELEASE_OS.centos
        task = tasks_serializer.UploadMOSRepo(
            task_config, self.cluster, self.nodes)
        serialized = list(task.serialize())
        # CentOS flow: 4 repo file uploads followed by a yum cache flush.
        self.assertEqual(len(serialized), 5)
        self.assertEqual(serialized[0]['type'], 'upload_file')
        self.assertEqual(serialized[1]['type'], 'upload_file')
        self.assertEqual(serialized[2]['type'], 'upload_file')
        self.assertEqual(serialized[3]['type'], 'upload_file')
        self.assertEqual(serialized[4]['type'], 'shell')
        self.assertEqual(serialized[4]['parameters']['cmd'], 'yum clean all')
        self.assertItemsEqual(serialized[4]['uids'], self.all_uids)

    def test_serialize_rados_with_ceph(self):
        # With a ceph-osd node present RestartRadosGW should produce
        # exactly one shell task carrying the configured command.
        task_config = {'id': 'restart_radosgw',
                       'type': 'shell',
                       'role': ['controller', 'primary-controller'],
                       'stage': 'post-deployment',
                       'parameters': {'cmd': '/cmd.sh', 'timeout': 60}}
        self.nodes.append(self.env.create_node(
            roles=['ceph-osd'], cluster_id=self.cluster.id))
        task = tasks_serializer.RestartRadosGW(
            task_config, self.cluster, self.nodes)
        serialized = list(task.serialize())
        self.assertEqual(len(serialized), 1)
        self.assertEqual(serialized[0]['type'], 'shell')
        self.assertEqual(
            serialized[0]['parameters']['cmd'],
            task_config['parameters']['cmd'])

    def test_serialzize_rados_wo_ceph(self):
        # Without any ceph-osd node the task must not execute at all.
        task_config = {'id': 'restart_radosgw',
                       'type': 'shell',
                       'role': ['controller', 'primary-controller'],
                       'stage': 'post-deployment',
                       'parameters': {'cmd': '/cmd.sh', 'timeout': 60}}
        task = tasks_serializer.RestartRadosGW(
            task_config, self.cluster, self.nodes)
        self.assertFalse(task.should_execute())

    # Decorators apply bottom-up: m_roles patches Node.all_tags,
    # m_update_nodes patches update_nodes_net_info.
    @mock.patch.object(NetworkDeploymentSerializer, 'update_nodes_net_info')
    @mock.patch.object(objects.Node, 'all_tags')
    def test_upload_nodes_info(self, m_roles, m_update_nodes):
        # mark one node as ready so we can test for duplicates
        self.env.nodes[0].status = consts.NODE_STATUSES.ready
        self.db.flush()
        # add one node that will not be deployed
        discovered_node = self.env.create_node(
            roles=['compute'], cluster_id=self.cluster.id,
            status=consts.NODE_STATUSES.discover)
        m_roles.return_value = ['role_1', ]
        m_update_nodes.side_effect = lambda cluster, nodes: nodes
        self.cluster.release.version = '2014.1.1-6.1'
        dst = '/some/path/file.yaml'
        task_config = {
            'id': 'upload_nodes_info',
            'type': 'upload_file',
            'role': '*',
            'parameters': {
                'path': dst,
            },
        }
        task = tasks_serializer.UploadNodesInfo(
            task_config, self.cluster, self.nodes)
        serialized_tasks = list(task.serialize())
        self.assertEqual(len(serialized_tasks), 1)
        serialized_task = serialized_tasks[0]
        self.assertEqual(serialized_task['type'], 'upload_file')
        self.assertItemsEqual(serialized_task['uids'], self.all_uids)
        self.assertNotIn(discovered_node.uid, self.all_uids)
        self.assertEqual(serialized_task['parameters']['path'], dst)
        # The uploaded payload itself is YAML; the discovered node must
        # not appear in it either.
        serialized_nodes = yaml.safe_load(
            serialized_task['parameters']['data'])
        serialized_uids = [n['uid'] for n in serialized_nodes['nodes']]
        self.assertItemsEqual(serialized_uids, self.all_uids)
        self.assertNotIn(discovered_node.uid, serialized_uids)

    def test_upload_configuration(self):
        # One cluster-wide config, two role-scoped configs and one
        # node-scoped config should serialize into 5 upload_file tasks
        # targeted at the matching node sets.
        task_config = {
            'id': 'upload_configuration',
            'type': 'upload_file',
            'role': '*',
        }
        configs = [
            mock.Mock(config_type=consts.OPENSTACK_CONFIG_TYPES.cluster,
                      configuration={'cluster': {'value': 'foo'}}),
            mock.Mock(config_type=consts.OPENSTACK_CONFIG_TYPES.role,
                      node_role='compute',
                      configuration={'compute': {'value': 'bar'}}),
            mock.Mock(config_type=consts.OPENSTACK_CONFIG_TYPES.role,
                      node_role='cinder',
                      configuration={'cinder': {'value': 'buzz'}}),
            mock.Mock(config_type=consts.OPENSTACK_CONFIG_TYPES.node,
                      node_id=self.env.nodes[0].id,
                      configuration={'node_0': {'value': 'quux'}})
        ]
        task = tasks_serializer.UploadConfiguration(
            task_config, self.cluster, self.nodes, configs)
        serialized_tasks = list(task.serialize())
        self.assertEqual(len(serialized_tasks), 5)
        cluster_uids = []
        role_uids = []
        node_uids = []
        # Bucket target uids by the destination path of each upload.
        for task in serialized_tasks:
            self.assertEqual('upload_file', task['type'])
            if '/cluster' in task['parameters']['path']:
                cluster_uids.extend(task['uids'])
            if '/role' in task['parameters']['path']:
                role_uids.extend(task['uids'])
            if '/node' in task['parameters']['path']:
                node_uids.extend(task['uids'])
        self.assertItemsEqual(self.all_uids, cluster_uids)
        # nodes[2] is the cinder+compute node; role configs target it.
        self.assertItemsEqual([self.nodes[2].uid], role_uids)
        self.assertItemsEqual([self.nodes[0].uid], node_uids)

    def test_upload_configuration_merge_roles(self):
        # A node holding both 'compute' and 'cinder' roles gets the two
        # role configs merged; on key conflict the later ('cinder')
        # config wins.
        task_config = {
            'id': 'upload_configuration',
            'type': 'upload_file',
            'role': '*',
        }
        self.env.create_openstack_config(
            cluster_id=self.cluster.id,
            config_type=consts.OPENSTACK_CONFIG_TYPES.role,
            node_role='compute',
            configuration={
                'nova_config': {
                    'DEFAULT/param_a': {'value': 'value_compute'},
                },
                'keystone_config': {
                    'DEFAULT/param_a': {'value': 'value_compute'},
                }
            }),
        # NOTE(review): the trailing comma above turns the statement into
        # a 1-tuple expression — harmless, but likely unintended.
        self.env.create_openstack_config(
            cluster_id=self.cluster.id,
            config_type=consts.OPENSTACK_CONFIG_TYPES.role,
            node_role='cinder',
            configuration={
                'nova_config': {
                    'DEFAULT/param_b': {'value': 'value_cinder'}
                },
                'keystone_config': {
                    'DEFAULT/param_a': {'value': 'value_cinder'},
                }
            })
        task = tasks_serializer.UploadConfiguration(
            task_config, self.cluster, self.nodes)
        serialized_task = next(task.serialize())
        config = yaml.safe_load(
            serialized_task['parameters']['data'])
        self.assertEqual(config, {
            'configuration': {
                'nova_config': {
                    'DEFAULT/param_a': {'value': 'value_compute'},
                    'DEFAULT/param_b': {'value': 'value_cinder'}
                },
                'keystone_config': {
                    'DEFAULT/param_a': {'value': 'value_cinder'},
                }
            }})

    def test_update_hosts(self):
        # mark one node as ready so we can test for duplicates
        self.env.nodes[0].status = consts.NODE_STATUSES.ready
        self.db.flush()
        # add one node that will not be deployed
        discovered_node = self.env.create_node(
            roles=['compute'], cluster_id=self.cluster.id,
            status=consts.NODE_STATUSES.discover)
        task_config = {
            'id': 'upload_nodes_info',
            'type': 'puppet',
            'role': '*',
            'parameters': {
                'puppet_manifest': '/puppet/modules/modular/hosts/hosts.pp',
                'puppet_modules': '/puppet/modules',
                'timeout': 3600,
                'cwd': '/',
            },
        }
        task = tasks_serializer.UpdateHosts(
            task_config, self.cluster, self.nodes)
        serialized_tasks = list(task.serialize())
        self.assertEqual(len(serialized_tasks), 1)
        serialized_task = serialized_tasks[0]
        self.assertEqual(serialized_task['type'], 'puppet')
        self.assertItemsEqual(serialized_task['uids'], self.all_uids)
        self.assertNotIn(discovered_node.uid, self.all_uids)
        self.assertNotIn(discovered_node.uid, serialized_task['uids'])

    def test_copy_keys(self):
        # CopyKeys must substitute {CLUSTER_ID} in the source paths.
        task_config = {
            'id': 'copy_keys',
            'type': 'copy_files',
            'role': '*',
            'parameters': {
                'files': [{
                    'src': '/var/www/nailgun/keys/{CLUSTER_ID}/nova.key',
                    'dst': '/var/lib/astute/nova.key'}],
                'permissions': '0600',
                'dir_permissions': '0700'}}
        task = tasks_serializer.CopyKeys(
            task_config, self.cluster, self.nodes)
        serialized = next(task.serialize())
        self.assertEqual(serialized['type'], 'copy_files')
        files = []
        files.append({
            'src': '/var/www/nailgun/keys/{CLUSTER_ID}/nova.key'.
            format(CLUSTER_ID=self.cluster.id),
            'dst': '/var/lib/astute/nova.key'})
        self.assertItemsEqual(
            files, serialized['parameters']['files'])

    def test_copy_keys_no_nodes(self):
        # With no target nodes serialization yields nothing.
        task_config = {
            'id': 'copy_keys',
            'type': 'copy_files',
            'role': '*',
            'parameters': {
                'files': [{
                    'src': '/var/www/nailgun/keys/{CLUSTER_ID}/nova.key',
                    'dst': '/var/lib/astute/nova.key'}],
                'permissions': '0600',
                'dir_permissions': '0700'}}
        task = tasks_serializer.CopyKeys(
            task_config, self.cluster, [])
        with self.assertRaises(StopIteration):
            next(task.serialize())

    def test_generate_keys(self):
        # GenerateKeys substitutes {CLUSTER_ID} into the shell command.
        task_config = {
            'id': 'generate_keys',
            'type': 'shell',
            'role': 'master',
            'parameters': {
                'cmd': ("sh /etc/puppet/modules/osnailyfacter/modular/generate"
                        "_keys.sh -i {CLUSTER_ID} -o 'mongodb' -s 'neutron nov"
                        "a ceph mysql' -p /etc/fuel/keys/"),
                'timeout': 180}}
        task = tasks_serializer.GenerateKeys(
            task_config, self.cluster, self.nodes)
        serialized = next(task.serialize())
        self.assertEqual(serialized['type'], 'shell')
        self.assertEqual(
            serialized['parameters']['cmd'],
            "sh /etc/puppet/modules/osnailyfacter/modular/generate_keys.sh -i "
            "{CLUSTER_ID} -o 'mongodb' -s 'neutron nova ceph mysql' -p "
            "/etc/fuel/keys/".format(CLUSTER_ID=self.cluster.id))

    def test_copy_keys_ceph(self):
        # Same {CLUSTER_ID} substitution, ceph key variant.
        task_config = {
            'id': 'copy_keys_ceph',
            'type': 'copy_files',
            'role': '*',
            'parameters': {
                'files': [{
                    'src': '/var/lib/fuel/keys/{CLUSTER_ID}/ceph/ceph.pub',
                    'dst': '/var/lib/astute/ceph/ceph.pub'}],
                'permissions': '0600',
                'dir_permissions': '0700'}}
        task = tasks_serializer.CopyCephKeys(
            task_config, self.cluster, self.nodes)
        serialized = next(task.serialize())
        self.assertEqual(serialized['type'], 'copy_files')
        files = []
        files.append({
            'src': '/var/lib/fuel/keys/{CLUSTER_ID}/ceph/ceph.pub'.
            format(CLUSTER_ID=self.cluster.id),
            'dst': '/var/lib/astute/ceph/ceph.pub'})
        self.assertItemsEqual(
            files, serialized['parameters']['files'])

    def test_generate_keys_ceph(self):
        # GenerateCephKeys substitutes {CLUSTER_ID} into the command.
        task_config = {
            'id': 'generate_keys_ceph',
            'type': 'shell',
            'role': 'master',
            'parameters': {
                'cmd': ("sh /etc/puppet/modules/osnailyfacter/modular/astute/"
                        "generate_keys.sh -i {CLUSTER_ID} -s 'ceph' -p /var/"
                        "lib/fuel/keys/"),
                'timeout': 180}}
        task = tasks_serializer.GenerateCephKeys(
            task_config, self.cluster, self.nodes)
        serialized = next(task.serialize())
        self.assertEqual(serialized['type'], 'shell')
        self.assertEqual(
            serialized['parameters']['cmd'],
            "sh /etc/puppet/modules/osnailyfacter/modular/astute/"
            "generate_keys.sh -i {CLUSTER_ID} -s 'ceph' -p /var/"
            "lib/fuel/keys/".format(CLUSTER_ID=self.cluster.id))

    def test_generate_haproxy_keys(self):
        # GenerateHaproxyKeys substitutes both {CLUSTER_ID} and the
        # public SSL hostname ({CN_HOSTNAME}) from cluster attributes.
        cmd_template = "sh /etc/puppet/modules/osnailyfacter/modular/" \
                       "astute/generate_haproxy_keys.sh -i {CLUSTER_ID} " \
                       "-h {CN_HOSTNAME} -o 'haproxy' -p /var/lib/fuel/keys/"
        task_config = {
            'id': 'generate_haproxy_keys',
            'type': 'shell',
            'role': 'master',
            'parameters': {
                'cmd': cmd_template,
                'timeout': 180}}
        task = tasks_serializer.GenerateHaproxyKeys(
            task_config, self.cluster, self.nodes)
        serialized = next(task.serialize())
        self.assertEqual(serialized['type'], 'shell')
        editable = self.cluster.attributes.editable
        hostname = editable['public_ssl']['hostname']['value']
        expected_cmd = cmd_template.format(
            CLUSTER_ID=self.cluster.id, CN_HOSTNAME=hostname)
        self.assertEqual(expected_cmd, serialized['parameters']['cmd'])

    def test_serialize_ironic_upload_images(self):
        # Serializes only when a controller is among the targets; with
        # just an ironic node the serializer emits nothing.
        task_config = {'id': 'ironic_upload_images',
                       'type': 'shell',
                       'role': ['primary-controller'],
                       'stage': 'post-deployment',
                       'parameters': {'cmd': '{CLUSTER_ID}', 'timeout': 60}}
        task = tasks_serializer.IronicUploadImages(
            task_config, self.cluster, self.nodes)
        serialized = list(task.serialize())
        self.assertEqual(len(serialized), 1)
        self.assertEqual(serialized[0]['parameters']['cmd'],
                         str(self.cluster.id))
        new_node = self.env.create_node(
            roles=['ironic'], cluster_id=self.cluster.id)
        task = tasks_serializer.IronicUploadImages(
            task_config, self.cluster, [new_node])
        serialized = list(task.serialize())
        self.assertEqual(len(serialized), 0)

    def test_serialize_ironic_copy_bootstrap_key(self):
        # should_execute() is True only when an ironic node is targeted.
        task_config = {'id': 'ironic_copy_bootstrap_key',
                       'type': 'copy_files',
                       'role': ['ironic'],
                       'stage': 'post-deployment',
                       'parameters': {
                           'files': [{'src': '/1', 'dst': '/2'}],
                           'permissions': '0600',
                           'dir_permissions': '0700'}}
        task = tasks_serializer.IronicCopyBootstrapKey(
            task_config, self.cluster, self.nodes)
        self.assertFalse(task.should_execute())
        new_node = self.env.create_node(
            roles=['ironic'], cluster_id=self.cluster.id)
        task = tasks_serializer.IronicCopyBootstrapKey(
            task_config, self.cluster, [new_node])
        self.assertTrue(task.should_execute())
  461. class TestPreTaskSerialization(BaseTaskSerializationTestUbuntu):
  462. TASKS = ("""
  463. - id: pre_deployment_start
  464. type: stage
  465. - id: pre_deployment
  466. type: stage
  467. requires: [pre_deployment_start]
  468. - id: deploy_start
  469. type: stage
  470. requires: [pre_deployment]
  471. - id: upload_core_repos
  472. type: upload_file
  473. role: '*'
  474. required_for: [pre_deployment]
  475. requires: [pre_deployment_start]
  476. - id: rsync_core_puppet
  477. type: sync
  478. role: '*'
  479. required_for: [pre_deployment]
  480. requires: [upload_core_repos]
  481. parameters:
  482. src: /etc/puppet/{OPENSTACK_VERSION}/
  483. dst: /etc/puppet
  484. timeout: 180
  485. - id: copy_keys
  486. type: copy_files
  487. role: '*'
  488. required_for: [pre_deployment]
  489. requires: [generate_keys]
  490. parameters:
  491. files:
  492. - src: '{CLUSTER_ID}/nova.key'
  493. dst: 'nova.key'
  494. permissions: 0600
  495. dir_permissions: 0700
  496. - id: generate_keys
  497. type: shell
  498. role: 'master'
  499. requires: [pre_deployment_start]
  500. parameters:
  501. cmd: shorted_command
  502. timeout: 180
  503. """)
  504. def test_tasks_serialized_correctly(self):
  505. self.graph = orchestrator_graph.AstuteGraph(self.cluster)
  506. self.cluster.release.operating_system = consts.RELEASE_OS.ubuntu
  507. tasks = self.graph.pre_tasks_serialize(self.nodes)
  508. self.assertEqual(len(tasks), 20)
  509. tasks_tests = [('shell', ['master']),
  510. ('shell', sorted(self.all_uids)),
  511. ('upload_file', sorted(self.all_uids)),
  512. ('upload_file', sorted(self.all_uids)),
  513. ('upload_file', sorted(self.all_uids)),
  514. ('upload_file', sorted(self.all_uids)),
  515. ('upload_file', sorted(self.all_uids)),
  516. ('upload_file', sorted(self.all_uids)),
  517. ('upload_file', sorted(self.all_uids)),
  518. ('upload_file', sorted(self.all_uids)),
  519. ('upload_file', sorted(self.all_uids)),
  520. ('upload_file', sorted(self.all_uids)),
  521. ('upload_file', sorted(self.all_uids)),
  522. ('upload_file', sorted(self.all_uids)),
  523. ('upload_file', sorted(self.all_uids)),
  524. ('upload_file', sorted(self.all_uids)),
  525. ('upload_file', sorted(self.all_uids)),
  526. ('copy_files', sorted(self.all_uids)),
  527. ('sync', sorted(self.all_uids)),
  528. ('shell', sorted(self.all_uids))]
  529. tasks_output = []
  530. for task in tasks:
  531. tasks_output.append((task['type'], sorted(task['uids'])))
  532. self.assertItemsEqual(tasks_tests, tasks_output)
  533. class TestPostTaskSerialization(BaseTaskSerializationTest):
  534. TASKS = """
  535. - id: deploy_end
  536. type: stage
  537. - id: post_deployment_start
  538. type: stage
  539. requires: [deploy_end]
  540. - id: post_deployment
  541. type: stage
  542. requires: [post_deployment_start]
  543. - id: restart_radosgw
  544. type: shell
  545. role: [controller, primary-controller]
  546. required_for: [post_deployment]
  547. requires: [post_deployment_start]
  548. parameters:
  549. cmd: /etc/puppet/restart_radosgw.sh
  550. timeout: 180
  551. """
  552. def setUp(self):
  553. super(TestPostTaskSerialization, self).setUp()
  554. self.control_uids = [n.uid for n in self.nodes
  555. if 'controller' in n.roles]
  556. self.graph = orchestrator_graph.AstuteGraph(self.cluster)
  557. def test_post_task_serialize_all_tasks(self):
  558. self.nodes.append(self.env.create_node(
  559. roles=['ceph-osd'], cluster_id=self.cluster.id))
  560. tasks = self.graph.post_tasks_serialize(self.nodes)
  561. self.assertEqual(len(tasks), 1)
  562. self.assertItemsEqual(tasks[0]['uids'], self.control_uids)
  563. self.assertEqual(tasks[0]['type'], 'shell')
  564. class TestConditionalTasksSerializers(BaseTaskSerializationTest):
  565. TASKS = """
  566. - id: pre_deployment_start
  567. type: stage
  568. - id: pre_deployment
  569. type: stage
  570. requires: [pre_deployment_start]
  571. - id: deploy_start
  572. type: stage
  573. requires: [pre_deployment]
  574. - id: generic_uid
  575. type: upload_file
  576. role: '*'
  577. requires: [pre_deployment_start]
  578. condition: cluster:status == 'operational'
  579. parameters:
  580. cmd: /tmp/bash_script.sh
  581. timeout: 180
  582. - id: generic_second_task
  583. type: sync
  584. role: '*'
  585. requires: [generic_uid]
  586. required_for: [pre_deployment]
  587. condition: settings:enabled
  588. parameters:
  589. cmd: /tmp/bash_script.sh
  590. timeout: 180
  591. """
  592. def setUp(self):
  593. super(TestConditionalTasksSerializers, self).setUp()
  594. self.graph = orchestrator_graph.AstuteGraph(self.cluster)
  595. def test_conditions_satisfied(self):
  596. self.cluster.status = 'operational'
  597. self.cluster.attributes.editable = {'enabled': True}
  598. self.db.flush()
  599. tasks = self.graph.pre_tasks_serialize(self.nodes)
  600. self.assertEqual(len(tasks), 2)
  601. self.assertEqual(tasks[0]['type'], 'upload_file')
  602. self.assertEqual(tasks[1]['type'], 'sync')
  603. def test_conditions_not_satisfied(self):
  604. self.cluster.status = 'new'
  605. self.cluster.attributes.editable = {'enabled': False}
  606. self.db.flush()
  607. tasks = self.graph.pre_tasks_serialize(self.nodes)
  608. self.assertEqual(len(tasks), 0)
  609. class TestSerializationIsNotSupportedError(base.BaseTestCase):
  610. def test_error_is_raised(self):
  611. task_type = 'fake_type'
  612. task = {'id': 'fake_task', 'type': task_type}
  613. ts = tasks_serializer.TaskSerializers()
  614. err_msg = 'Serialization of type {0} is not supported.'\
  615. .format(task_type)
  616. with self.assertRaises(errors.SerializerNotSupported) as exc:
  617. ts.get_deploy_serializer(task)
  618. self.assertIn(err_msg, exc.exception.message)