Avoid using self.env.clusters[0] in tests where possible

Since self.env.create always returns a db object now,
we can use the returned value instead of the
self.env.clusters list.

It's a refactoring, so there is no bug or blueprint reference.

Change-Id: If7c84cb7124bcf08ef5ff110542012564190fae1
This commit is contained in:
Dmitry Guryanov 2016-04-19 18:07:31 +03:00
parent 93eb8fec2a
commit 9eeaa679d1
74 changed files with 434 additions and 659 deletions

View File

@ -77,12 +77,10 @@ class TestNodeReassignHandler(base.BaseIntegrationTest):
@mock.patch('nailgun.task.task.rpc.cast') @mock.patch('nailgun.task.task.rpc.cast')
def test_node_reassign_handler(self, mcast): def test_node_reassign_handler(self, mcast):
self.env.create( cluster = self.env.create(
cluster_kwargs={'api': False}, cluster_kwargs={'api': False},
nodes_kwargs=[{'status': consts.NODE_STATUSES.ready}]) nodes_kwargs=[{'status': consts.NODE_STATUSES.ready}])
self.env.create_cluster() seed_cluster = self.env.create_cluster()
cluster = self.env.clusters[0]
seed_cluster = self.env.clusters[1]
node_id = cluster.nodes[0]['id'] node_id = cluster.nodes[0]['id']
resp = self.app.post( resp = self.app.post(
@ -144,9 +142,7 @@ class TestNodeReassignHandler(base.BaseIntegrationTest):
self.assertEqual(node.roles, ['compute']) self.assertEqual(node.roles, ['compute'])
def test_node_reassign_handler_no_node(self): def test_node_reassign_handler_no_node(self):
self.env.create_cluster() cluster = self.env.create_cluster()
cluster = self.env.clusters[0]
resp = self.app.post( resp = self.app.post(
reverse('NodeReassignHandler', reverse('NodeReassignHandler',
@ -159,10 +155,9 @@ class TestNodeReassignHandler(base.BaseIntegrationTest):
resp.json_body['message']) resp.json_body['message'])
def test_node_reassing_handler_wrong_status(self): def test_node_reassing_handler_wrong_status(self):
self.env.create( cluster = self.env.create(
cluster_kwargs={'api': False}, cluster_kwargs={'api': False},
nodes_kwargs=[{'status': 'discover'}]) nodes_kwargs=[{'status': 'discover'}])
cluster = self.env.clusters[0]
resp = self.app.post( resp = self.app.post(
reverse('NodeReassignHandler', reverse('NodeReassignHandler',
@ -175,11 +170,10 @@ class TestNodeReassignHandler(base.BaseIntegrationTest):
"^Node should be in one of statuses:") "^Node should be in one of statuses:")
def test_node_reassing_handler_wrong_error_type(self): def test_node_reassing_handler_wrong_error_type(self):
self.env.create( cluster = self.env.create(
cluster_kwargs={'api': False}, cluster_kwargs={'api': False},
nodes_kwargs=[{'status': 'error', nodes_kwargs=[{'status': 'error',
'error_type': 'provision'}]) 'error_type': 'provision'}])
cluster = self.env.clusters[0]
resp = self.app.post( resp = self.app.post(
reverse('NodeReassignHandler', reverse('NodeReassignHandler',
@ -192,10 +186,9 @@ class TestNodeReassignHandler(base.BaseIntegrationTest):
"^Node should be in error state") "^Node should be in error state")
def test_node_reassign_handler_to_the_same_cluster(self): def test_node_reassign_handler_to_the_same_cluster(self):
self.env.create( cluster = self.env.create(
cluster_kwargs={'api': False}, cluster_kwargs={'api': False},
nodes_kwargs=[{'status': 'ready'}]) nodes_kwargs=[{'status': 'ready'}])
cluster = self.env.clusters[0]
cluster_id = cluster['id'] cluster_id = cluster['id']
node_id = cluster.nodes[0]['id'] node_id = cluster.nodes[0]['id']

View File

@ -237,7 +237,7 @@ class TestNodeDisksHandlers(BaseIntegrationTest):
self.get_vgs(modified_roles_response)) self.get_vgs(modified_roles_response))
def test_volumes_update_after_roles_assignment(self): def test_volumes_update_after_roles_assignment(self):
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{"cluster_id": None} {"cluster_id": None}
] ]
@ -255,7 +255,7 @@ class TestNodeDisksHandlers(BaseIntegrationTest):
self.app.post( self.app.post(
reverse( reverse(
'NodeAssignmentHandler', 'NodeAssignmentHandler',
kwargs={'cluster_id': self.env.clusters[0].id} kwargs={'cluster_id': cluster.id}
), ),
jsonutils.dumps(assignment_data), jsonutils.dumps(assignment_data),
headers=self.default_headers headers=self.default_headers

View File

@ -51,7 +51,7 @@ class TestActionLogs(BaseMasterNodeSettignsTest):
@fake_tasks() @fake_tasks()
def test_only_utc_datetime_used(self): def test_only_utc_datetime_used(self):
start_dt = datetime.datetime.utcnow() start_dt = datetime.datetime.utcnow()
self.env.create( cluster = self.env.create(
api=True, api=True,
nodes_kwargs=[ nodes_kwargs=[
{'roles': ['controller'], 'pending_addition': True}, {'roles': ['controller'], 'pending_addition': True},
@ -62,7 +62,6 @@ class TestActionLogs(BaseMasterNodeSettignsTest):
task = self.env.launch_deployment() task = self.env.launch_deployment()
self.assertEqual(task.status, consts.TASK_STATUSES.ready) self.assertEqual(task.status, consts.TASK_STATUSES.ready)
cluster = self.env.clusters[0]
self.app.delete( self.app.delete(
reverse('ClusterHandler', kwargs={'obj_id': cluster.id}), reverse('ClusterHandler', kwargs={'obj_id': cluster.id}),
headers=self.default_headers headers=self.default_headers
@ -80,7 +79,7 @@ class TestActionLogs(BaseMasterNodeSettignsTest):
@fake_tasks() @fake_tasks()
def test_all_action_logs_types_saved(self): def test_all_action_logs_types_saved(self):
# Creating nailgun_tasks # Creating nailgun_tasks
self.env.create( cluster = self.env.create(
api=True, api=True,
nodes_kwargs=[ nodes_kwargs=[
{'roles': ['controller'], 'pending_addition': True}, {'roles': ['controller'], 'pending_addition': True},
@ -92,7 +91,6 @@ class TestActionLogs(BaseMasterNodeSettignsTest):
self.assertEqual(task.status, consts.TASK_STATUSES.ready) self.assertEqual(task.status, consts.TASK_STATUSES.ready)
# Creating http_request # Creating http_request
cluster = self.env.clusters[0]
self.app.delete( self.app.delete(
reverse('ClusterHandler', kwargs={'obj_id': cluster.id}), reverse('ClusterHandler', kwargs={'obj_id': cluster.id}),
headers=self.default_headers headers=self.default_headers

View File

@ -30,7 +30,6 @@ class TestClusterAttributes(BaseIntegrationTest):
def test_attributes_creation(self): def test_attributes_creation(self):
cluster = self.env.create_cluster(api=True) cluster = self.env.create_cluster(api=True)
cluster_db = self.env.clusters[0]
resp = self.app.get( resp = self.app.get(
reverse( reverse(
'ClusterAttributesHandler', 'ClusterAttributesHandler',
@ -42,13 +41,13 @@ class TestClusterAttributes(BaseIntegrationTest):
self._compare_editable( self._compare_editable(
release.attributes_metadata['editable'], release.attributes_metadata['editable'],
resp.json_body['editable'], resp.json_body['editable'],
cluster_db cluster
) )
attrs = objects.Cluster.get_attributes(cluster_db) attrs = objects.Cluster.get_attributes(cluster)
self._compare_generated( self._compare_generated(
release.attributes_metadata['generated'], release.attributes_metadata['generated'],
attrs['generated'], attrs['generated'],
cluster_db cluster
) )
def test_500_if_no_attributes(self): def test_500_if_no_attributes(self):
@ -70,8 +69,8 @@ class TestClusterAttributes(BaseIntegrationTest):
self.assertEqual(500, resp.status_code) self.assertEqual(500, resp.status_code)
def test_attributes_update_put(self): def test_attributes_update_put(self):
cluster_id = self.env.create_cluster(api=True)['id'] cluster = self.env.create_cluster(api=True)
cluster_db = self.env.clusters[0] cluster_id = cluster['id']
resp = self.app.get( resp = self.app.get(
reverse( reverse(
'ClusterAttributesHandler', 'ClusterAttributesHandler',
@ -91,7 +90,7 @@ class TestClusterAttributes(BaseIntegrationTest):
headers=self.default_headers headers=self.default_headers
) )
self.assertEqual(200, resp.status_code) self.assertEqual(200, resp.status_code)
attrs = objects.Cluster.get_editable_attributes(cluster_db) attrs = objects.Cluster.get_editable_attributes(cluster)
self.assertEqual({'bar': None}, attrs["foo"]) self.assertEqual({'bar': None}, attrs["foo"])
attrs.pop('foo') attrs.pop('foo')
@ -123,19 +122,18 @@ class TestClusterAttributes(BaseIntegrationTest):
self.assertEqual(400, resp.status_code) self.assertEqual(400, resp.status_code)
def test_attributes_update_patch(self): def test_attributes_update_patch(self):
cluster_id = self.env.create_cluster(api=True)['id'] cluster = self.env.create_cluster(api=True)
cluster_db = self.env.clusters[0]
resp = self.app.get( resp = self.app.get(
reverse( reverse(
'ClusterAttributesHandler', 'ClusterAttributesHandler',
kwargs={'cluster_id': cluster_id}), kwargs={'cluster_id': cluster['id']}),
headers=self.default_headers headers=self.default_headers
) )
self.assertEqual(200, resp.status_code) self.assertEqual(200, resp.status_code)
resp = self.app.patch( resp = self.app.patch(
reverse( reverse(
'ClusterAttributesHandler', 'ClusterAttributesHandler',
kwargs={'cluster_id': cluster_id}), kwargs={'cluster_id': cluster['id']}),
params=jsonutils.dumps({ params=jsonutils.dumps({
'editable': { 'editable': {
'foo': {'bar': None} 'foo': {'bar': None}
@ -144,7 +142,7 @@ class TestClusterAttributes(BaseIntegrationTest):
headers=self.default_headers headers=self.default_headers
) )
self.assertEqual(200, resp.status_code) self.assertEqual(200, resp.status_code)
attrs = objects.Cluster.get_editable_attributes(cluster_db) attrs = objects.Cluster.get_editable_attributes(cluster)
self.assertEqual({'bar': None}, attrs["foo"]) self.assertEqual({'bar': None}, attrs["foo"])
attrs.pop('foo') attrs.pop('foo')
self.assertNotEqual(attrs, {}) self.assertNotEqual(attrs, {})
@ -249,12 +247,11 @@ class TestClusterAttributes(BaseIntegrationTest):
self._compare_editable( self._compare_editable(
release.attributes_metadata['editable'], release.attributes_metadata['editable'],
resp.json_body['editable'], resp.json_body['editable'],
self.env.clusters[0] cluster
) )
def test_get_last_deployed_attributes(self): def test_get_last_deployed_attributes(self):
self.env.create_cluster(api=True) cluster = self.env.create_cluster(api=True)
cluster = self.env.clusters[-1]
cluster_attrs = objects.Cluster.get_editable_attributes( cluster_attrs = objects.Cluster.get_editable_attributes(
self.env.clusters[-1] self.env.clusters[-1]
) )
@ -291,7 +288,6 @@ class TestClusterAttributes(BaseIntegrationTest):
def test_attributes_set_defaults(self): def test_attributes_set_defaults(self):
cluster = self.env.create_cluster(api=True) cluster = self.env.create_cluster(api=True)
cluster_db = self.env.clusters[0]
# Change editable attributes. # Change editable attributes.
resp = self.app.put( resp = self.app.put(
reverse( reverse(
@ -306,7 +302,7 @@ class TestClusterAttributes(BaseIntegrationTest):
expect_errors=True expect_errors=True
) )
self.assertEqual(200, resp.status_code, resp.body) self.assertEqual(200, resp.status_code, resp.body)
attrs = objects.Cluster.get_editable_attributes(cluster_db) attrs = objects.Cluster.get_editable_attributes(cluster)
self.assertEqual({'bar': None}, attrs['foo']) self.assertEqual({'bar': None}, attrs['foo'])
# Set attributes to defaults. # Set attributes to defaults.
resp = self.app.put( resp = self.app.put(
@ -322,7 +318,7 @@ class TestClusterAttributes(BaseIntegrationTest):
self._compare_editable( self._compare_editable(
release.attributes_metadata['editable'], release.attributes_metadata['editable'],
resp.json_body['editable'], resp.json_body['editable'],
cluster_db cluster
) )
def test_attributes_merged_values(self): def test_attributes_merged_values(self):
@ -437,8 +433,7 @@ class TestClusterAttributes(BaseIntegrationTest):
AssertionError, self._compare_editable, r_attrs, c_attrs) AssertionError, self._compare_editable, r_attrs, c_attrs)
def test_editable_attributes_generators(self): def test_editable_attributes_generators(self):
self.env.create_cluster(api=True) cluster = self.env.create_cluster(api=True)
cluster = self.env.clusters[0]
editable = objects.Cluster.get_editable_attributes(cluster) editable = objects.Cluster.get_editable_attributes(cluster)
self.assertEqual( self.assertEqual(
editable["external_dns"]["dns_list"]["value"], editable["external_dns"]["dns_list"]["value"],
@ -450,8 +445,7 @@ class TestClusterAttributes(BaseIntegrationTest):
) )
def test_workloads_collector_attributes(self): def test_workloads_collector_attributes(self):
self.env.create_cluster(api=True) cluster = self.env.create_cluster(api=True)
cluster = self.env.clusters[0]
editable = objects.Cluster.get_editable_attributes(cluster) editable = objects.Cluster.get_editable_attributes(cluster)
self.assertEqual( self.assertEqual(
editable["workloads_collector"]["enabled"]["value"], editable["workloads_collector"]["enabled"]["value"],
@ -498,11 +492,10 @@ class TestAlwaysEditable(BaseIntegrationTest):
def setUp(self): def setUp(self):
super(TestAlwaysEditable, self).setUp() super(TestAlwaysEditable, self).setUp()
self.env.create( self.cluster = self.env.create(
release_kwargs={ release_kwargs={
'version': 'liberty-8.0', 'version': 'liberty-8.0',
'operating_system': consts.RELEASE_OS.centos}) 'operating_system': consts.RELEASE_OS.centos})
self.cluster = self.env.clusters[0]
def _put(self, data, expect_code=200): def _put(self, data, expect_code=200):
resp = self.app.put( resp = self.app.put(
@ -538,10 +531,9 @@ class TestVmwareAttributes(BaseIntegrationTest):
def setUp(self): def setUp(self):
super(TestVmwareAttributes, self).setUp() super(TestVmwareAttributes, self).setUp()
self.cluster = self.env.create_cluster(api=True) self.cluster = self.env.create_cluster(api=True)
self.cluster_db = self.env.clusters[0]
def test_vmware_attributes_creation(self): def test_vmware_attributes_creation(self):
self._set_use_vcenter(self.cluster_db) self._set_use_vcenter(self.cluster)
resp = self.app.get( resp = self.app.get(
reverse( reverse(
@ -552,7 +544,7 @@ class TestVmwareAttributes(BaseIntegrationTest):
release = objects.Release.get_by_uid(self.cluster['release_id']) release = objects.Release.get_by_uid(self.cluster['release_id'])
self.assertEqual(200, resp.status_code) self.assertEqual(200, resp.status_code)
attrs = objects.Cluster.get_vmware_attributes(self.cluster_db) attrs = objects.Cluster.get_vmware_attributes(self.cluster)
# TODO(apopovych): use dictdiffer 0.3.0 to compare atttributes # TODO(apopovych): use dictdiffer 0.3.0 to compare atttributes
# one-by-one # one-by-one
self.assertEqual( self.assertEqual(
@ -561,7 +553,7 @@ class TestVmwareAttributes(BaseIntegrationTest):
) )
def test_vmware_attributes_update(self): def test_vmware_attributes_update(self):
self._set_use_vcenter(self.cluster_db) self._set_use_vcenter(self.cluster)
resp = self.app.put( resp = self.app.put(
reverse( reverse(
@ -576,13 +568,13 @@ class TestVmwareAttributes(BaseIntegrationTest):
) )
self.assertEqual(200, resp.status_code) self.assertEqual(200, resp.status_code)
attrs = objects.Cluster.get_vmware_attributes(self.cluster_db) attrs = objects.Cluster.get_vmware_attributes(self.cluster)
self.assertEqual('bar', attrs.editable.get('value', {}).get('foo')) self.assertEqual('bar', attrs.editable.get('value', {}).get('foo'))
attrs.editable.get('value', {}).pop('foo') attrs.editable.get('value', {}).pop('foo')
self.assertEqual(attrs.editable.get('value'), {}) self.assertEqual(attrs.editable.get('value'), {})
def test_vmware_attributes_update_with_invalid_json_format(self): def test_vmware_attributes_update_with_invalid_json_format(self):
self._set_use_vcenter(self.cluster_db) self._set_use_vcenter(self.cluster)
resp = self.app.put( resp = self.app.put(
reverse( reverse(
@ -599,7 +591,7 @@ class TestVmwareAttributes(BaseIntegrationTest):
self.assertEqual( self.assertEqual(
"'editable' is a required property", resp.json_body["message"]) "'editable' is a required property", resp.json_body["message"])
self._set_use_vcenter(self.cluster_db) self._set_use_vcenter(self.cluster)
resp = self.app.put( resp = self.app.put(
reverse( reverse(
@ -673,11 +665,11 @@ class TestVmwareAttributes(BaseIntegrationTest):
@patch('nailgun.db.sqlalchemy.models.Cluster.is_locked', return_value=True) @patch('nailgun.db.sqlalchemy.models.Cluster.is_locked', return_value=True)
def test_vmware_attributes_update_for_locked_cluster_403(self, locked): def test_vmware_attributes_update_for_locked_cluster_403(self, locked):
self._set_use_vcenter(self.cluster_db) self._set_use_vcenter(self.cluster)
resp = self.app.put( resp = self.app.put(
reverse( reverse(
'VmwareAttributesHandler', 'VmwareAttributesHandler',
kwargs={'cluster_id': self.cluster_db.id}), kwargs={'cluster_id': self.cluster.id}),
params=jsonutils.dumps({ params=jsonutils.dumps({
"editable": { "editable": {
"value": {"foo": "bar"} "value": {"foo": "bar"}
@ -694,7 +686,7 @@ class TestVmwareAttributes(BaseIntegrationTest):
@patch('nailgun.db.sqlalchemy.models.Cluster.is_locked', return_value=True) @patch('nailgun.db.sqlalchemy.models.Cluster.is_locked', return_value=True)
def test_vmware_attributes_update_for_locked_cluster_200( def test_vmware_attributes_update_for_locked_cluster_200(
self, is_locked_mock, has_compute_mock): self, is_locked_mock, has_compute_mock):
self._set_use_vcenter(self.cluster_db) self._set_use_vcenter(self.cluster)
params = { params = {
"editable": { "editable": {
"value": {"foo": "bar"} "value": {"foo": "bar"}
@ -704,12 +696,12 @@ class TestVmwareAttributes(BaseIntegrationTest):
resp = self.app.put( resp = self.app.put(
reverse( reverse(
'VmwareAttributesHandler', 'VmwareAttributesHandler',
kwargs={'cluster_id': self.cluster_db.id}), kwargs={'cluster_id': self.cluster.id}),
params=jsonutils.dumps(params), params=jsonutils.dumps(params),
headers=self.default_headers headers=self.default_headers
) )
self.assertEqual(200, resp.status_code) self.assertEqual(200, resp.status_code)
attrs = objects.Cluster.get_vmware_attributes(self.cluster_db) attrs = objects.Cluster.get_vmware_attributes(self.cluster)
self.assertEqual('bar', attrs.editable.get('value', {}).get('foo')) self.assertEqual('bar', attrs.editable.get('value', {}).get('foo'))
attrs.editable.get('value', {}).pop('foo') attrs.editable.get('value', {}).pop('foo')
self.assertEqual(attrs.editable.get('value'), {}) self.assertEqual(attrs.editable.get('value'), {})
@ -725,11 +717,10 @@ class TestVmwareAttributesDefaults(BaseIntegrationTest):
def test_get_default_vmware_attributes(self): def test_get_default_vmware_attributes(self):
cluster = self.env.create_cluster(api=True) cluster = self.env.create_cluster(api=True)
cluster_db = self.env.clusters[0] cluster_attrs = objects.Cluster.get_editable_attributes(cluster)
cluster_attrs = objects.Cluster.get_editable_attributes(cluster_db)
cluster_attrs['common']['use_vcenter']['value'] = True cluster_attrs['common']['use_vcenter']['value'] = True
objects.Cluster.update_attributes( objects.Cluster.update_attributes(
cluster_db, {'editable': cluster_attrs}) cluster, {'editable': cluster_attrs})
resp = self.app.get( resp = self.app.get(
reverse( reverse(
'VmwareAttributesDefaultsHandler', 'VmwareAttributesDefaultsHandler',
@ -765,7 +756,7 @@ class TestAttributesWithPlugins(BaseIntegrationTest):
def setUp(self): def setUp(self):
super(TestAttributesWithPlugins, self).setUp() super(TestAttributesWithPlugins, self).setUp()
self.env.create( self.cluster = self.env.create(
release_kwargs={ release_kwargs={
'operating_system': consts.RELEASE_OS.ubuntu, 'operating_system': consts.RELEASE_OS.ubuntu,
'version': '2015.1.0-7.0', 'version': '2015.1.0-7.0',
@ -781,8 +772,6 @@ class TestAttributesWithPlugins(BaseIntegrationTest):
] ]
) )
self.cluster = self.env.clusters[0]
self.plugin_data = { self.plugin_data = {
'releases': [ 'releases': [
{ {

View File

@ -93,8 +93,7 @@ class TestClusterChanges(BaseIntegrationTest):
def test_attributes_changing_adds_pending_changes(self): def test_attributes_changing_adds_pending_changes(self):
cluster = self.env.create_cluster(api=True) cluster = self.env.create_cluster(api=True)
cluster_db = self.env.clusters[0] objects.Cluster.clear_pending_changes(cluster)
objects.Cluster.clear_pending_changes(cluster_db)
all_changes = self.db.query(ClusterChanges).all() all_changes = self.db.query(ClusterChanges).all()
self.assertEqual(len(all_changes), 0) self.assertEqual(len(all_changes), 0)
self.app.put( self.app.put(
@ -115,8 +114,7 @@ class TestClusterChanges(BaseIntegrationTest):
def test_default_attributes_adds_pending_changes(self): def test_default_attributes_adds_pending_changes(self):
cluster = self.env.create_cluster(api=True) cluster = self.env.create_cluster(api=True)
cluster_db = self.env.clusters[0] objects.Cluster.clear_pending_changes(cluster)
objects.Cluster.clear_pending_changes(cluster_db)
all_changes = self.db.query(ClusterChanges).all() all_changes = self.db.query(ClusterChanges).all()
self.assertEqual(len(all_changes), 0) self.assertEqual(len(all_changes), 0)
self.app.put( self.app.put(
@ -132,7 +130,7 @@ class TestClusterChanges(BaseIntegrationTest):
@fake_tasks(override_state={"progress": 100, "status": "ready"}) @fake_tasks(override_state={"progress": 100, "status": "ready"})
def test_successful_deployment_drops_all_changes(self): def test_successful_deployment_drops_all_changes(self):
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{"api": True, "pending_addition": True} {"api": True, "pending_addition": True}
] ]
@ -140,9 +138,7 @@ class TestClusterChanges(BaseIntegrationTest):
supertask = self.env.launch_deployment() supertask = self.env.launch_deployment()
self.assertEqual(supertask.status, consts.TASK_STATUSES.ready) self.assertEqual(supertask.status, consts.TASK_STATUSES.ready)
cluster_db = self.db.query(Cluster).get( cluster_db = self.db.query(Cluster).get(cluster.id)
self.env.clusters[0].id
)
self.assertEqual(list(cluster_db.changes), []) self.assertEqual(list(cluster_db.changes), [])
@fake_tasks(error="deployment", error_msg="Terrible error") @fake_tasks(error="deployment", error_msg="Terrible error")
@ -178,7 +174,7 @@ class TestClusterChanges(BaseIntegrationTest):
@fake_tasks(override_state={"progress": 100, "status": "ready"}) @fake_tasks(override_state={"progress": 100, "status": "ready"})
def test_role_unassignment_drops_changes(self): def test_role_unassignment_drops_changes(self):
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{"pending_addition": True, "api": True} {"pending_addition": True, "api": True}
] ]
@ -186,7 +182,7 @@ class TestClusterChanges(BaseIntegrationTest):
supertask = self.env.launch_deployment() supertask = self.env.launch_deployment()
self.assertEqual(supertask.status, consts.TASK_STATUSES.ready) self.assertEqual(supertask.status, consts.TASK_STATUSES.ready)
new_node = self.env.create_node( new_node = self.env.create_node(
cluster_id=self.env.clusters[0].id, cluster_id=cluster.id,
pending_addition=True, pending_addition=True,
api=True api=True
) )
@ -201,7 +197,7 @@ class TestClusterChanges(BaseIntegrationTest):
headers=self.default_headers headers=self.default_headers
) )
all_changes = self.db.query(ClusterChanges).filter_by( all_changes = self.db.query(ClusterChanges).filter_by(
cluster_id=self.env.clusters[0].id, cluster_id=cluster.id,
node_id=new_node["id"] node_id=new_node["id"]
).all() ).all()
self.assertEqual(all_changes, []) self.assertEqual(all_changes, [])

View File

@ -48,7 +48,7 @@ class TestCharsetIssues(BaseIntegrationTest):
@fake_tasks(fake_rpc=False) @fake_tasks(fake_rpc=False)
def test_deletion_during_deployment(self, mock_rpc): def test_deletion_during_deployment(self, mock_rpc):
self.env.create( cluster = self.env.create(
cluster_kwargs={ cluster_kwargs={
"name": u"Вася" "name": u"Вася"
}, },
@ -56,11 +56,10 @@ class TestCharsetIssues(BaseIntegrationTest):
{"status": "ready", "pending_addition": True}, {"status": "ready", "pending_addition": True},
] ]
) )
cluster_id = self.env.clusters[0].id
resp = self.app.put( resp = self.app.put(
reverse( reverse(
'ClusterChangesHandler', 'ClusterChangesHandler',
kwargs={'cluster_id': cluster_id}), kwargs={'cluster_id': cluster.id}),
headers=self.default_headers headers=self.default_headers
) )
deploy_uuid = resp.json_body['uuid'] deploy_uuid = resp.json_body['uuid']
@ -73,7 +72,7 @@ class TestCharsetIssues(BaseIntegrationTest):
resp = self.app.delete( resp = self.app.delete(
reverse( reverse(
'ClusterHandler', 'ClusterHandler',
kwargs={'obj_id': cluster_id}), kwargs={'obj_id': cluster.id}),
headers=self.default_headers headers=self.default_headers
) )
task_delete = self.db.query(models.Task).filter_by( task_delete = self.db.query(models.Task).filter_by(
@ -86,5 +85,5 @@ class TestCharsetIssues(BaseIntegrationTest):
) )
cluster = self.db.query(models.Cluster).filter_by( cluster = self.db.query(models.Cluster).filter_by(
id=cluster_id).first() id=cluster.id).first()
self.assertIsNone(cluster) self.assertIsNone(cluster)

View File

@ -35,7 +35,7 @@ class TestCluster(BaseIntegrationTest):
self.assertIsNotNone(primary_node) self.assertIsNotNone(primary_node)
def test_adjust_nodes_lists_on_controller_removing(self): def test_adjust_nodes_lists_on_controller_removing(self):
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{'roles': ['controller']}, {'roles': ['controller']},
{'pending_roles': ['controller']}, {'pending_roles': ['controller']},
@ -44,7 +44,6 @@ class TestCluster(BaseIntegrationTest):
{'roles': ['compute']}, {'roles': ['compute']},
] ]
) )
cluster = self.env.clusters[0]
controllers = filter(lambda x: 'controller' in x.all_roles, controllers = filter(lambda x: 'controller' in x.all_roles,
cluster.nodes) cluster.nodes)
@ -77,12 +76,11 @@ class TestCluster(BaseIntegrationTest):
self.assertItemsEqual(controllers[1:], n_deploy) self.assertItemsEqual(controllers[1:], n_deploy)
def test_adjust_nodes_lists_on_controller_removing_no_cluster(self): def test_adjust_nodes_lists_on_controller_removing_no_cluster(self):
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{'roles': ['controller']} {'roles': ['controller']}
] ]
) )
cluster = self.env.clusters[0]
for node in cluster.nodes: for node in cluster.nodes:
self.assertIn('controller', node.all_roles) self.assertIn('controller', node.all_roles)
@ -96,7 +94,7 @@ class TestCluster(BaseIntegrationTest):
@fake_tasks(override_state={'progress': 100, @fake_tasks(override_state={'progress': 100,
'status': consts.TASK_STATUSES.ready}) 'status': consts.TASK_STATUSES.ready})
def test_get_primary_node(self): def test_get_primary_node(self):
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{'pending_roles': ['controller'], {'pending_roles': ['controller'],
'pending_addition': True}, 'pending_addition': True},
@ -108,7 +106,6 @@ class TestCluster(BaseIntegrationTest):
'pending_addition': True}, 'pending_addition': True},
] ]
) )
cluster = self.env.clusters[0]
# Checking no primary nodes before deployment # Checking no primary nodes before deployment
self.check_no_primary_node( self.check_no_primary_node(
@ -124,14 +121,13 @@ class TestCluster(BaseIntegrationTest):
@fake_tasks(override_state={'progress': 100, @fake_tasks(override_state={'progress': 100,
'status': consts.TASK_STATUSES.ready}) 'status': consts.TASK_STATUSES.ready})
def test_get_primary_node_pending_deletion(self): def test_get_primary_node_pending_deletion(self):
self.env.create( cluster = self.env.create(
api=True, api=True,
nodes_kwargs=[ nodes_kwargs=[
{'roles': ['controller'], 'pending_addition': True}, {'roles': ['controller'], 'pending_addition': True},
{'roles': ['compute'], 'pending_addition': True} {'roles': ['compute'], 'pending_addition': True}
] ]
) )
cluster = self.env.clusters[0]
# Checking primary present # Checking primary present
deploy = self.env.launch_deployment() deploy = self.env.launch_deployment()

View File

@ -38,7 +38,7 @@ class TestHandlers(BaseIntegrationTest):
@fake_tasks(fake_rpc=False, mock_rpc=False) @fake_tasks(fake_rpc=False, mock_rpc=False)
@patch('nailgun.rpc.cast') @patch('nailgun.rpc.cast')
def test_nova_deploy_cast_with_right_args(self, mocked_rpc): def test_nova_deploy_cast_with_right_args(self, mocked_rpc):
self.env.create( cluster_db = self.env.create(
release_kwargs={ release_kwargs={
'version': "2014.2-6.0" 'version': "2014.2-6.0"
}, },
@ -55,8 +55,6 @@ class TestHandlers(BaseIntegrationTest):
] ]
) )
cluster_db = self.env.clusters[0]
common_attrs = { common_attrs = {
'deployment_mode': consts.CLUSTER_MODES.ha_compact, 'deployment_mode': consts.CLUSTER_MODES.ha_compact,
@ -392,7 +390,7 @@ class TestHandlers(BaseIntegrationTest):
@fake_tasks(fake_rpc=False, mock_rpc=False) @fake_tasks(fake_rpc=False, mock_rpc=False)
@patch('nailgun.rpc.cast') @patch('nailgun.rpc.cast')
def test_neutron_deploy_cast_with_right_args_for_5_1_1(self, mocked_rpc): def test_neutron_deploy_cast_with_right_args_for_5_1_1(self, mocked_rpc):
self.env.create( cluster_db = self.env.create(
release_kwargs={ release_kwargs={
'version': "2014.1.3-5.1.1" 'version': "2014.1.3-5.1.1"
}, },
@ -412,7 +410,6 @@ class TestHandlers(BaseIntegrationTest):
] ]
) )
cluster_db = self.env.clusters[0]
self.env.disable_task_deploy(cluster_db) self.env.disable_task_deploy(cluster_db)
# This is here to work around the fact that we use the same fixture # This is here to work around the fact that we use the same fixture
@ -882,7 +879,7 @@ class TestHandlers(BaseIntegrationTest):
@fake_tasks(fake_rpc=False, mock_rpc=False) @fake_tasks(fake_rpc=False, mock_rpc=False)
@patch('nailgun.rpc.cast') @patch('nailgun.rpc.cast')
def test_neutron_deploy_cast_with_right_args_for_6_0(self, mocked_rpc): def test_neutron_deploy_cast_with_right_args_for_6_0(self, mocked_rpc):
self.env.create( cluster_db = self.env.create(
release_kwargs={ release_kwargs={
'version': "2014.2-6.0" 'version': "2014.2-6.0"
}, },
@ -902,7 +899,6 @@ class TestHandlers(BaseIntegrationTest):
] ]
) )
cluster_db = self.env.clusters[0]
self.env.disable_task_deploy(cluster_db) self.env.disable_task_deploy(cluster_db)
# This is here to work around the fact that we use the same fixture # This is here to work around the fact that we use the same fixture
@ -1422,7 +1418,7 @@ class TestHandlers(BaseIntegrationTest):
@patch('nailgun.rpc.cast') @patch('nailgun.rpc.cast')
def test_deploy_multinode_neutron_gre_w_custom_public_ranges(self, def test_deploy_multinode_neutron_gre_w_custom_public_ranges(self,
mocked_rpc): mocked_rpc):
self.env.create( cluster = self.env.create(
cluster_kwargs={'net_provider': 'neutron', cluster_kwargs={'net_provider': 'neutron',
'net_segment_type': 'gre'}, 'net_segment_type': 'gre'},
nodes_kwargs=[{"pending_addition": True}, nodes_kwargs=[{"pending_addition": True},
@ -1432,15 +1428,13 @@ class TestHandlers(BaseIntegrationTest):
{"pending_addition": True}] {"pending_addition": True}]
) )
net_data = self.env.neutron_networks_get( net_data = self.env.neutron_networks_get(cluster.id).json_body
self.env.clusters[0].id
).json_body
pub = filter(lambda ng: ng['name'] == 'public', pub = filter(lambda ng: ng['name'] == 'public',
net_data['networks'])[0] net_data['networks'])[0]
pub.update({'ip_ranges': [['172.16.0.10', '172.16.0.13'], pub.update({'ip_ranges': [['172.16.0.10', '172.16.0.13'],
['172.16.0.20', '172.16.0.22']]}) ['172.16.0.20', '172.16.0.22']]})
resp = self.env.neutron_networks_put(self.env.clusters[0].id, net_data) resp = self.env.neutron_networks_put(cluster.id, net_data)
self.assertEqual(resp.status_code, 200) self.assertEqual(resp.status_code, 200)
self.env.launch_deployment() self.env.launch_deployment()
@ -1466,7 +1460,7 @@ class TestHandlers(BaseIntegrationTest):
@fake_tasks(fake_rpc=False, mock_rpc=False) @fake_tasks(fake_rpc=False, mock_rpc=False)
@patch('nailgun.rpc.cast') @patch('nailgun.rpc.cast')
def test_deploy_ha_neutron_gre_w_custom_public_ranges(self, mocked_rpc): def test_deploy_ha_neutron_gre_w_custom_public_ranges(self, mocked_rpc):
self.env.create( cluster = self.env.create(
cluster_kwargs={'mode': consts.CLUSTER_MODES.ha_compact, cluster_kwargs={'mode': consts.CLUSTER_MODES.ha_compact,
'net_provider': 'neutron', 'net_provider': 'neutron',
'net_segment_type': 'gre'}, 'net_segment_type': 'gre'},
@ -1477,15 +1471,13 @@ class TestHandlers(BaseIntegrationTest):
{"pending_addition": True}] {"pending_addition": True}]
) )
net_data = self.env.neutron_networks_get( net_data = self.env.neutron_networks_get(cluster.id).json_body
self.env.clusters[0].id
).json_body
pub = filter(lambda ng: ng['name'] == 'public', pub = filter(lambda ng: ng['name'] == 'public',
net_data['networks'])[0] net_data['networks'])[0]
pub.update({'ip_ranges': [['172.16.0.10', '172.16.0.13'], pub.update({'ip_ranges': [['172.16.0.10', '172.16.0.13'],
['172.16.0.20', '172.16.0.22']]}) ['172.16.0.20', '172.16.0.22']]})
resp = self.env.neutron_networks_put(self.env.clusters[0].id, net_data) resp = self.env.neutron_networks_put(cluster.id, net_data)
self.assertEqual(resp.status_code, 200) self.assertEqual(resp.status_code, 200)
self.env.launch_deployment() self.env.launch_deployment()
@ -1509,16 +1501,14 @@ class TestHandlers(BaseIntegrationTest):
@fake_tasks(fake_rpc=False, mock_rpc=False) @fake_tasks(fake_rpc=False, mock_rpc=False)
@patch('nailgun.rpc.cast') @patch('nailgun.rpc.cast')
def test_deploy_neutron_gre_w_changed_public_cidr(self, mocked_rpc): def test_deploy_neutron_gre_w_changed_public_cidr(self, mocked_rpc):
self.env.create( cluster = self.env.create(
cluster_kwargs={'net_provider': 'neutron', cluster_kwargs={'net_provider': 'neutron',
'net_segment_type': 'gre'}, 'net_segment_type': 'gre'},
nodes_kwargs=[{"pending_addition": True}, nodes_kwargs=[{"pending_addition": True},
{"pending_addition": True}] {"pending_addition": True}]
) )
net_data = self.env.neutron_networks_get( net_data = self.env.neutron_networks_get(cluster.id).json_body
self.env.clusters[0].id
).json_body
pub = filter(lambda ng: ng['name'] == 'public', pub = filter(lambda ng: ng['name'] == 'public',
net_data['networks'])[0] net_data['networks'])[0]
pub.update({'ip_ranges': [['172.16.10.10', '172.16.10.122']], pub.update({'ip_ranges': [['172.16.10.10', '172.16.10.122']],
@ -1527,7 +1517,7 @@ class TestHandlers(BaseIntegrationTest):
net_data['networking_parameters']['floating_ranges'] = \ net_data['networking_parameters']['floating_ranges'] = \
[['172.16.10.130', '172.16.10.254']] [['172.16.10.130', '172.16.10.254']]
resp = self.env.neutron_networks_put(self.env.clusters[0].id, net_data) resp = self.env.neutron_networks_put(cluster.id, net_data)
self.assertEqual(resp.status_code, 200) self.assertEqual(resp.status_code, 200)
self.env.launch_deployment() self.env.launch_deployment()
@ -1546,7 +1536,7 @@ class TestHandlers(BaseIntegrationTest):
@fake_tasks(fake_rpc=False, mock_rpc=False) @fake_tasks(fake_rpc=False, mock_rpc=False)
@patch('nailgun.rpc.cast') @patch('nailgun.rpc.cast')
def test_deploy_neutron_error_not_enough_ip_addresses(self, mocked_rpc): def test_deploy_neutron_error_not_enough_ip_addresses(self, mocked_rpc):
self.env.create( cluster = self.env.create(
cluster_kwargs={'net_provider': 'neutron', cluster_kwargs={'net_provider': 'neutron',
'net_segment_type': 'gre'}, 'net_segment_type': 'gre'},
nodes_kwargs=[{"pending_addition": True}, nodes_kwargs=[{"pending_addition": True},
@ -1554,14 +1544,12 @@ class TestHandlers(BaseIntegrationTest):
{"pending_addition": True}] {"pending_addition": True}]
) )
net_data = self.env.neutron_networks_get( net_data = self.env.neutron_networks_get(cluster.id).json_body
self.env.clusters[0].id
).json_body
pub = filter(lambda ng: ng['name'] == 'public', pub = filter(lambda ng: ng['name'] == 'public',
net_data['networks'])[0] net_data['networks'])[0]
pub.update({'ip_ranges': [['172.16.0.10', '172.16.0.11']]}) pub.update({'ip_ranges': [['172.16.0.10', '172.16.0.11']]})
resp = self.env.neutron_networks_put(self.env.clusters[0].id, net_data) resp = self.env.neutron_networks_put(cluster.id, net_data)
self.assertEqual(resp.status_code, 200) self.assertEqual(resp.status_code, 200)
task = self.env.launch_deployment() task = self.env.launch_deployment()
@ -1573,7 +1561,7 @@ class TestHandlers(BaseIntegrationTest):
'at least 3 IP addresses for the current environment.') 'at least 3 IP addresses for the current environment.')
def test_occurs_error_not_enough_ip_addresses(self): def test_occurs_error_not_enough_ip_addresses(self):
self.env.create( cluster = self.env.create(
cluster_kwargs={ cluster_kwargs={
'net_provider': consts.CLUSTER_NET_PROVIDERS.nova_network, 'net_provider': consts.CLUSTER_NET_PROVIDERS.nova_network,
}, },
@ -1582,8 +1570,6 @@ class TestHandlers(BaseIntegrationTest):
{'pending_addition': True}, {'pending_addition': True},
{'pending_addition': True}]) {'pending_addition': True}])
cluster = self.env.clusters[0]
public_network = self.db.query( public_network = self.db.query(
NetworkGroup).filter_by(name='public').first() NetworkGroup).filter_by(name='public').first()
@ -1668,16 +1654,16 @@ class TestHandlers(BaseIntegrationTest):
'or reduce Ceph replication factor in the Settings tab.') 'or reduce Ceph replication factor in the Settings tab.')
def test_occurs_error_release_is_unavailable(self): def test_occurs_error_release_is_unavailable(self):
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{'roles': ['controller'], 'pending_addition': True}]) {'roles': ['controller'], 'pending_addition': True}])
self.env.clusters[0].release.state = consts.RELEASE_STATES.unavailable cluster.release.state = consts.RELEASE_STATES.unavailable
resp = self.app.put( resp = self.app.put(
reverse( reverse(
'ClusterChangesHandler', 'ClusterChangesHandler',
kwargs={'cluster_id': self.env.clusters[0].id}), kwargs={'cluster_id': cluster.id}),
headers=self.default_headers, headers=self.default_headers,
expect_errors=True) expect_errors=True)
@ -1685,7 +1671,7 @@ class TestHandlers(BaseIntegrationTest):
self.assertRegexpMatches(resp.body, 'Release .* is unavailable') self.assertRegexpMatches(resp.body, 'Release .* is unavailable')
def test_occurs_error_no_deployment_tasks_for_release(self): def test_occurs_error_no_deployment_tasks_for_release(self):
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{'roles': ['controller'], 'pending_addition': True}], {'roles': ['controller'], 'pending_addition': True}],
release_kwargs={ release_kwargs={
@ -1696,7 +1682,7 @@ class TestHandlers(BaseIntegrationTest):
resp = self.app.put( resp = self.app.put(
reverse( reverse(
'ClusterChangesHandler', 'ClusterChangesHandler',
kwargs={'cluster_id': self.env.clusters[0].id}), kwargs={'cluster_id': cluster.id}),
headers=self.default_headers, headers=self.default_headers,
expect_errors=True) expect_errors=True)
@ -1741,7 +1727,7 @@ class TestHandlers(BaseIntegrationTest):
"name": "eth1", "name": "eth1",
"current_speed": None}]) "current_speed": None}])
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{ {
'api': True, 'api': True,
@ -1752,24 +1738,23 @@ class TestHandlers(BaseIntegrationTest):
} }
] ]
) )
cluster_id = self.env.clusters[0].id
resp = self.env.neutron_networks_get(cluster_id) resp = self.env.neutron_networks_get(cluster.id)
nets = resp.json_body nets = resp.json_body
for net in nets["networks"]: for net in nets["networks"]:
if net["name"] in ["management", ]: if net["name"] in ["management", ]:
net["vlan_start"] = None net["vlan_start"] = None
self.env.neutron_networks_put(cluster_id, nets) self.env.neutron_networks_put(cluster.id, nets)
supertask = self.env.launch_deployment() supertask = self.env.launch_deployment()
self.assertEqual(supertask.status, consts.TASK_STATUSES.error) self.assertEqual(supertask.status, consts.TASK_STATUSES.error)
def test_empty_cluster_deploy_error(self): def test_empty_cluster_deploy_error(self):
self.env.create(nodes_kwargs=[]) cluster = self.env.create(nodes_kwargs=[])
resp = self.app.put( resp = self.app.put(
reverse( reverse(
'ClusterChangesHandler', 'ClusterChangesHandler',
kwargs={'cluster_id': self.env.clusters[0].id} kwargs={'cluster_id': cluster.id}
), ),
headers=self.default_headers, headers=self.default_headers,
expect_errors=True expect_errors=True

View File

@ -155,17 +155,16 @@ class TestHandlers(BaseIntegrationTest):
@fake_tasks() @fake_tasks()
def test_cluster_deletion(self): def test_cluster_deletion(self):
self.env.create( cluster = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"pending_addition": True}, {"pending_addition": True},
{"status": "ready"}]) {"status": "ready"}])
cluster_id = self.env.clusters[0].id resp = self.delete(cluster.id)
resp = self.delete(cluster_id)
self.assertEqual(resp.status_code, 202) self.assertEqual(resp.status_code, 202)
self.assertIsNone(self.db.query(Cluster).get(cluster_id)) self.assertIsNone(self.db.query(Cluster).get(cluster.id))
# Nodes should be in discover status # Nodes should be in discover status
self.assertEqual(self.db.query(Node).count(), 2) self.assertEqual(self.db.query(Node).count(), 2)
@ -179,17 +178,16 @@ class TestHandlers(BaseIntegrationTest):
@fake_tasks(recover_offline_nodes=False) @fake_tasks(recover_offline_nodes=False)
def test_cluster_deletion_with_offline_nodes(self): def test_cluster_deletion_with_offline_nodes(self):
self.env.create( cluster = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{'pending_addition': True}, {'pending_addition': True},
{'online': False, 'status': 'ready'}]) {'online': False, 'status': 'ready'}])
cluster_id = self.env.clusters[0].id resp = self.delete(cluster.id)
resp = self.delete(cluster_id)
self.assertEqual(resp.status_code, 202) self.assertEqual(resp.status_code, 202)
self.assertIsNone(self.db.query(Cluster).get(cluster_id)) self.assertIsNone(self.db.query(Cluster).get(cluster.id))
self.assertEqual(self.db.query(Node).count(), 1) self.assertEqual(self.db.query(Node).count(), 1)
node = self.db.query(Node).first() node = self.db.query(Node).first()
@ -208,11 +206,10 @@ class TestHandlers(BaseIntegrationTest):
self.assertEqual(ngs, []) self.assertEqual(ngs, [])
def test_cluster_generated_data_handler(self): def test_cluster_generated_data_handler(self):
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{'pending_addition': True}, {'pending_addition': True},
{'online': False, 'status': 'ready'}]) {'online': False, 'status': 'ready'}])
cluster = self.env.clusters[0]
get_resp = self.app.get( get_resp = self.app.get(
reverse('ClusterGeneratedData', reverse('ClusterGeneratedData',
kwargs={'cluster_id': cluster.id}), kwargs={'cluster_id': cluster.id}),

View File

@ -56,7 +56,7 @@ class TestAssignmentHandlers(BaseIntegrationTest):
) )
self.assertEqual(201, resp.status_code) self.assertEqual(201, resp.status_code)
plugin_link = self.env.clusters[0].plugin_links[0] plugin_link = self.cluster.plugin_links[0]
self.assertEqual(self.link_data['title'], plugin_link.title) self.assertEqual(self.link_data['title'], plugin_link.title)
self.assertEqual(self.link_data['url'], plugin_link.url) self.assertEqual(self.link_data['url'], plugin_link.url)
self.assertEqual(self.link_data['hidden'], plugin_link.hidden) self.assertEqual(self.link_data['hidden'], plugin_link.hidden)
@ -79,4 +79,4 @@ class TestAssignmentHandlers(BaseIntegrationTest):
expect_errors=True expect_errors=True
) )
self.assertEqual(400, resp.status_code) self.assertEqual(400, resp.status_code)
self.assertItemsEqual([], self.env.clusters[0].plugin_links) self.assertItemsEqual([], self.cluster.plugin_links)

View File

@ -47,8 +47,7 @@ class TestClusterRolesHandler(base.BaseTestCase):
def setUp(self): def setUp(self):
super(TestClusterRolesHandler, self).setUp() super(TestClusterRolesHandler, self).setUp()
self.env.create_cluster(api=False) self.cluster = self.env.create_cluster(api=False)
self.cluster = self.env.clusters[0]
self.expected_roles_data = self.cluster.release.roles_metadata self.expected_roles_data = self.cluster.release.roles_metadata
self.expected_volumes_data = \ self.expected_volumes_data = \
self.cluster.release.volumes_metadata['volumes_roles_mapping'] self.cluster.release.volumes_metadata['volumes_roles_mapping']

View File

@ -33,11 +33,10 @@ class BaseGraphTasksTests(BaseIntegrationTest):
def setUp(self): def setUp(self):
super(BaseGraphTasksTests, self).setUp() super(BaseGraphTasksTests, self).setUp()
self.env.create( self.cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{'roles': ['test-controller'], 'pending_addition': True}, {'roles': ['test-controller'], 'pending_addition': True},
]) ])
self.cluster = self.env.clusters[0]
def get_correct_tasks(self): def get_correct_tasks(self):
yaml_tasks = """ yaml_tasks = """
@ -644,9 +643,8 @@ class TestTaskDeployGraph(BaseGraphTasksTests):
def setUp(self): def setUp(self):
super(TestTaskDeployGraph, self).setUp() super(TestTaskDeployGraph, self).setUp()
self.env.create() self.cluster = self.env.create()
self.cluster = self.env.clusters[0]
self.tasks = [ self.tasks = [
{'id': 'pre_deployment', 'type': 'stage'}, {'id': 'pre_deployment', 'type': 'stage'},
{'id': 'deploy', 'type': 'stage'}, {'id': 'deploy', 'type': 'stage'},
@ -835,8 +833,7 @@ class TestTaskDeployCustomGraph(BaseGraphTasksTests):
def setUp(self): def setUp(self):
super(TestTaskDeployCustomGraph, self).setUp() super(TestTaskDeployCustomGraph, self).setUp()
self.env.create() self.cluster = self.env.create()
self.cluster = self.env.clusters[-1]
def test_get_custom_tasks(self): def test_get_custom_tasks(self):
objects.DeploymentGraph.create_for_model( objects.DeploymentGraph.create_for_model(

View File

@ -35,15 +35,14 @@ class TestMongoNodes(base.BaseTestCase):
return attr_meta return attr_meta
def test_get_zero_mongo_nodes(self): def test_get_zero_mongo_nodes(self):
self.env.create( cluster = self.env.create(
nodes_kwargs=[{}] nodes_kwargs=[{}]
) )
cluster = self.env.clusters[0]
nodes = objects.Cluster.get_nodes_by_role(cluster, 'mongo') nodes = objects.Cluster.get_nodes_by_role(cluster, 'mongo')
self.assertEqual(len(nodes), 0) self.assertEqual(len(nodes), 0)
def test_get_mongo_nodes(self): def test_get_mongo_nodes(self):
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{'pending_roles': ['mongo'], {'pending_roles': ['mongo'],
'status': 'discover', 'status': 'discover',
@ -53,12 +52,11 @@ class TestMongoNodes(base.BaseTestCase):
'pending_addition': True} 'pending_addition': True}
] ]
) )
cluster = self.env.clusters[0]
nodes = objects.Cluster.get_nodes_by_role(cluster, 'mongo') nodes = objects.Cluster.get_nodes_by_role(cluster, 'mongo')
self.assertEqual(len(nodes), 2) self.assertEqual(len(nodes), 2)
def test_mongo_node_with_ext_mongo(self): def test_mongo_node_with_ext_mongo(self):
self.env.create( cluster = self.env.create(
release_kwargs={ release_kwargs={
'attributes_metadata': self.get_custom_meta(True, True)}, 'attributes_metadata': self.get_custom_meta(True, True)},
nodes_kwargs=[ nodes_kwargs=[
@ -67,36 +65,33 @@ class TestMongoNodes(base.BaseTestCase):
'pending_addition': True} 'pending_addition': True}
] ]
) )
cluster = self.env.clusters[0]
task = Task(name=TASK_NAMES.deploy, cluster=cluster) task = Task(name=TASK_NAMES.deploy, cluster=cluster)
self.assertRaises(errors.ExtMongoCheckerError, self.assertRaises(errors.ExtMongoCheckerError,
CheckBeforeDeploymentTask._check_mongo_nodes, CheckBeforeDeploymentTask._check_mongo_nodes,
task) task)
def test_ext_mongo_without_mongo_node(self): def test_ext_mongo_without_mongo_node(self):
self.env.create( cluster = self.env.create(
release_kwargs={ release_kwargs={
'attributes_metadata': self.get_custom_meta(True, True)}, 'attributes_metadata': self.get_custom_meta(True, True)},
nodes_kwargs=[] nodes_kwargs=[]
) )
cluster = self.env.clusters[0]
task = Task(name=TASK_NAMES.deploy, cluster=cluster) task = Task(name=TASK_NAMES.deploy, cluster=cluster)
CheckBeforeDeploymentTask._check_mongo_nodes(task) CheckBeforeDeploymentTask._check_mongo_nodes(task)
def test_without_any_mongo(self): def test_without_any_mongo(self):
self.env.create( cluster = self.env.create(
release_kwargs={ release_kwargs={
'attributes_metadata': self.get_custom_meta(True, False)}, 'attributes_metadata': self.get_custom_meta(True, False)},
nodes_kwargs=[] nodes_kwargs=[]
) )
cluster = self.env.clusters[0]
task = Task(name=TASK_NAMES.deploy, cluster=cluster) task = Task(name=TASK_NAMES.deploy, cluster=cluster)
self.assertRaises(errors.MongoNodesCheckError, self.assertRaises(errors.MongoNodesCheckError,
CheckBeforeDeploymentTask._check_mongo_nodes, CheckBeforeDeploymentTask._check_mongo_nodes,
task) task)
def test_mongo_node_without_ext_mongo(self): def test_mongo_node_without_ext_mongo(self):
self.env.create( cluster = self.env.create(
release_kwargs={ release_kwargs={
'attributes_metadata': self.get_custom_meta(True, False)}, 'attributes_metadata': self.get_custom_meta(True, False)},
nodes_kwargs=[ nodes_kwargs=[
@ -105,6 +100,5 @@ class TestMongoNodes(base.BaseTestCase):
'pending_addition': True} 'pending_addition': True}
] ]
) )
cluster = self.env.clusters[0]
task = Task(name=TASK_NAMES.deploy, cluster=cluster) task = Task(name=TASK_NAMES.deploy, cluster=cluster)
CheckBeforeDeploymentTask._check_mongo_nodes(task) CheckBeforeDeploymentTask._check_mongo_nodes(task)

View File

@ -255,7 +255,7 @@ class TestNeutronNetworkConfigurationHandler(BaseIntegrationTest):
def setUp(self): def setUp(self):
super(TestNeutronNetworkConfigurationHandler, self).setUp() super(TestNeutronNetworkConfigurationHandler, self).setUp()
self.env.create( self.cluster = self.env.create(
release_kwargs={'version': '1111-8.0'}, release_kwargs={'version': '1111-8.0'},
cluster_kwargs={ cluster_kwargs={
'api': True, 'api': True,
@ -267,7 +267,6 @@ class TestNeutronNetworkConfigurationHandler(BaseIntegrationTest):
# node group which network VIP must be allocated in) # node group which network VIP must be allocated in)
nodes_kwargs=[{'roles': ['controller']}] nodes_kwargs=[{'roles': ['controller']}]
) )
self.cluster = self.env.clusters[0]
def test_get_request_should_return_net_provider_segment_and_networks(self): def test_get_request_should_return_net_provider_segment_and_networks(self):
resp = self.env.neutron_networks_get(self.cluster.id) resp = self.env.neutron_networks_get(self.cluster.id)

View File

@ -58,7 +58,7 @@ class TestNetworkManager(BaseIntegrationTest):
@fake_tasks(fake_rpc=False, mock_rpc=False) @fake_tasks(fake_rpc=False, mock_rpc=False)
@patch('nailgun.rpc.cast') @patch('nailgun.rpc.cast')
def test_assign_ips(self, mocked_rpc): def test_assign_ips(self, mocked_rpc):
self.env.create( cluster = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"pending_addition": True, "api": True}, {"pending_addition": True, "api": True},
@ -75,7 +75,7 @@ class TestNetworkManager(BaseIntegrationTest):
management_net = self.db.query(NetworkGroup).filter( management_net = self.db.query(NetworkGroup).filter(
NetworkGroup.group_id == NetworkGroup.group_id ==
objects.Cluster.get_default_group(self.env.clusters[0]).id objects.Cluster.get_default_group(cluster).id
).filter_by( ).filter_by(
name=consts.NETWORKS.management name=consts.NETWORKS.management
).first() ).first()

View File

@ -53,7 +53,7 @@ class TestNetworkModels(BaseIntegrationTest):
} }
def create_env_using_statuses(self, cluster_status, node_status): def create_env_using_statuses(self, cluster_status, node_status):
self.env.create( cluster = self.env.create(
cluster_kwargs={ cluster_kwargs={
'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron, 'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron,
'net_segment_type': consts.NEUTRON_SEGMENT_TYPES.gre, 'net_segment_type': consts.NEUTRON_SEGMENT_TYPES.gre,
@ -63,27 +63,28 @@ class TestNetworkModels(BaseIntegrationTest):
{'pending_addition': False, 'status': node_status}, {'pending_addition': False, 'status': node_status},
{'pending_addition': False, 'status': node_status}, {'pending_addition': False, 'status': node_status},
{'pending_deletion': False, 'status': node_status}]) {'pending_deletion': False, 'status': node_status}])
return cluster
def test_cluster_locking_during_deployment(self): def test_cluster_locking_during_deployment(self):
self.create_env_using_statuses(consts.CLUSTER_STATUSES.deployment, cluster = self.create_env_using_statuses(
consts.NODE_STATUSES.deploying) consts.CLUSTER_STATUSES.deployment,
consts.NODE_STATUSES.deploying)
test_nets = self.env.neutron_networks_get( test_nets = self.env.neutron_networks_get(cluster.id).json_body
self.env.clusters[0].id).json_body
resp_nova_net = self.env.nova_networks_put( resp_nova_net = self.env.nova_networks_put(
self.env.clusters[0].id, cluster.id,
test_nets, test_nets,
expect_errors=True) expect_errors=True)
resp_neutron_net = self.env.neutron_networks_put( resp_neutron_net = self.env.neutron_networks_put(
self.env.clusters[0].id, cluster.id,
test_nets, test_nets,
expect_errors=True) expect_errors=True)
resp_cluster = self.app.put( resp_cluster = self.app.put(
reverse('ClusterAttributesHandler', reverse('ClusterAttributesHandler',
kwargs={'cluster_id': self.env.clusters[0].id}), kwargs={'cluster_id': cluster.id}),
jsonutils.dumps({ jsonutils.dumps({
'editable': { 'editable': {
"foo": {"bar": None} "foo": {"bar": None}
@ -94,7 +95,7 @@ class TestNetworkModels(BaseIntegrationTest):
resp_cluster_get = self.app.get( resp_cluster_get = self.app.get(
reverse('ClusterHandler', reverse('ClusterHandler',
kwargs={'obj_id': self.env.clusters[0].id}), kwargs={'obj_id': cluster.id}),
headers=self.default_headers) headers=self.default_headers)
self.assertTrue(resp_cluster_get.json_body['is_locked']) self.assertTrue(resp_cluster_get.json_body['is_locked'])
@ -105,11 +106,11 @@ class TestNetworkModels(BaseIntegrationTest):
self.assertEqual(resp_cluster.status_code, 403) self.assertEqual(resp_cluster.status_code, 403)
def test_networks_update_after_deployment(self): def test_networks_update_after_deployment(self):
self.create_env_using_statuses(consts.CLUSTER_STATUSES.operational, cluster = self.create_env_using_statuses(
consts.NODE_STATUSES.ready) consts.CLUSTER_STATUSES.operational,
consts.NODE_STATUSES.ready)
test_nets = self.env.neutron_networks_get( test_nets = self.env.neutron_networks_get(cluster.id).json_body
self.env.clusters[0].id).json_body
# change something from 'networking_parameters' # change something from 'networking_parameters'
test_nets['networking_parameters']['dns_nameservers'] = \ test_nets['networking_parameters']['dns_nameservers'] = \
@ -123,7 +124,7 @@ class TestNetworkModels(BaseIntegrationTest):
mgmt_net['cidr'] = u'1.1.1.0/24' mgmt_net['cidr'] = u'1.1.1.0/24'
resp_neutron_net = self.env.neutron_networks_put( resp_neutron_net = self.env.neutron_networks_put(
self.env.clusters[0].id, test_nets, expect_errors=True) cluster.id, test_nets, expect_errors=True)
self.assertEqual(400, resp_neutron_net.status_code) self.assertEqual(400, resp_neutron_net.status_code)
self.assertEqual( self.assertEqual(
@ -133,13 +134,11 @@ class TestNetworkModels(BaseIntegrationTest):
mgmt_net['cidr'] = u'192.168.0.0/30' mgmt_net['cidr'] = u'192.168.0.0/30'
resp_neutron_net = self.env.neutron_networks_put( resp_neutron_net = self.env.neutron_networks_put(cluster.id, test_nets)
self.env.clusters[0].id, test_nets)
self.assertEqual(200, resp_neutron_net.status_code) self.assertEqual(200, resp_neutron_net.status_code)
new_nets = self.env.neutron_networks_get( new_nets = self.env.neutron_networks_get(cluster.id).json_body
self.env.clusters[0].id).json_body
# test that network was changed # test that network was changed
modified_net = filter(lambda x: x['name'] == test_network_name, modified_net = filter(lambda x: x['name'] == test_network_name,
@ -151,11 +150,11 @@ class TestNetworkModels(BaseIntegrationTest):
new_nets['networking_parameters']) new_nets['networking_parameters'])
def test_admin_network_update_after_deployment(self): def test_admin_network_update_after_deployment(self):
self.create_env_using_statuses(consts.CLUSTER_STATUSES.operational, cluster = self.create_env_using_statuses(
consts.NODE_STATUSES.ready) consts.CLUSTER_STATUSES.operational,
consts.NODE_STATUSES.ready)
test_nets = self.env.neutron_networks_get( test_nets = self.env.neutron_networks_get(cluster.id).json_body
self.env.clusters[0].id).json_body
admin_net = filter( admin_net = filter(
lambda x: x['name'] == consts.NETWORKS.fuelweb_admin, lambda x: x['name'] == consts.NETWORKS.fuelweb_admin,
@ -165,7 +164,7 @@ class TestNetworkModels(BaseIntegrationTest):
admin_net['ip_ranges'] = [[u'191.111.0.5', u'191.111.0.62']] admin_net['ip_ranges'] = [[u'191.111.0.5', u'191.111.0.62']]
resp_neutron_net = self.env.neutron_networks_put( resp_neutron_net = self.env.neutron_networks_put(
self.env.clusters[0].id, test_nets, expect_errors=True) cluster.id, test_nets, expect_errors=True)
self.assertEqual(400, resp_neutron_net.status_code) self.assertEqual(400, resp_neutron_net.status_code)
self.assertEqual( self.assertEqual(
"New IP ranges for network '{0}'({1}) do not cover already " "New IP ranges for network '{0}'({1}) do not cover already "
@ -177,7 +176,7 @@ class TestNetworkModels(BaseIntegrationTest):
self.db.commit() self.db.commit()
with patch('task.task.rpc.cast'): with patch('task.task.rpc.cast'):
resp_neutron_net = self.env.neutron_networks_put( resp_neutron_net = self.env.neutron_networks_put(
self.env.clusters[0].id, test_nets) cluster.id, test_nets)
self.assertEqual(200, resp_neutron_net.status_code) self.assertEqual(200, resp_neutron_net.status_code)
def test_nova_net_networking_parameters(self): def test_nova_net_networking_parameters(self):

View File

@ -98,7 +98,7 @@ class TestNovaHandlers(TestNetworkChecking):
self.env.set_interfaces_in_meta(meta, [ self.env.set_interfaces_in_meta(meta, [
{"name": "eth0", "mac": "00:00:00:00:00:66"}, {"name": "eth0", "mac": "00:00:00:00:00:66"},
{"name": "eth1", "mac": "00:00:00:00:00:77"}]) {"name": "eth1", "mac": "00:00:00:00:00:77"}])
self.env.create( self.cluster = self.env.create(
cluster_kwargs={ cluster_kwargs={
'net_provider': consts.CLUSTER_NET_PROVIDERS.nova_network, 'net_provider': consts.CLUSTER_NET_PROVIDERS.nova_network,
}, },
@ -108,7 +108,6 @@ class TestNovaHandlers(TestNetworkChecking):
"pending_addition": True}, "pending_addition": True},
] ]
) )
self.cluster = self.env.clusters[0]
resp = self.env.nova_networks_get(self.cluster.id) resp = self.env.nova_networks_get(self.cluster.id)
self.nets = resp.json_body self.nets = resp.json_body
@ -410,7 +409,7 @@ class TestNeutronHandlersGre(TestNetworkChecking):
self.env.set_interfaces_in_meta(meta, [ self.env.set_interfaces_in_meta(meta, [
{"name": "eth0", "mac": "00:00:00:00:00:66"}, {"name": "eth0", "mac": "00:00:00:00:00:66"},
{"name": "eth1", "mac": "00:00:00:00:00:77"}]) {"name": "eth1", "mac": "00:00:00:00:00:77"}])
self.env.create( self.cluster = self.env.create(
cluster_kwargs={ cluster_kwargs={
'net_provider': 'neutron', 'net_provider': 'neutron',
'net_segment_type': 'gre' 'net_segment_type': 'gre'
@ -421,7 +420,6 @@ class TestNeutronHandlersGre(TestNetworkChecking):
'meta': meta} 'meta': meta}
] ]
) )
self.cluster = self.env.clusters[0]
resp = self.env.neutron_networks_get(self.cluster.id) resp = self.env.neutron_networks_get(self.cluster.id)
self.nets = resp.json_body self.nets = resp.json_body
@ -803,7 +801,7 @@ class TestNeutronHandlersVlan(TestNetworkChecking):
self.env.set_interfaces_in_meta(meta, [ self.env.set_interfaces_in_meta(meta, [
{"name": "eth0", "mac": "00:00:00:00:00:66"}, {"name": "eth0", "mac": "00:00:00:00:00:66"},
{"name": "eth1", "mac": "00:00:00:00:00:77"}]) {"name": "eth1", "mac": "00:00:00:00:00:77"}])
self.env.create( self.cluster = self.env.create(
cluster_kwargs={ cluster_kwargs={
'net_provider': 'neutron', 'net_provider': 'neutron',
'net_segment_type': 'vlan' 'net_segment_type': 'vlan'
@ -814,7 +812,6 @@ class TestNeutronHandlersVlan(TestNetworkChecking):
'meta': meta} 'meta': meta}
] ]
) )
self.cluster = self.env.clusters[0]
resp = self.env.neutron_networks_get(self.cluster.id) resp = self.env.neutron_networks_get(self.cluster.id)
self.nets = resp.json_body self.nets = resp.json_body
@ -846,7 +843,7 @@ class TestNeutronHandlersTun(TestNetworkChecking):
self.env.set_interfaces_in_meta(meta, [ self.env.set_interfaces_in_meta(meta, [
{"name": "eth0", "mac": "00:00:00:00:00:66"}, {"name": "eth0", "mac": "00:00:00:00:00:66"},
{"name": "eth1", "mac": "00:00:00:00:00:77"}]) {"name": "eth1", "mac": "00:00:00:00:00:77"}])
self.env.create( self.cluster = self.env.create(
cluster_kwargs={ cluster_kwargs={
'net_provider': 'neutron', 'net_provider': 'neutron',
'net_segment_type': 'tun' 'net_segment_type': 'tun'
@ -857,7 +854,6 @@ class TestNeutronHandlersTun(TestNetworkChecking):
'meta': meta} 'meta': meta}
] ]
) )
self.cluster = self.env.clusters[0]
resp = self.env.neutron_networks_get(self.cluster.id) resp = self.env.neutron_networks_get(self.cluster.id)
self.nets = resp.json_body self.nets = resp.json_body
@ -877,7 +873,7 @@ class TestNetworkConfigurationVerifyHandler(TestNetworkChecking):
self.env.set_interfaces_in_meta(meta, [ self.env.set_interfaces_in_meta(meta, [
{"name": "eth0", "mac": "00:00:00:00:00:66"}, {"name": "eth0", "mac": "00:00:00:00:00:66"},
{"name": "eth1", "mac": "00:00:00:00:00:77"}]) {"name": "eth1", "mac": "00:00:00:00:00:77"}])
self.env.create( self.cluster = self.env.create(
cluster_kwargs={ cluster_kwargs={
'net_provider': 'neutron', 'net_provider': 'neutron',
'net_segment_type': 'gre' 'net_segment_type': 'gre'
@ -888,7 +884,6 @@ class TestNetworkConfigurationVerifyHandler(TestNetworkChecking):
'meta': meta} 'meta': meta}
] ]
) )
self.cluster = self.env.clusters[0]
resp = self.env.neutron_networks_get(self.cluster.id) resp = self.env.neutron_networks_get(self.cluster.id)
self.nets = resp.json_body self.nets = resp.json_body
@ -914,7 +909,7 @@ class TestNeutronHandlers90(TestNetworkChecking):
net_meta['neutron']['networks'])[0] net_meta['neutron']['networks'])[0]
bm.update(baremetal_parameters) bm.update(baremetal_parameters)
self.env.create( cluster = self.env.create(
release_kwargs={ release_kwargs={
'networks_metadata': net_meta, 'networks_metadata': net_meta,
'version': 'mitaka-9.0', 'version': 'mitaka-9.0',
@ -925,7 +920,6 @@ class TestNeutronHandlers90(TestNetworkChecking):
'net_segment_type': 'vlan', 'net_segment_type': 'vlan',
} }
) )
cluster = self.env.clusters[0]
self.env._set_additional_component(cluster, 'ironic', True) self.env._set_additional_component(cluster, 'ironic', True)

View File

@ -48,14 +48,13 @@ class TestHandlers(BaseIntegrationTest):
self.assertEqual(node['id'], notif_api['node_id']) self.assertEqual(node['id'], notif_api['node_id'])
def test_node_get_with_cluster(self): def test_node_get_with_cluster(self):
self.env.create( cluster = self.env.create(
cluster_kwargs={"api": True}, cluster_kwargs={"api": True},
nodes_kwargs=[ nodes_kwargs=[
{"cluster_id": None}, {"cluster_id": None},
{}, {},
] ]
) )
cluster = self.env.clusters[0]
resp = self.app.get( resp = self.app.get(
reverse('NodeCollectionHandler'), reverse('NodeCollectionHandler'),

View File

@ -33,8 +33,8 @@ class TestNodeDeletion(BaseIntegrationTest):
def setUp(self): def setUp(self):
super(TestNodeDeletion, self).setUp() super(TestNodeDeletion, self).setUp()
self.env.create(nodes_kwargs=[{"pending_addition": True}]) self.cluster = self.env.create(
self.cluster = self.env.clusters[0] nodes_kwargs=[{"pending_addition": True}])
self.node_ids = [node.id for node in self.cluster.nodes] self.node_ids = [node.id for node in self.cluster.nodes]
@fake_tasks() @fake_tasks()
@ -127,13 +127,12 @@ class TestNodeDeletion(BaseIntegrationTest):
class TestNodeDeletionBadRequest(BaseIntegrationTest): class TestNodeDeletionBadRequest(BaseIntegrationTest):
def test_node_handlers_deletion_bad_request(self): def test_node_handlers_deletion_bad_request(self):
self.env.create(nodes_kwargs=[ cluster = self.env.create(nodes_kwargs=[
{'roles': ['controller'], 'status': consts.NODE_STATUSES.error} {'roles': ['controller'], 'status': consts.NODE_STATUSES.error}
]) ])
cluster_db = self.env.clusters[0]
node_to_delete = self.env.create_node( node_to_delete = self.env.create_node(
cluster_id=cluster_db.id, cluster_id=cluster.id,
roles=['controller'], roles=['controller'],
status=consts.NODE_STATUSES.ready status=consts.NODE_STATUSES.ready
) )

View File

@ -284,7 +284,7 @@ class TestHandlers(BaseIntegrationTest):
@fake_tasks() @fake_tasks()
def test_interface_changes_for_new_node(self): def test_interface_changes_for_new_node(self):
# Creating cluster with node # Creating cluster with node
self.env.create( cluster = self.env.create(
cluster_kwargs={ cluster_kwargs={
'name': 'test_name' 'name': 'test_name'
}, },
@ -292,7 +292,6 @@ class TestHandlers(BaseIntegrationTest):
{'roles': ['controller'], 'pending_addition': True} {'roles': ['controller'], 'pending_addition': True}
] ]
) )
cluster = self.env.clusters[0]
def filter_changes(chg_type, chg_list): def filter_changes(chg_type, chg_list):
return filter(lambda x: x.get('name') == chg_type, chg_list) return filter(lambda x: x.get('name') == chg_type, chg_list)

View File

@ -21,7 +21,6 @@ from netaddr import IPNetwork
from oslo_serialization import jsonutils from oslo_serialization import jsonutils
from nailgun import consts from nailgun import consts
from nailgun.db.sqlalchemy.models import Cluster
from nailgun.db.sqlalchemy.models import NetworkNICAssignment from nailgun.db.sqlalchemy.models import NetworkNICAssignment
from nailgun import objects from nailgun import objects
from nailgun.test.base import BaseIntegrationTest from nailgun.test.base import BaseIntegrationTest
@ -92,8 +91,7 @@ class TestClusterHandlers(BaseIntegrationTest):
{'name': 'eth1', 'mac': self.env.generate_random_mac()}]) {'name': 'eth1', 'mac': self.env.generate_random_mac()}])
node = self.env.create_node(api=True, meta=meta, mac=mac) node = self.env.create_node(api=True, meta=meta, mac=mac)
cluster = self.env.create_cluster(api=True, nodes=[node['id']]) cluster = self.env.create_cluster(api=True, nodes=[node['id']])
cluster_db = self.db.query(Cluster).get(cluster['id']) self.db.delete(cluster)
self.db.delete(cluster_db)
self.db.commit() self.db.commit()
net_assignment = self.db.query(NetworkNICAssignment).all() net_assignment = self.db.query(NetworkNICAssignment).all()
@ -479,7 +477,7 @@ class TestNodeNICsSerialization(BaseIntegrationTest):
def check_nics_interface_properties(self, handler): def check_nics_interface_properties(self, handler):
for ver, present in self.versions: for ver, present in self.versions:
self.env.create( cluster = self.env.create(
release_kwargs={'version': ver}, release_kwargs={'version': ver},
nodes_kwargs=[ nodes_kwargs=[
{'roles': ['controller'], {'roles': ['controller'],
@ -497,7 +495,7 @@ class TestNodeNICsSerialization(BaseIntegrationTest):
self.assertEqual('interface_properties' in resp.json_body[0], self.assertEqual('interface_properties' in resp.json_body[0],
present) present)
objects.Node.delete(node) objects.Node.delete(node)
objects.Cluster.delete(self.env.clusters[0]) objects.Cluster.delete(cluster)
self.env.nodes = [] self.env.nodes = []
self.env.clusters = [] self.env.clusters = []
@ -575,7 +573,7 @@ class TestNodeNICAdminAssigning(BaseIntegrationTest):
class TestNodePublicNetworkToNICAssignment(BaseIntegrationTest): class TestNodePublicNetworkToNICAssignment(BaseIntegrationTest):
def create_node_and_check_assignment(self): def create_node_and_check_assignment(self, cluster):
meta = self.env.default_metadata() meta = self.env.default_metadata()
admin_ip = str(IPNetwork( admin_ip = str(IPNetwork(
objects.NetworkGroup.get_admin_network_group().cidr)[1]) objects.NetworkGroup.get_admin_network_group().cidr)[1])
@ -589,7 +587,7 @@ class TestNodePublicNetworkToNICAssignment(BaseIntegrationTest):
] ]
node = self.env.create_node( node = self.env.create_node(
api=True, meta=meta, mac=admin_mac, ip=admin_ip, api=True, meta=meta, mac=admin_mac, ip=admin_ip,
cluster_id=self.env.clusters[0].id) cluster_id=cluster.id)
resp = self.app.get( resp = self.app.get(
reverse('NodeNICsHandler', kwargs={'node_id': node['id']}), reverse('NodeNICsHandler', kwargs={'node_id': node['id']}),
@ -604,39 +602,39 @@ class TestNodePublicNetworkToNICAssignment(BaseIntegrationTest):
1) 1)
def test_nova_net_public_network_assigned_to_second_nic_by_name(self): def test_nova_net_public_network_assigned_to_second_nic_by_name(self):
self.env.create_cluster( cluster = self.env.create_cluster(
api=True, api=True,
net_provider=consts.CLUSTER_NET_PROVIDERS.nova_network, net_provider=consts.CLUSTER_NET_PROVIDERS.nova_network,
editable_attributes={'public_network_assignment': { editable_attributes={'public_network_assignment': {
'assign_to_all_nodes': {'value': True}}}) 'assign_to_all_nodes': {'value': True}}})
self.create_node_and_check_assignment() self.create_node_and_check_assignment(cluster)
def test_neutron_gre_public_network_assigned_to_second_nic_by_name(self): def test_neutron_gre_public_network_assigned_to_second_nic_by_name(self):
self.env.create_cluster( cluster = self.env.create_cluster(
api=True, api=True,
net_provider='neutron', net_provider='neutron',
net_segment_type='gre', net_segment_type='gre',
editable_attributes={'public_network_assignment': { editable_attributes={'public_network_assignment': {
'assign_to_all_nodes': {'value': True}}}) 'assign_to_all_nodes': {'value': True}}})
self.create_node_and_check_assignment() self.create_node_and_check_assignment(cluster)
def test_neutron_tun_public_network_assigned_to_second_nic_by_name(self): def test_neutron_tun_public_network_assigned_to_second_nic_by_name(self):
self.env.create_cluster( cluster = self.env.create_cluster(
api=True, api=True,
net_provider='neutron', net_provider='neutron',
net_segment_type='tun', net_segment_type='tun',
editable_attributes={'public_network_assignment': { editable_attributes={'public_network_assignment': {
'assign_to_all_nodes': {'value': True}}}) 'assign_to_all_nodes': {'value': True}}})
self.create_node_and_check_assignment() self.create_node_and_check_assignment(cluster)
def test_neutron_vlan_public_network_assigned_to_second_nic_by_name(self): def test_neutron_vlan_public_network_assigned_to_second_nic_by_name(self):
self.env.create_cluster( cluster = self.env.create_cluster(
api=True, api=True,
net_provider='neutron', net_provider='neutron',
net_segment_type='vlan', net_segment_type='vlan',
editable_attributes={'public_network_assignment': { editable_attributes={'public_network_assignment': {
'assign_to_all_nodes': {'value': True}}}) 'assign_to_all_nodes': {'value': True}}})
self.create_node_and_check_assignment() self.create_node_and_check_assignment(cluster)
class TestNodeNICsHandlersValidation(BaseIntegrationTest): class TestNodeNICsHandlersValidation(BaseIntegrationTest):

View File

@ -84,8 +84,7 @@ class TestNodeCollectionNICsHandler(BaseIntegrationTest):
@fake_tasks() @fake_tasks()
def test_interface_changes_added(self): def test_interface_changes_added(self):
# Creating cluster with node # Creating cluster with node
self.env.create_cluster() cluster = self.env.create_cluster()
cluster = self.env.clusters[0]
self.env.create_nodes_w_interfaces_count( self.env.create_nodes_w_interfaces_count(
roles=['controller'], roles=['controller'],
pending_addition=True, pending_addition=True,

View File

@ -653,12 +653,12 @@ class TestHandlers(BaseIntegrationTest):
def get_nodes(): def get_nodes():
resp = self.app.get( resp = self.app.get(
reverse('NodeCollectionHandler', reverse('NodeCollectionHandler',
kwargs={'cluster_id': self.env.clusters[0].id}), kwargs={'cluster_id': cluster.id}),
headers=self.default_headers, headers=self.default_headers,
) )
return resp.json_body return resp.json_body
self.env.create(nodes_kwargs=[{'api': True}]) cluster = self.env.create(nodes_kwargs=[{'api': True}])
# check all possible handlers # check all possible handlers
for handler in ('NodeAgentHandler', for handler in ('NodeAgentHandler',
@ -724,7 +724,7 @@ class TestHandlers(BaseIntegrationTest):
def get_nodes(): def get_nodes():
resp = self.app.get( resp = self.app.get(
reverse('NodeCollectionHandler', reverse('NodeCollectionHandler',
kwargs={'cluster_id': self.env.clusters[0].id}), kwargs={'cluster_id': cluster.id}),
headers=self.default_headers, headers=self.default_headers,
) )
return resp.json_body return resp.json_body
@ -738,7 +738,7 @@ class TestHandlers(BaseIntegrationTest):
{'name': 'eth3', 'mac': self.env.generate_random_mac()}, {'name': 'eth3', 'mac': self.env.generate_random_mac()},
{'name': 'eth4', 'mac': self.env.generate_random_mac()}, {'name': 'eth4', 'mac': self.env.generate_random_mac()},
] ]
self.env.create(nodes_kwargs=[{'api': True, 'meta': meta}]) cluster = self.env.create(nodes_kwargs=[{'api': True, 'meta': meta}])
# check all possible handlers # check all possible handlers
for handler in ('NodeAgentHandler', for handler in ('NodeAgentHandler',
@ -791,9 +791,8 @@ class TestHandlers(BaseIntegrationTest):
{'name': 'eth0', 'mac': self.env.generate_random_mac(), {'name': 'eth0', 'mac': self.env.generate_random_mac(),
'pxe': False}, 'pxe': False},
] ]
self.env.create(nodes_kwargs=[{'api': False, 'meta': meta}]) cluster = self.env.create(nodes_kwargs=[{'api': False, 'meta': meta}])
cluster = self.env.clusters[0]
node = cluster.nodes[0] node = cluster.nodes[0]
# Processing data through NodeHandler # Processing data through NodeHandler
@ -829,7 +828,7 @@ class TestSriovHandlers(BaseIntegrationTest):
def setUp(self): def setUp(self):
super(TestSriovHandlers, self).setUp() super(TestSriovHandlers, self).setUp()
self.env.create_cluster( cluster = self.env.create_cluster(
editable_attributes={ editable_attributes={
'common': { 'common': {
'libvirt_type': { 'libvirt_type': {
@ -839,7 +838,7 @@ class TestSriovHandlers(BaseIntegrationTest):
} }
) )
self.env.create_nodes_w_interfaces_count( self.env.create_nodes_w_interfaces_count(
1, 3, cluster_id=self.env.clusters[0].id, api=True) 1, 3, cluster_id=cluster.id, api=True)
self.nics = self.get_node_interfaces() self.nics = self.get_node_interfaces()
def get_node_interfaces(self): def get_node_interfaces(self):

View File

@ -96,7 +96,7 @@ class TestNodeNICsBonding(BaseIntegrationTest):
}} }}
]) ])
self.env.create( self.cluster = self.env.create(
cluster_kwargs={ cluster_kwargs={
"net_provider": "neutron", "net_provider": "neutron",
"net_segment_type": "gre" "net_segment_type": "gre"
@ -267,7 +267,7 @@ class TestNodeNICsBonding(BaseIntegrationTest):
resp = self.app.post( resp = self.app.post(
reverse( reverse(
'NodeUnassignmentHandler', 'NodeUnassignmentHandler',
kwargs={'cluster_id': self.env.clusters[0]['id']} kwargs={'cluster_id': self.cluster.id}
), ),
jsonutils.dumps([{'id': node.id}]), jsonutils.dumps([{'id': node.id}]),
headers=self.default_headers headers=self.default_headers
@ -288,7 +288,7 @@ class TestNodeNICsBonding(BaseIntegrationTest):
node = self.env.nodes[0] node = self.env.nodes[0]
resp = self.app.put( resp = self.app.put(
reverse('ClusterHandler', reverse('ClusterHandler',
kwargs={'obj_id': self.env.clusters[0]['id']}), kwargs={'obj_id': self.cluster.id}),
jsonutils.dumps({'nodes': []}), jsonutils.dumps({'nodes': []}),
headers=self.default_headers, headers=self.default_headers,
expect_errors=True expect_errors=True

View File

@ -88,13 +88,13 @@ class TestNotification(BaseIntegrationTest):
self.assertEqual(len(notifications), 0) self.assertEqual(len(notifications), 0)
def test_notification_deploy_error_with_nodes(self): def test_notification_deploy_error_with_nodes(self):
self.env.create(api=False, cluster = self.env.create(
nodes_kwargs=[ api=False,
{ nodes_kwargs=[
'status': consts.NODE_STATUSES.error, {
'error_type': consts.NODE_ERRORS.deploy 'status': consts.NODE_STATUSES.error,
}]) 'error_type': consts.NODE_ERRORS.deploy
cluster = self.env.clusters[0] }])
receiver = rcvr.NailgunReceiver() receiver = rcvr.NailgunReceiver()
task = Task( task = Task(

View File

@ -28,7 +28,7 @@ class TestOpenstackConfigTaskManager80(base.BaseIntegrationTest):
def setUp(self): def setUp(self):
super(TestOpenstackConfigTaskManager80, self).setUp() super(TestOpenstackConfigTaskManager80, self).setUp()
self.env.create( self.cluster = self.env.create(
cluster_kwargs={'net_provider': 'neutron', cluster_kwargs={'net_provider': 'neutron',
'net_segment_type': 'gre'}, 'net_segment_type': 'gre'},
release_kwargs={'version': self.env_version, release_kwargs={'version': self.env_version,
@ -42,7 +42,6 @@ class TestOpenstackConfigTaskManager80(base.BaseIntegrationTest):
) )
self.release = self.env.releases[0] self.release = self.env.releases[0]
self.cluster = self.env.clusters[0]
self.nodes = self.env.nodes self.nodes = self.env.nodes
# this mock configuration is used to insert into DB # this mock configuration is used to insert into DB

View File

@ -171,7 +171,7 @@ class BaseSelectedNodesTest(BaseIntegrationTest):
def setUp(self): def setUp(self):
super(BaseSelectedNodesTest, self).setUp() super(BaseSelectedNodesTest, self).setUp()
self.env.create( self.cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{'roles': ['controller'], 'pending_addition': True}, {'roles': ['controller'], 'pending_addition': True},
{'roles': ['controller'], 'pending_addition': True}, {'roles': ['controller'], 'pending_addition': True},
@ -182,7 +182,6 @@ class BaseSelectedNodesTest(BaseIntegrationTest):
{'roles': ['mongo'], 'pending_addition': True}, {'roles': ['mongo'], 'pending_addition': True},
{'roles': ['cinder'], 'pending_addition': True}]) {'roles': ['cinder'], 'pending_addition': True}])
self.cluster = self.env.clusters[0]
self.nodes = [n for n in self.cluster.nodes][:3] self.nodes = [n for n in self.cluster.nodes][:3]
self.node_uids = [n.uid for n in self.nodes] self.node_uids = [n.uid for n in self.nodes]
@ -610,11 +609,10 @@ class TestSerializedTasksHandler(BaseIntegrationTest):
def setUp(self): def setUp(self):
super(TestSerializedTasksHandler, self).setUp() super(TestSerializedTasksHandler, self).setUp()
self.env.create( self.cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{'roles': ['controller'], 'pending_addition': True}, {'roles': ['controller'], 'pending_addition': True},
{'roles': ['compute'], 'pending_addition': True}]) {'roles': ['compute'], 'pending_addition': True}])
self.cluster = self.env.clusters[-1]
self.nodes = self.cluster.nodes self.nodes = self.cluster.nodes
objects.Cluster.prepare_for_deployment( objects.Cluster.prepare_for_deployment(
self.cluster, self.cluster.nodes) self.cluster, self.cluster.nodes)

View File

@ -2577,7 +2577,7 @@ class TestDeploymentAttributesSerialization61(BaseDeploymentSerializer):
return_value=False) return_value=False)
def test_serialize_workloads_collector_user_opted_out(self, _): def test_serialize_workloads_collector_user_opted_out(self, _):
oswl_user = self.serializer.get_common_attrs( oswl_user = self.serializer.get_common_attrs(
self.env.clusters[0] self.cluster
)['workloads_collector'] )['workloads_collector']
self.assertEqual(set(oswl_user.keys()), self.assertEqual(set(oswl_user.keys()),
set(['username', set(['username',
@ -2596,7 +2596,7 @@ class TestDeploymentAttributesSerialization61(BaseDeploymentSerializer):
return_value=True) return_value=True)
def test_serialize_workloads_collector_user_opted_in(self, _): def test_serialize_workloads_collector_user_opted_in(self, _):
oswl_user = self.serializer.get_common_attrs( oswl_user = self.serializer.get_common_attrs(
self.env.clusters[0] self.cluster
)['workloads_collector'] )['workloads_collector']
self.assertEqual(set(oswl_user.keys()), self.assertEqual(set(oswl_user.keys()),
set(['username', set(['username',

View File

@ -613,7 +613,7 @@ class TestDeploymentSerializationForNovaNetwork70(
self.assertEqual(roles, expected_roles) self.assertEqual(roles, expected_roles)
def test_network_metadata(self): def test_network_metadata(self):
nm = objects.Cluster.get_network_manager(self.env.clusters[0]) nm = objects.Cluster.get_network_manager(self.cluster)
ip_by_net = { ip_by_net = {
'fuelweb_admin': None, 'fuelweb_admin': None,
'storage': None, 'storage': None,
@ -746,7 +746,7 @@ class TestPluginDeploymentTasksInjection70(base.BaseIntegrationTest):
def setUp(self): def setUp(self):
super(TestPluginDeploymentTasksInjection70, self).setUp() super(TestPluginDeploymentTasksInjection70, self).setUp()
# Plugin task injection for Task based is checked in task based tests # Plugin task injection for Task based is checked in task based tests
self.env.create( self.cluster = self.env.create(
release_kwargs={ release_kwargs={
'deployment_tasks': self.release_deployment_tasks, 'deployment_tasks': self.release_deployment_tasks,
'version': self.env_version 'version': self.env_version
@ -762,8 +762,6 @@ class TestPluginDeploymentTasksInjection70(base.BaseIntegrationTest):
] ]
) )
self.cluster = self.env.clusters[0]
self.plugin_data = { self.plugin_data = {
'package_version': '3.0.0', 'package_version': '3.0.0',
'releases': [ 'releases': [
@ -1077,7 +1075,7 @@ class TestRolesSerializationWithPlugins(BaseDeploymentSerializer,
super(TestRolesSerializationWithPlugins, self).setUp() super(TestRolesSerializationWithPlugins, self).setUp()
release = self.patch_net_roles_for_release() release = self.patch_net_roles_for_release()
self.env.create( self.cluster = self.env.create(
cluster_kwargs={ cluster_kwargs={
'release_id': release.id, 'release_id': release.id,
'mode': consts.CLUSTER_MODES.ha_compact, 'mode': consts.CLUSTER_MODES.ha_compact,
@ -1085,8 +1083,6 @@ class TestRolesSerializationWithPlugins(BaseDeploymentSerializer,
'net_segment_type': consts.NEUTRON_SEGMENT_TYPES.vlan, 'net_segment_type': consts.NEUTRON_SEGMENT_TYPES.vlan,
}) })
self.cluster = self.env.clusters[0]
self.plugin_data = { self.plugin_data = {
'package_version': '3.0.0', 'package_version': '3.0.0',
'releases': [ 'releases': [
@ -1572,7 +1568,7 @@ class TestNetworkTemplateSerializer70(BaseDeploymentSerializer,
self.assertEqual(node_attrs['swift_zone'], node.uid) self.assertEqual(node_attrs['swift_zone'], node.uid)
def test_multiple_node_roles_network_metadata_roles(self): def test_multiple_node_roles_network_metadata_roles(self):
nm = objects.Cluster.get_network_manager(self.env.clusters[0]) nm = objects.Cluster.get_network_manager(self.cluster)
ip_by_net = {} ip_by_net = {}
for node_data in self.serialized_for_astute: for node_data in self.serialized_for_astute:
nodes = node_data['network_metadata']['nodes'] nodes = node_data['network_metadata']['nodes']
@ -1664,7 +1660,7 @@ class TestNetworkTemplateSerializer70(BaseDeploymentSerializer,
# networks to interfaces mapping # networks to interfaces mapping
resp = self.app.get( resp = self.app.get(
reverse('NetworkGroupCollectionHandler', reverse('NetworkGroupCollectionHandler',
kwargs=self.env.clusters[0]), kwargs=self.cluster),
headers=self.default_headers, headers=self.default_headers,
expect_errors=False expect_errors=False
) )
@ -1688,7 +1684,7 @@ class TestNetworkTemplateSerializer70(BaseDeploymentSerializer,
) )
resp = self.app.get( resp = self.app.get(
reverse('NetworkGroupCollectionHandler', reverse('NetworkGroupCollectionHandler',
kwargs=self.env.clusters[0]), kwargs=self.cluster),
headers=self.default_headers, headers=self.default_headers,
expect_errors=False expect_errors=False
) )
@ -1756,7 +1752,7 @@ class TestNetworkTemplateSerializer70(BaseDeploymentSerializer,
net_template net_template
) )
cluster_db = self.db.query(models.Cluster).get(self.cluster['id']) cluster_db = self.db.query(models.Cluster).get(self.cluster['id'])
nm = objects.Cluster.get_network_manager(self.env.clusters[0]) nm = objects.Cluster.get_network_manager(self.cluster)
serializer = get_serializer_for_cluster(self.cluster) serializer = get_serializer_for_cluster(self.cluster)
self.serialized_for_astute = serializer( self.serialized_for_astute = serializer(
AstuteGraph(cluster_db)).serialize(self.cluster, cluster_db.nodes) AstuteGraph(cluster_db)).serialize(self.cluster, cluster_db.nodes)

View File

@ -91,14 +91,13 @@ class TestNetworkTemplateSerializer80(
def setUp(self, *args): def setUp(self, *args):
super(TestNetworkTemplateSerializer80, self).setUp() super(TestNetworkTemplateSerializer80, self).setUp()
self.env.create( self.cluster = self.env.create(
release_kwargs={'version': self.env_version}, release_kwargs={'version': self.env_version},
cluster_kwargs={ cluster_kwargs={
'mode': consts.CLUSTER_MODES.ha_compact, 'mode': consts.CLUSTER_MODES.ha_compact,
'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron, 'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron,
'net_segment_type': consts.NEUTRON_SEGMENT_TYPES.vlan}) 'net_segment_type': consts.NEUTRON_SEGMENT_TYPES.vlan})
self.net_template = self.env.read_fixtures(['network_template_80'])[0] self.net_template = self.env.read_fixtures(['network_template_80'])[0]
self.cluster = self.env.clusters[-1]
self.serializer = self.create_serializer(self.cluster) self.serializer = self.create_serializer(self.cluster)
def test_get_net_provider_serializer(self): def test_get_net_provider_serializer(self):
@ -210,7 +209,7 @@ class TestDeploymentTasksSerialization80(
def setUp(self): def setUp(self):
super(TestDeploymentTasksSerialization80, self).setUp() super(TestDeploymentTasksSerialization80, self).setUp()
self.env.create( self.cluster = self.env.create(
release_kwargs={'version': self.env_version}, release_kwargs={'version': self.env_version},
cluster_kwargs={ cluster_kwargs={
'mode': consts.CLUSTER_MODES.ha_compact, 'mode': consts.CLUSTER_MODES.ha_compact,
@ -222,7 +221,6 @@ class TestDeploymentTasksSerialization80(
'status': consts.NODE_STATUSES.ready}] 'status': consts.NODE_STATUSES.ready}]
) )
self.cluster = self.env.clusters[-1]
if not self.task_deploy: if not self.task_deploy:
self.env.disable_task_deploy(self.cluster) self.env.disable_task_deploy(self.cluster)

View File

@ -610,7 +610,7 @@ class TestDeploymentHASerializer90(
def test_ceph_keys(self): def test_ceph_keys(self):
storage_attrs = self.serializer.get_common_attrs( storage_attrs = self.serializer.get_common_attrs(
self.env.clusters[0] self.cluster
)['storage'] )['storage']
expected_keys = ( expected_keys = (
'fsid', 'mon_key', 'admin_key', 'bootstrap_osd_key', 'radosgw_key' 'fsid', 'mon_key', 'admin_key', 'bootstrap_osd_key', 'radosgw_key'
@ -764,7 +764,7 @@ class TestSriovSerialization90(
): ):
def setUp(self, *args): def setUp(self, *args):
super(TestSriovSerialization90, self).setUp() super(TestSriovSerialization90, self).setUp()
cluster = self.env.create( self.cluster_db = self.env.create(
release_kwargs={'version': self.env_version}, release_kwargs={'version': self.env_version},
cluster_kwargs={ cluster_kwargs={
'mode': consts.CLUSTER_MODES.ha_compact, 'mode': consts.CLUSTER_MODES.ha_compact,
@ -772,15 +772,14 @@ class TestSriovSerialization90(
'net_segment_type': consts.NEUTRON_SEGMENT_TYPES.vlan, 'net_segment_type': consts.NEUTRON_SEGMENT_TYPES.vlan,
'status': consts.CLUSTER_STATUSES.new}, 'status': consts.CLUSTER_STATUSES.new},
) )
self.cluster_db = objects.Cluster.get_by_uid(cluster['id'])
self.env.create_nodes_w_interfaces_count( self.env.create_nodes_w_interfaces_count(
nodes_count=1, if_count=3, cluster_id=self.env.clusters[0].id, nodes_count=1, if_count=3, cluster_id=self.cluster_db.id,
pending_roles=['compute'], pending_addition=True) pending_roles=['compute'], pending_addition=True)
def serialize(self): def serialize(self):
objects.Cluster.prepare_for_deployment(self.env.clusters[0]) objects.Cluster.prepare_for_deployment(self.cluster_db)
serializer = self.create_serializer(self.env.clusters[0]) serializer = self.create_serializer(self.cluster_db)
return serializer.serialize(self.env.clusters[0], self.env.nodes) return serializer.serialize(self.cluster_db, self.env.nodes)
def test_nic_sriov_info_is_serialized(self): def test_nic_sriov_info_is_serialized(self):
for nic in self.env.nodes[0].nic_interfaces: for nic in self.env.nodes[0].nic_interfaces:

View File

@ -29,13 +29,12 @@ class TestPluginManager(base.BaseIntegrationTest):
def setUp(self): def setUp(self):
super(TestPluginManager, self).setUp() super(TestPluginManager, self).setUp()
self.env.create( self.cluster = self.env.create(
release_kwargs={ release_kwargs={
'version': '2015.1-8.0', 'version': '2015.1-8.0',
'operating_system': 'Ubuntu'}) 'operating_system': 'Ubuntu'})
self.release = self.env.releases[0] self.release = self.env.releases[0]
self.cluster = self.env.clusters[0]
# Create two plugins with package verion 3.0.0 # Create two plugins with package verion 3.0.0
for name in ['test_plugin_1', 'test_plugin_2']: for name in ['test_plugin_1', 'test_plugin_2']:
@ -286,8 +285,7 @@ class TestPluginManager(base.BaseIntegrationTest):
self.assertItemsEqual([plugin], enabled_plugins) self.assertItemsEqual([plugin], enabled_plugins)
def test_get_plugins_attributes_when_cluster_is_locked(self): def test_get_plugins_attributes_when_cluster_is_locked(self):
self.env.create(api=False) cluster = self.env.create(api=False)
cluster = self.env.clusters[-1]
plugin_a1 = self.env.create_plugin( plugin_a1 = self.env.create_plugin(
name='plugin_a', version='1.0.1', name='plugin_a', version='1.0.1',
cluster=cluster, enabled=False cluster=cluster, enabled=False
@ -361,8 +359,7 @@ class TestPluginManager(base.BaseIntegrationTest):
) )
def test_get_plugins_attributes_when_cluster_is_not_locked(self): def test_get_plugins_attributes_when_cluster_is_not_locked(self):
self.env.create(api=False) cluster = self.env.create(api=False)
cluster = self.env.clusters[-1]
plugin_a1 = self.env.create_plugin( plugin_a1 = self.env.create_plugin(
name='plugin_a', version='1.0.1', name='plugin_a', version='1.0.1',
cluster=cluster, enabled=False cluster=cluster, enabled=False
@ -459,14 +456,13 @@ class TestClusterPluginIntegration(base.BaseTestCase):
def setUp(self): def setUp(self):
super(TestClusterPluginIntegration, self).setUp() super(TestClusterPluginIntegration, self).setUp()
self.env.create( self.cluster = self.env.create(
release_kwargs={ release_kwargs={
'operating_system': consts.RELEASE_OS.ubuntu, 'operating_system': consts.RELEASE_OS.ubuntu,
'version': '2015.1-8.0'}, 'version': '2015.1-8.0'},
cluster_kwargs={ cluster_kwargs={
'mode': consts.CLUSTER_MODES.ha_compact, 'mode': consts.CLUSTER_MODES.ha_compact,
}) })
self.cluster = self.env.clusters[0]
def _create_plugin(self, **kwargs): def _create_plugin(self, **kwargs):
plugin = self.env.create_plugin(name=uuid.uuid4().get_hex(), **kwargs) plugin = self.env.create_plugin(name=uuid.uuid4().get_hex(), **kwargs)
@ -484,12 +480,11 @@ class TestClusterPluginIntegration(base.BaseTestCase):
plugin_b = self._create_plugin(**self._compat_meta) plugin_b = self._create_plugin(**self._compat_meta)
self._create_plugin(**self._uncompat_meta) self._create_plugin(**self._uncompat_meta)
self.env.create( cluster = self.env.create(
cluster_kwargs={ cluster_kwargs={
'release_id': self.cluster.release.id, 'release_id': self.cluster.release.id,
'mode': consts.CLUSTER_MODES.ha_compact, 'mode': consts.CLUSTER_MODES.ha_compact,
}) })
cluster = self.env.clusters[1]
compat_plugins = ClusterPlugins.get_compatible_plugins(cluster) compat_plugins = ClusterPlugins.get_compatible_plugins(cluster)
self.assertItemsEqual(compat_plugins, [plugin_a, plugin_b]) self.assertItemsEqual(compat_plugins, [plugin_a, plugin_b])

View File

@ -52,12 +52,12 @@ class BasePluginTest(base.BaseIntegrationTest):
def create_cluster(self, nodes=None): def create_cluster(self, nodes=None):
nodes = nodes if nodes else [] nodes = nodes if nodes else []
self.env.create( cluster = self.env.create(
release_kwargs={'version': '2014.2-6.0', release_kwargs={'version': '2014.2-6.0',
'operating_system': 'Ubuntu', 'operating_system': 'Ubuntu',
'deployment_tasks': []}, 'deployment_tasks': []},
nodes_kwargs=nodes) nodes_kwargs=nodes)
return self.env.clusters[0] return cluster
def default_attributes(self, cluster): def default_attributes(self, cluster):
resp = self.app.get( resp = self.app.get(

View File

@ -28,7 +28,7 @@ class TestProvisioning(BaseIntegrationTest):
@fake_tasks(fake_rpc=False, mock_rpc=False) @fake_tasks(fake_rpc=False, mock_rpc=False)
@patch('nailgun.rpc.cast') @patch('nailgun.rpc.cast')
def test_nodes_in_cluster(self, mocked_rpc): def test_nodes_in_cluster(self, mocked_rpc):
self.env.create( cluster = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"api": False}, {"api": False},
@ -36,18 +36,17 @@ class TestProvisioning(BaseIntegrationTest):
{"api": False, "cluster_id": None} {"api": False, "cluster_id": None}
] ]
) )
cluster_db = self.env.clusters[0]
for node in self.env.nodes[:2]: for node in self.env.nodes[:2]:
cluster_db.nodes.append(node) cluster.nodes.append(node)
self.db.add(cluster_db) self.db.add(cluster)
self.db.commit() self.db.commit()
self.assertEqual(len(cluster_db.nodes), 2) self.assertEqual(len(cluster.nodes), 2)
@fake_tasks(fake_rpc=False, mock_rpc=False) @fake_tasks(fake_rpc=False, mock_rpc=False)
@patch('nailgun.rpc.cast') @patch('nailgun.rpc.cast')
def test_node_status_changes_to_provision(self, mocked_rpc=None): def test_node_status_changes_to_provision(self, mocked_rpc=None):
self.env.create( cluster = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"api": False, "status": consts.NODE_STATUSES.ready}, {"api": False, "status": consts.NODE_STATUSES.ready},
@ -67,7 +66,6 @@ class TestProvisioning(BaseIntegrationTest):
"error_type": "provision"} "error_type": "provision"}
] ]
) )
cluster = self.env.clusters[0]
objects.Cluster.clear_pending_changes(cluster) objects.Cluster.clear_pending_changes(cluster)
self.env.network_manager.assign_ips( self.env.network_manager.assign_ips(
cluster, self.env.nodes, consts.NETWORKS.fuelweb_admin cluster, self.env.nodes, consts.NETWORKS.fuelweb_admin

View File

@ -97,8 +97,7 @@ class TestProvisioningSerializer(BaseIntegrationTest):
def setUp(self): def setUp(self):
super(TestProvisioningSerializer, self).setUp() super(TestProvisioningSerializer, self).setUp()
self.env.create() self.cluster_db = self.env.create()
self.cluster_db = self.env.clusters[0]
self.env.create_nodes_w_interfaces_count( self.env.create_nodes_w_interfaces_count(
1, 1, 1, 1,
**{ **{
@ -191,7 +190,6 @@ class TestProvisioningSerializer(BaseIntegrationTest):
self.assertEqual(node['ks_meta']['mco_identity'], node_db.id) self.assertEqual(node['ks_meta']['mco_identity'], node_db.id)
def test_node_serialization_w_bonded_admin_iface(self): def test_node_serialization_w_bonded_admin_iface(self):
self.cluster_db = self.env.clusters[0]
# create additional node to test bonding # create additional node to test bonding
admin_mac = self.env.generate_random_mac() admin_mac = self.env.generate_random_mac()
meta = { meta = {
@ -409,10 +407,9 @@ class TestProvisioningSerializer90(BaseIntegrationTest):
serializer = ps.ProvisioningSerializer90 serializer = ps.ProvisioningSerializer90
def test_user_account_info(self): def test_user_account_info(self):
self.env.create( self.cluster_db = self.env.create(
release_kwargs={'version': 'liberty-9.0'}, release_kwargs={'version': 'liberty-9.0'},
) )
self.cluster_db = self.env.clusters[0]
self.env.create_nodes_w_interfaces_count( self.env.create_nodes_w_interfaces_count(
1, 1, 1, 1,
roles=['controller'], roles=['controller'],
@ -470,7 +467,7 @@ class TestProvisioningSerializer90(BaseIntegrationTest):
) )
def test_serialize_iommu_parameters_for_sriov(self): def test_serialize_iommu_parameters_for_sriov(self):
self.env.create( cluster = self.env.create(
release_kwargs={ release_kwargs={
'version': 'liberty-9.0', 'version': 'liberty-9.0',
'operating_system': consts.RELEASE_OS.ubuntu}, 'operating_system': consts.RELEASE_OS.ubuntu},
@ -484,7 +481,7 @@ class TestProvisioningSerializer90(BaseIntegrationTest):
objects.NIC.update(sriov_nic, {}) objects.NIC.update(sriov_nic, {})
serialized_node = self.serializer.serialize( serialized_node = self.serializer.serialize(
self.env.clusters[0], self.env.nodes)['nodes'][0] cluster, self.env.nodes)['nodes'][0]
kernel_opts = serialized_node['ks_meta']['pm_data']['kernel_params'] kernel_opts = serialized_node['ks_meta']['pm_data']['kernel_params']
self.assertIn("intel_iommu=on", kernel_opts) self.assertIn("intel_iommu=on", kernel_opts)
self.assertIn("amd_iommu=on", kernel_opts) self.assertIn("amd_iommu=on", kernel_opts)

View File

@ -43,7 +43,6 @@ class TestPutSameJson(base.BaseIntegrationTest):
{'api': True, 'pending_addition': True, 'meta': meta_p}, {'api': True, 'pending_addition': True, 'meta': meta_p},
] ]
) )
self.cluster = self.env.clusters[0]
def assertHttpPut(self, name, arguments, data, expected_status): def assertHttpPut(self, name, arguments, data, expected_status):
"""Helper assert for checking HTTP PUT. """Helper assert for checking HTTP PUT.

View File

@ -33,7 +33,7 @@ class TestResetEnvironment(BaseIntegrationTest):
ia_nodes_count=1 ia_nodes_count=1
) )
def test_reset_environment(self): def test_reset_environment(self):
self.env.create( cluster_db = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"name": "First", {"name": "First",
@ -43,7 +43,6 @@ class TestResetEnvironment(BaseIntegrationTest):
"pending_addition": True} "pending_addition": True}
] ]
) )
cluster_db = self.env.clusters[0]
supertask = self.env.launch_deployment() supertask = self.env.launch_deployment()
self.assertEqual(supertask.status, consts.TASK_STATUSES.ready) self.assertEqual(supertask.status, consts.TASK_STATUSES.ready)
@ -92,13 +91,12 @@ class TestResetEnvironment(BaseIntegrationTest):
ia_nodes_count=1 ia_nodes_count=1
) )
def test_reset_node_pending_statuses(self): def test_reset_node_pending_statuses(self):
self.env.create( cluster_db = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"pending_addition": True}, {"pending_addition": True},
] ]
) )
cluster_db = self.env.clusters[0]
node_db = self.env.nodes[0] node_db = self.env.nodes[0]
# deploy environment # deploy environment
@ -131,7 +129,7 @@ class TestResetEnvironment(BaseIntegrationTest):
ia_nodes_count=1 ia_nodes_count=1
) )
def test_reset_environment_tasks(self): def test_reset_environment_tasks(self):
self.env.create( cluster_db = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"name": "First", {"name": "First",
@ -141,7 +139,6 @@ class TestResetEnvironment(BaseIntegrationTest):
"pending_addition": True} "pending_addition": True}
] ]
) )
cluster_db = self.env.clusters[0]
supertask = self.env.launch_deployment() supertask = self.env.launch_deployment()
self.assertEqual(supertask.status, consts.TASK_STATUSES.ready) self.assertEqual(supertask.status, consts.TASK_STATUSES.ready)

View File

@ -57,14 +57,13 @@ class TestVerifyNetworks(BaseReciverTestCase):
return nodes_message return nodes_message
def test_verify_networks_resp(self): def test_verify_networks_resp(self):
self.env.create( cluster_db = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"api": False}, {"api": False},
{"api": False} {"api": False}
] ]
) )
cluster_db = self.env.clusters[0]
node1, node2 = self.env.nodes node1, node2 = self.env.nodes
nets = [{'iface': 'eth0', 'vlans': range(100, 105)}] nets = [{'iface': 'eth0', 'vlans': range(100, 105)}]
@ -92,14 +91,13 @@ class TestVerifyNetworks(BaseReciverTestCase):
self.assertEqual(task.message, '') self.assertEqual(task.message, '')
def test_verify_networks_error_and_notice_are_concatenated(self): def test_verify_networks_error_and_notice_are_concatenated(self):
self.env.create( cluster_db = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"api": False}, {"api": False},
{"api": False}, {"api": False},
] ]
) )
cluster_db = self.env.clusters[0]
node1, node2 = self.env.nodes node1, node2 = self.env.nodes
nets = [{'iface': 'eth0', 'vlans': range(100, 105)}] nets = [{'iface': 'eth0', 'vlans': range(100, 105)}]
@ -132,14 +130,13 @@ class TestVerifyNetworks(BaseReciverTestCase):
'\n'.join((custom_error, offline_notice))) '\n'.join((custom_error, offline_notice)))
def test_verify_networks_resp_error(self): def test_verify_networks_resp_error(self):
self.env.create( cluster_db = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"api": False}, {"api": False},
{"api": False} {"api": False}
] ]
) )
cluster_db = self.env.clusters[0]
node1, node2 = self.env.nodes node1, node2 = self.env.nodes
nets_sent = [{'iface': 'eth0', 'vlans': range(100, 105)}] nets_sent = [{'iface': 'eth0', 'vlans': range(100, 105)}]
nets_resp = [{'iface': 'eth0', 'vlans': range(100, 104)}] nets_resp = [{'iface': 'eth0', 'vlans': range(100, 104)}]
@ -173,7 +170,7 @@ class TestVerifyNetworks(BaseReciverTestCase):
self.assertEqual(task.result, error_nodes) self.assertEqual(task.result, error_nodes)
def test_verify_networks_resp_error_with_removed_node(self): def test_verify_networks_resp_error_with_removed_node(self):
self.env.create( cluster_db = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"api": False}, {"api": False},
@ -181,7 +178,6 @@ class TestVerifyNetworks(BaseReciverTestCase):
] ]
) )
cluster_db = self.env.clusters[0]
node1, node2 = self.env.nodes node1, node2 = self.env.nodes
nets_sent = [{'iface': 'eth0', 'vlans': range(100, 105)}] nets_sent = [{'iface': 'eth0', 'vlans': range(100, 105)}]
nets_resp = [{'iface': 'eth0', 'vlans': range(100, 104)}] nets_resp = [{'iface': 'eth0', 'vlans': range(100, 104)}]
@ -227,14 +223,13 @@ class TestVerifyNetworks(BaseReciverTestCase):
self.assertEqual(task['result'], error_nodes) self.assertEqual(task['result'], error_nodes)
def test_verify_networks_resp_empty_nodes_custom_error(self): def test_verify_networks_resp_empty_nodes_custom_error(self):
self.env.create( cluster_db = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"api": False}, {"api": False},
{"api": False} {"api": False}
] ]
) )
cluster_db = self.env.clusters[0]
node1, node2 = self.env.nodes node1, node2 = self.env.nodes
nets_sent = [{'iface': 'eth0', 'vlans': range(100, 105)}] nets_sent = [{'iface': 'eth0', 'vlans': range(100, 105)}]
@ -263,14 +258,13 @@ class TestVerifyNetworks(BaseReciverTestCase):
self.assertEqual(task.message, error_msg) self.assertEqual(task.message, error_msg)
def test_verify_networks_resp_extra_nodes_error(self): def test_verify_networks_resp_extra_nodes_error(self):
self.env.create( cluster_db = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"api": False}, {"api": False},
{"api": False} {"api": False}
] ]
) )
cluster_db = self.env.clusters[0]
node1, node2 = self.env.nodes node1, node2 = self.env.nodes
node3 = self.env.create_node(api=False) node3 = self.env.create_node(api=False)
nets_sent = [{'iface': 'eth0', 'vlans': range(100, 105)}] nets_sent = [{'iface': 'eth0', 'vlans': range(100, 105)}]
@ -304,14 +298,13 @@ class TestVerifyNetworks(BaseReciverTestCase):
Test verifies that when dhcp subtask is ready and Test verifies that when dhcp subtask is ready and
verify_networks errored - verify_networks will be in error verify_networks errored - verify_networks will be in error
""" """
self.env.create( cluster_db = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"api": False}, {"api": False},
{"api": False} {"api": False}
] ]
) )
cluster_db = self.env.clusters[0]
node1, node2 = self.env.nodes node1, node2 = self.env.nodes
nets_sent = [{'iface': 'eth0', 'vlans': range(100, 105)}] nets_sent = [{'iface': 'eth0', 'vlans': range(100, 105)}]
@ -343,14 +336,13 @@ class TestVerifyNetworks(BaseReciverTestCase):
self.assertEqual(task.status, "error") self.assertEqual(task.status, "error")
def test_verify_networks_with_dhcp_subtask_erred(self): def test_verify_networks_with_dhcp_subtask_erred(self):
self.env.create( cluster_db = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"api": False}, {"api": False},
{"api": False} {"api": False}
] ]
) )
cluster_db = self.env.clusters[0]
node1, node2 = self.env.nodes node1, node2 = self.env.nodes
nets_sent = [{'iface': 'eth0', 'vlans': range(100, 105)}] nets_sent = [{'iface': 'eth0', 'vlans': range(100, 105)}]
@ -393,7 +385,7 @@ class TestVerifyNetworks(BaseReciverTestCase):
u'uid': node2.id}]) u'uid': node2.id}])
def test_verify_networks_resp_forgotten_node_error(self): def test_verify_networks_resp_forgotten_node_error(self):
self.env.create( cluster_db = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"api": False, 'name': 'node1'}, {"api": False, 'name': 'node1'},
@ -401,7 +393,6 @@ class TestVerifyNetworks(BaseReciverTestCase):
{"api": False, 'name': 'node3'} {"api": False, 'name': 'node3'}
] ]
) )
cluster_db = self.env.clusters[0]
node1, node2, node3 = self.env.nodes node1, node2, node3 = self.env.nodes
nets_sent = [{'iface': 'eth0', 'vlans': range(100, 105)}] nets_sent = [{'iface': 'eth0', 'vlans': range(100, 105)}]
@ -436,7 +427,7 @@ class TestVerifyNetworks(BaseReciverTestCase):
self.env.set_interfaces_in_meta( self.env.set_interfaces_in_meta(
meta, [{'name': 'eth0', 'mac': mac}]) meta, [{'name': 'eth0', 'mac': mac}])
self.env.create( cluster_db = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"api": False, 'name': 'node1'}, {"api": False, 'name': 'node1'},
@ -444,7 +435,6 @@ class TestVerifyNetworks(BaseReciverTestCase):
{"api": False, 'name': 'node3'} {"api": False, 'name': 'node3'}
] ]
) )
cluster_db = self.env.clusters[0]
node1, node2, node3 = self.env.nodes node1, node2, node3 = self.env.nodes
nets_sent = [{'iface': 'eth0', 'vlans': range(100, 105)}, nets_sent = [{'iface': 'eth0', 'vlans': range(100, 105)},
@ -495,14 +485,13 @@ class TestVerifyNetworks(BaseReciverTestCase):
def test_verify_networks_resp_incomplete_network_data_on_first_node(self): def test_verify_networks_resp_incomplete_network_data_on_first_node(self):
"""First node network data incompletion causes task fail""" """First node network data incompletion causes task fail"""
self.env.create( cluster_db = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"api": False, 'name': 'node1'}, {"api": False, 'name': 'node1'},
{"api": False, 'name': 'node2'}, {"api": False, 'name': 'node2'},
] ]
) )
cluster_db = self.env.clusters[0]
node1, node2 = self.env.nodes node1, node2 = self.env.nodes
nets_sent = [{'iface': 'eth0', 'vlans': range(100, 105)}] nets_sent = [{'iface': 'eth0', 'vlans': range(100, 105)}]
@ -543,14 +532,13 @@ class TestVerifyNetworks(BaseReciverTestCase):
Passes when only iface without vlans configured Passes when only iface without vlans configured
""" """
self.env.create( cluster_db = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"api": False}, {"api": False},
{"api": False} {"api": False}
] ]
) )
cluster_db = self.env.clusters[0]
node1, node2 = self.env.nodes node1, node2 = self.env.nodes
nets_sent = [{'iface': 'eth0', 'vlans': [0]}, nets_sent = [{'iface': 'eth0', 'vlans': [0]},
{'iface': 'eth1', 'vlans': [0]}] {'iface': 'eth1', 'vlans': [0]}]
@ -578,14 +566,13 @@ class TestVerifyNetworks(BaseReciverTestCase):
def test_verify_networks_resp_without_vlans_only_erred(self): def test_verify_networks_resp_without_vlans_only_erred(self):
"""Net verification without vlans fails when not all info received""" """Net verification without vlans fails when not all info received"""
self.env.create( cluster_db = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"api": False}, {"api": False},
{"api": False} {"api": False}
] ]
) )
cluster_db = self.env.clusters[0]
node1, node2 = self.env.nodes node1, node2 = self.env.nodes
nets_sent = [{'iface': 'eth0', 'vlans': [0]}] nets_sent = [{'iface': 'eth0', 'vlans': [0]}]
nets_resp = [{'iface': 'eth0', 'vlans': []}] nets_resp = [{'iface': 'eth0', 'vlans': []}]
@ -624,14 +611,13 @@ class TestVerifyNetworks(BaseReciverTestCase):
def test_verify_networks_resp_partially_without_vlans(self): def test_verify_networks_resp_partially_without_vlans(self):
"""Verify that network verification partially without vlans passes""" """Verify that network verification partially without vlans passes"""
self.env.create( cluster_db = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"api": False}, {"api": False},
{"api": False} {"api": False}
] ]
) )
cluster_db = self.env.clusters[0]
node1, node2 = self.env.nodes node1, node2 = self.env.nodes
nets_sent = [{'iface': 'eth0', 'vlans': [0]}, nets_sent = [{'iface': 'eth0', 'vlans': [0]},
{'iface': 'eth1', 'vlans': range(100, 104)}] {'iface': 'eth1', 'vlans': range(100, 104)}]
@ -659,14 +645,13 @@ class TestVerifyNetworks(BaseReciverTestCase):
def test_verify_networks_with_excluded_networks(self): def test_verify_networks_with_excluded_networks(self):
"""Verify that network verification can exclude interfaces""" """Verify that network verification can exclude interfaces"""
self.env.create( cluster_db = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"api": False}, {"api": False},
{"api": False} {"api": False}
] ]
) )
cluster_db = self.env.clusters[0]
node1, node2 = self.env.nodes node1, node2 = self.env.nodes
nets_sent = [{'iface': 'eth0', 'vlans': [0]}, nets_sent = [{'iface': 'eth0', 'vlans': [0]},
{'iface': 'eth1', 'vlans': range(100, 104)}] {'iface': 'eth1', 'vlans': range(100, 104)}]
@ -733,14 +718,13 @@ class TestDhcpCheckTask(BaseReciverTestCase):
def setUp(self): def setUp(self):
super(TestDhcpCheckTask, self).setUp() super(TestDhcpCheckTask, self).setUp()
self.env.create( cluster_db = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"api": False}, {"api": False},
{"api": False} {"api": False}
] ]
) )
cluster_db = self.env.clusters[0]
self.node1, self.node2 = self.env.nodes self.node1, self.node2 = self.env.nodes
self.task = Task( self.task = Task(
@ -822,7 +806,7 @@ class TestDhcpCheckTask(BaseReciverTestCase):
class TestConsumer(BaseReciverTestCase): class TestConsumer(BaseReciverTestCase):
def test_node_deploy_resp(self): def test_node_deploy_resp(self):
self.env.create( cluster = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"api": False}, {"api": False},
@ -834,7 +818,7 @@ class TestConsumer(BaseReciverTestCase):
task = Task( task = Task(
uuid=str(uuid.uuid4()), uuid=str(uuid.uuid4()),
name="deploy", name="deploy",
cluster_id=self.env.clusters[0].id cluster_id=cluster.id
) )
self.db.add(task) self.db.add(task)
self.db.commit() self.db.commit()
@ -852,7 +836,7 @@ class TestConsumer(BaseReciverTestCase):
self.assertEqual(task.status, "running") self.assertEqual(task.status, "running")
def test_node_provision_resp(self): def test_node_provision_resp(self):
self.env.create( cluster = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"api": False}, {"api": False},
@ -862,7 +846,7 @@ class TestConsumer(BaseReciverTestCase):
task = Task( task = Task(
name='provision', name='provision',
cluster_id=self.env.clusters[0].id) cluster_id=cluster.id)
self.db.add(task) self.db.add(task)
self.db.commit() self.db.commit()
@ -932,7 +916,7 @@ class TestConsumer(BaseReciverTestCase):
self.db.delete(al) self.db.delete(al)
self.db.commit() self.db.commit()
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{'api': False}, {'api': False},
{'api': False} {'api': False}
@ -953,19 +937,19 @@ class TestConsumer(BaseReciverTestCase):
] ]
for kw in test_cases_kwargs: for kw in test_cases_kwargs:
kw['cluster_id'] = self.env.clusters[0].id kw['cluster_id'] = cluster.id
kw['node_ids'] = [node.id, node2.id] kw['node_ids'] = [node.id, node2.id]
check_write_logs_from_receiver(**kw) check_write_logs_from_receiver(**kw)
def test_task_progress(self): def test_task_progress(self):
self.env.create_cluster() cluster = self.env.create_cluster()
task = Task( task = Task(
uuid=str(uuid.uuid4()), uuid=str(uuid.uuid4()),
name="super", name="super",
status="running", status="running",
cluster_id=self.env.clusters[0].id cluster_id=cluster.id
) )
self.db.add(task) self.db.add(task)
self.db.commit() self.db.commit()
@ -1080,7 +1064,7 @@ class TestConsumer(BaseReciverTestCase):
self.assertEqual(supertask.progress, calculated_progress) self.assertEqual(supertask.progress, calculated_progress)
def _prepare_task(self, name): def _prepare_task(self, name):
self.env.create( cluster = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"api": False}, {"api": False},
@ -1091,7 +1075,7 @@ class TestConsumer(BaseReciverTestCase):
uuid=str(uuid.uuid4()), uuid=str(uuid.uuid4()),
name=name, name=name,
status=consts.TASK_STATUSES.running, status=consts.TASK_STATUSES.running,
cluster_id=self.env.clusters[0].id cluster_id=cluster.id
) )
self.db.add(task) self.db.add(task)
self.db.flush() self.db.flush()
@ -1228,14 +1212,13 @@ class TestConsumer(BaseReciverTestCase):
u"Provision has failed\. Check these nodes:\n'(.*)', '(.*)'") u"Provision has failed\. Check these nodes:\n'(.*)', '(.*)'")
def test_remove_nodes_resp(self): def test_remove_nodes_resp(self):
self.env.create( cluster_db = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"api": False}, {"api": False},
{"api": False} {"api": False}
] ]
) )
cluster_db = self.env.clusters[0]
node1, node2 = self.env.nodes node1, node2 = self.env.nodes
task = Task( task = Task(
@ -1267,14 +1250,13 @@ class TestConsumer(BaseReciverTestCase):
self.assertEqual(len(nodes_db), 0) self.assertEqual(len(nodes_db), 0)
def test_remove_nodes_resp_failure(self): def test_remove_nodes_resp_failure(self):
self.env.create( cluster_db = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"api": False}, {"api": False},
{"api": False} {"api": False}
] ]
) )
cluster_db = self.env.clusters[0]
node1, node2 = self.env.nodes node1, node2 = self.env.nodes
task = Task( task = Task(
@ -1366,8 +1348,7 @@ class TestConsumer(BaseReciverTestCase):
self.assertIsNone(cluster_db) self.assertIsNone(cluster_db)
def test_remove_images_resp(self): def test_remove_images_resp(self):
self.env.create() cluster_db = self.env.create()
cluster_db = self.env.clusters[0]
task = Task( task = Task(
name=consts.TASK_NAMES.remove_images, name=consts.TASK_NAMES.remove_images,
@ -1388,8 +1369,7 @@ class TestConsumer(BaseReciverTestCase):
self.assertEqual(consts.TASK_STATUSES.ready, task.status) self.assertEqual(consts.TASK_STATUSES.ready, task.status)
def test_remove_images_resp_failed(self): def test_remove_images_resp_failed(self):
self.env.create() cluster_db = self.env.create()
cluster_db = self.env.clusters[0]
task = Task( task = Task(
name=consts.TASK_NAMES.remove_images, name=consts.TASK_NAMES.remove_images,
@ -1410,14 +1390,13 @@ class TestConsumer(BaseReciverTestCase):
self.assertEqual(consts.TASK_STATUSES.error, task.status) self.assertEqual(consts.TASK_STATUSES.error, task.status)
def test_remove_cluster_resp_failed(self): def test_remove_cluster_resp_failed(self):
self.env.create( cluster_db = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"api": False}, {"api": False},
{"api": False} {"api": False}
] ]
) )
cluster_db = self.env.clusters[0]
node1, node2 = self.env.nodes node1, node2 = self.env.nodes
self.env.create_notification( self.env.create_notification(
cluster_id=cluster_db.id cluster_id=cluster_db.id
@ -1458,19 +1437,18 @@ class TestConsumer(BaseReciverTestCase):
nets_db = self.db.query(NetworkGroup).\ nets_db = self.db.query(NetworkGroup).\
filter(NetworkGroup.group_id == filter(NetworkGroup.group_id ==
objects.Cluster.get_default_group( objects.Cluster.get_default_group(
self.env.clusters[0]).id).\ cluster_db).id).\
all() all()
self.assertNotEqual(len(nets_db), 0) self.assertNotEqual(len(nets_db), 0)
def test_provision_resp_master_uid(self): def test_provision_resp_master_uid(self):
self.env.create( cluster = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"api": False, "status": consts.NODE_STATUSES.provisioning}, {"api": False, "status": consts.NODE_STATUSES.provisioning},
{"api": False, "status": consts.NODE_STATUSES.provisioning}, {"api": False, "status": consts.NODE_STATUSES.provisioning},
] ]
) )
cluster = self.env.clusters[0]
node1, node2 = self.env.nodes node1, node2 = self.env.nodes
task = Task( task = Task(
@ -1501,7 +1479,7 @@ class TestConsumer(BaseReciverTestCase):
self.assertEqual(node2.error_type, consts.NODE_ERRORS.provision) self.assertEqual(node2.error_type, consts.NODE_ERRORS.provision)
def test_update_config_resp(self): def test_update_config_resp(self):
self.env.create( cluster = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{'api': False, 'roles': ['controller'], {'api': False, 'roles': ['controller'],
@ -1515,7 +1493,7 @@ class TestConsumer(BaseReciverTestCase):
task = Task( task = Task(
uuid=str(uuid.uuid4()), uuid=str(uuid.uuid4()),
name=consts.TASK_NAMES.deployment, name=consts.TASK_NAMES.deployment,
cluster_id=self.env.clusters[0].id cluster_id=cluster.id
) )
task.cache = {'nodes': [nodes[0].uid, nodes[1].uid]} task.cache = {'nodes': [nodes[0].uid, nodes[1].uid]}
self.db.add(task) self.db.add(task)
@ -1536,7 +1514,7 @@ class TestConsumer(BaseReciverTestCase):
self.assertEqual(task.status, consts.TASK_STATUSES.ready) self.assertEqual(task.status, consts.TASK_STATUSES.ready)
def _check_success_message(self, callback, task_name, c_status, n_status): def _check_success_message(self, callback, task_name, c_status, n_status):
self.env.create( cluster = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{'api': False, 'roles': ['controller'], {'api': False, 'roles': ['controller'],
@ -1544,7 +1522,6 @@ class TestConsumer(BaseReciverTestCase):
{'api': False, 'roles': ['compute'], {'api': False, 'roles': ['compute'],
'status': consts.NODE_STATUSES.discover}, 'status': consts.NODE_STATUSES.discover},
]) ])
cluster = self.env.clusters[-1]
nodes = self.env.nodes nodes = self.env.nodes
task_title = task_name.title() task_title = task_name.title()
task = Task( task = Task(
@ -1608,13 +1585,12 @@ class TestResetEnvironment(BaseReciverTestCase):
@mock.patch('nailgun.rpc.receiver.logs_utils.delete_node_logs') @mock.patch('nailgun.rpc.receiver.logs_utils.delete_node_logs')
def test_delete_logs_after_reset(self, mock_delete_logs): def test_delete_logs_after_reset(self, mock_delete_logs):
self.env.create( cluster = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"api": False, "status": consts.NODE_STATUSES.ready}, {"api": False, "status": consts.NODE_STATUSES.ready},
] ]
) )
cluster = self.env.clusters[0]
node = self.env.nodes[0] node = self.env.nodes[0]

View File

@ -27,13 +27,12 @@ class TestSpawnVMs(BaseIntegrationTest):
@fake_tasks(recover_nodes=False) @fake_tasks(recover_nodes=False)
def test_spawn_vms(self): def test_spawn_vms(self):
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{"status": "ready", "pending_addition": True, {"status": "ready", "pending_addition": True,
"roles": ["virt"]}, "roles": ["virt"]},
] ]
) )
cluster = self.env.clusters[0]
cluster.nodes[0].vms_conf = [{'id': 1, 'cluster_id': cluster.id}] cluster.nodes[0].vms_conf = [{'id': 1, 'cluster_id': cluster.id}]
resp = self.app.put( resp = self.app.put(
@ -51,13 +50,12 @@ class TestSpawnVMs(BaseIntegrationTest):
self.assertEqual(len(task_deploy.subtasks), 2) self.assertEqual(len(task_deploy.subtasks), 2)
def test_create_vms_conf(self): def test_create_vms_conf(self):
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{"status": "ready", "pending_addition": True, {"status": "ready", "pending_addition": True,
"roles": ["virt"]}, "roles": ["virt"]},
] ]
) )
cluster = self.env.clusters[0]
vms_conf = {"vms_conf": [{'id': 1, 'cluster_id': cluster.id}]} vms_conf = {"vms_conf": [{'id': 1, 'cluster_id': cluster.id}]}
self.app.put( self.app.put(
reverse( reverse(
@ -75,13 +73,12 @@ class TestSpawnVMs(BaseIntegrationTest):
self.assertEqual(spawning_nodes.json, vms_conf) self.assertEqual(spawning_nodes.json, vms_conf)
def test_spawn_vms_error(self): def test_spawn_vms_error(self):
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{"pending_addition": True, {"pending_addition": True,
"roles": ["compute"]}, "roles": ["compute"]},
] ]
) )
cluster = self.env.clusters[0]
resp = self.app.put( resp = self.app.put(
reverse( reverse(

View File

@ -114,7 +114,7 @@ class TestStatsUserTaskManagers(BaseMasterNodeSettignsTest):
@fake_tasks(override_state={'progress': 100, @fake_tasks(override_state={'progress': 100,
'status': consts.TASK_STATUSES.ready}) 'status': consts.TASK_STATUSES.ready})
def test_no_tasks_for_non_operational_clusters(self): def test_no_tasks_for_non_operational_clusters(self):
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{'roles': ['controller'], 'pending_addition': True}, {'roles': ['controller'], 'pending_addition': True},
] ]
@ -123,8 +123,6 @@ class TestStatsUserTaskManagers(BaseMasterNodeSettignsTest):
deploy_task = self.env.launch_deployment() deploy_task = self.env.launch_deployment()
self.assertEqual(deploy_task.status, consts.TASK_STATUSES.ready) self.assertEqual(deploy_task.status, consts.TASK_STATUSES.ready)
cluster = self.env.clusters[0]
# Tuple of tuples (task_name, must_send_stats) # Tuple of tuples (task_name, must_send_stats)
tasks_params = ( tasks_params = (
(consts.TASK_NAMES.create_stats_user, True), (consts.TASK_NAMES.create_stats_user, True),

View File

@ -31,7 +31,7 @@ class TestStopDeployment(BaseIntegrationTest):
def setUp(self): def setUp(self):
super(TestStopDeployment, self).setUp() super(TestStopDeployment, self).setUp()
self.env.create( self.cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{"name": "First", {"name": "First",
"pending_addition": True}, "pending_addition": True},
@ -40,7 +40,6 @@ class TestStopDeployment(BaseIntegrationTest):
"pending_addition": True} "pending_addition": True}
] ]
) )
self.cluster = self.env.clusters[0]
self.controller = self.env.nodes[0] self.controller = self.env.nodes[0]
self.compute = self.env.nodes[1] self.compute = self.env.nodes[1]
self.node_uids = [n.uid for n in self.cluster.nodes][:3] self.node_uids = [n.uid for n in self.cluster.nodes][:3]

View File

@ -28,7 +28,7 @@ from nailgun.utils import reverse
class TestTaskDeploy(BaseIntegrationTest): class TestTaskDeploy(BaseIntegrationTest):
def setUp(self): def setUp(self):
super(TestTaskDeploy, self).setUp() super(TestTaskDeploy, self).setUp()
self.env.create( self.cluster = self.env.create(
api=False, api=False,
nodes_kwargs=[ nodes_kwargs=[
{"name": "First", {"name": "First",
@ -42,7 +42,6 @@ class TestTaskDeploy(BaseIntegrationTest):
'version': '2015.1.0-8.0', 'version': '2015.1.0-8.0',
}, },
) )
self.cluster = self.env.clusters[-1]
def add_plugin_with_tasks(self, task_id): def add_plugin_with_tasks(self, task_id):
deployment_tasks = self.env.get_default_plugin_deployment_tasks( deployment_tasks = self.env.get_default_plugin_deployment_tasks(

View File

@ -146,7 +146,7 @@ class TestTasksLogging(BaseIntegrationTest):
@fake_tasks(god_mode=True, recover_nodes=False) @fake_tasks(god_mode=True, recover_nodes=False)
@patch.object(TaskHelper, 'update_action_log') @patch.object(TaskHelper, 'update_action_log')
def test_stop_task_logging(self, logger): def test_stop_task_logging(self, logger):
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{"pending_addition": True, "pending_roles": ["controller"]}, {"pending_addition": True, "pending_roles": ["controller"]},
{"pending_addition": True, "pending_roles": ["cinder"]}, {"pending_addition": True, "pending_roles": ["cinder"]},
@ -161,7 +161,6 @@ class TestTasksLogging(BaseIntegrationTest):
# FIXME(aroma): remove when stop action will be reworked for ha # FIXME(aroma): remove when stop action will be reworked for ha
# cluster. To get more details, please, refer to [1] # cluster. To get more details, please, refer to [1]
# [1]: https://bugs.launchpad.net/fuel/+bug/1529691 # [1]: https://bugs.launchpad.net/fuel/+bug/1529691
cluster = self.env.clusters[0]
objects.Cluster.set_deployed_before_flag(cluster, value=False) objects.Cluster.set_deployed_before_flag(cluster, value=False)
self.env.stop_deployment() self.env.stop_deployment()
@ -258,7 +257,7 @@ class TestTasksLogging(BaseIntegrationTest):
@fake_tasks() @fake_tasks()
def test_update_task_logging_on_deployment(self): def test_update_task_logging_on_deployment(self):
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{"pending_addition": True, "pending_roles": ["controller"]} {"pending_addition": True, "pending_roles": ["controller"]}
] ]
@ -274,7 +273,6 @@ class TestTasksLogging(BaseIntegrationTest):
# FIXME(aroma): remove when stop action will be reworked for ha # FIXME(aroma): remove when stop action will be reworked for ha
# cluster. To get more details, please, refer to [1] # cluster. To get more details, please, refer to [1]
# [1]: https://bugs.launchpad.net/fuel/+bug/1529691 # [1]: https://bugs.launchpad.net/fuel/+bug/1529691
cluster = self.env.clusters[0]
objects.Cluster.set_deployed_before_flag(cluster, value=False) objects.Cluster.set_deployed_before_flag(cluster, value=False)
# Stopping deployment # Stopping deployment

View File

@ -102,14 +102,13 @@ class TestTaskManagers(BaseIntegrationTest):
@mock.patch('nailgun.task.task.rpc.cast') @mock.patch('nailgun.task.task.rpc.cast')
def test_settings_saved_in_transaction(self, _): def test_settings_saved_in_transaction(self, _):
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{"pending_addition": True}, {"pending_addition": True},
{"pending_deletion": True, {"pending_deletion": True,
'status': NODE_STATUSES.provisioned}, 'status': NODE_STATUSES.provisioned},
] ]
) )
cluster = self.env.clusters[-1]
supertask = self.env.launch_deployment(cluster.id) supertask = self.env.launch_deployment(cluster.id)
self.assertNotEqual(TASK_STATUSES.error, supertask.status) self.assertNotEqual(TASK_STATUSES.error, supertask.status)
deployment_task = next( deployment_task = next(
@ -139,7 +138,7 @@ class TestTaskManagers(BaseIntegrationTest):
def check_deployment_info_was_saved_in_transaction( def check_deployment_info_was_saved_in_transaction(
self, release_ver, is_task_deploy, is_lcm self, release_ver, is_task_deploy, is_lcm
): ):
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{"pending_addition": True}, {"pending_addition": True},
{"pending_deletion": True, {"pending_deletion": True,
@ -150,7 +149,6 @@ class TestTaskManagers(BaseIntegrationTest):
'version': release_ver 'version': release_ver
}, },
) )
cluster = self.env.clusters[-1]
if not is_task_deploy: if not is_task_deploy:
self.env.disable_task_deploy(cluster) self.env.disable_task_deploy(cluster)
@ -183,7 +181,7 @@ class TestTaskManagers(BaseIntegrationTest):
'roles': ['controller'], 'version': '2.1.0', 'roles': ['controller'], 'version': '2.1.0',
} }
] ]
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{"pending_addition": True, "pending_roles": ['controller']}, {"pending_addition": True, "pending_roles": ['controller']},
{"pending_addition": True, "pending_roles": ['controller']}, {"pending_addition": True, "pending_roles": ['controller']},
@ -193,7 +191,6 @@ class TestTaskManagers(BaseIntegrationTest):
'version': 'mitaka-9.0', 'version': 'mitaka-9.0',
}, },
) )
cluster = self.env.clusters[-1]
supertask = self.env.launch_deployment(cluster.id) supertask = self.env.launch_deployment(cluster.id)
self.assertNotEqual(TASK_STATUSES.error, supertask.status) self.assertNotEqual(TASK_STATUSES.error, supertask.status)
tasks_graph = rpc_mock.call_args[0][1][1]['args']['tasks_graph'] tasks_graph = rpc_mock.call_args[0][1][1]['args']['tasks_graph']
@ -367,7 +364,7 @@ class TestTaskManagers(BaseIntegrationTest):
@fake_tasks(fake_rpc=False, mock_rpc=False) @fake_tasks(fake_rpc=False, mock_rpc=False)
@mock.patch('nailgun.rpc.cast') @mock.patch('nailgun.rpc.cast')
def test_update_nodes_info_on_node_removal(self, _): def test_update_nodes_info_on_node_removal(self, _):
self.env.create( cluster = self.env.create(
cluster_kwargs={ cluster_kwargs={
'status': consts.CLUSTER_STATUSES.operational, 'status': consts.CLUSTER_STATUSES.operational,
'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron, 'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron,
@ -382,7 +379,7 @@ class TestTaskManagers(BaseIntegrationTest):
{'status': consts.NODE_STATUSES.ready, 'roles': ['compute']}, {'status': consts.NODE_STATUSES.ready, 'roles': ['compute']},
]) ])
objects.Cluster.prepare_for_deployment(self.env.clusters[0]) objects.Cluster.prepare_for_deployment(cluster)
self.env.launch_deployment() self.env.launch_deployment()
args, _ = nailgun.task.manager.rpc.cast.call_args_list[1] args, _ = nailgun.task.manager.rpc.cast.call_args_list[1]
@ -463,7 +460,7 @@ class TestTaskManagers(BaseIntegrationTest):
# Do not move cluster to error state # Do not move cluster to error state
# in case if cluster new and before # in case if cluster new and before
# validation failed # validation failed
self.assertEqual(self.env.clusters[0].status, 'new') self.assertEqual(cluster.status, 'new')
@fake_tasks() @fake_tasks()
def test_deployment_fails_if_node_to_redeploy_is_offline(self): def test_deployment_fails_if_node_to_redeploy_is_offline(self):
@ -491,11 +488,11 @@ class TestTaskManagers(BaseIntegrationTest):
'and try again.'.format(offline_node.full_name)) 'and try again.'.format(offline_node.full_name))
self.assertEqual(supertask.message, msg) self.assertEqual(supertask.message, msg)
self.assertEqual(self.env.clusters[0].status, 'error') self.assertEqual(cluster.status, 'error')
@fake_tasks(override_state={"progress": 100, "status": "ready"}) @fake_tasks(override_state={"progress": 100, "status": "ready"})
def test_redeployment_works(self): def test_redeployment_works(self):
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{"pending_addition": True}, {"pending_addition": True},
{"pending_addition": True}, {"pending_addition": True},
@ -508,7 +505,7 @@ class TestTaskManagers(BaseIntegrationTest):
self.env.refresh_nodes() self.env.refresh_nodes()
self.env.create_node( self.env.create_node(
cluster_id=self.env.clusters[0].id, cluster_id=cluster.id,
roles=["controller"], roles=["controller"],
pending_addition=True pending_addition=True
) )
@ -546,19 +543,17 @@ class TestTaskManagers(BaseIntegrationTest):
@fake_tasks() @fake_tasks()
def test_deletion_cluster_task_manager(self): def test_deletion_cluster_task_manager(self):
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{"status": "ready", "progress": 100}, {"status": "ready", "progress": 100},
{"roles": ["compute"], "status": "ready", "progress": 100}, {"roles": ["compute"], "status": "ready", "progress": 100},
{"roles": ["compute"], "pending_addition": True}, {"roles": ["compute"], "pending_addition": True},
] ]
) )
cluster_id = self.env.clusters[0].id
cluster_name = self.env.clusters[0].name
resp = self.app.delete( resp = self.app.delete(
reverse( reverse(
'ClusterHandler', 'ClusterHandler',
kwargs={'obj_id': cluster_id}), kwargs={'obj_id': cluster.id}),
headers=self.default_headers headers=self.default_headers
) )
self.assertEqual(202, resp.status_code) self.assertEqual(202, resp.status_code)
@ -566,9 +561,9 @@ class TestTaskManagers(BaseIntegrationTest):
notification = self.db.query(models.Notification)\ notification = self.db.query(models.Notification)\
.filter(models.Notification.topic == "done")\ .filter(models.Notification.topic == "done")\
.filter(models.Notification.message == "Environment '%s' and all " .filter(models.Notification.message == "Environment '%s' and all "
"its nodes are deleted" % cluster_name).first() "its nodes are deleted" % cluster.name).first()
self.assertIsNotNone(notification) self.assertIsNotNone(notification)
self.assertIsNone(self.db.query(models.Cluster).get(cluster_id)) self.assertIsNone(self.db.query(models.Cluster).get(cluster.id))
tasks = self.db.query(models.Task).all() tasks = self.db.query(models.Task).all()
self.assertEqual(len(tasks), 1) self.assertEqual(len(tasks), 1)
@ -577,7 +572,7 @@ class TestTaskManagers(BaseIntegrationTest):
@fake_tasks(tick_interval=10, tick_count=5) @fake_tasks(tick_interval=10, tick_count=5)
def test_deletion_clusters_one_by_one(self): def test_deletion_clusters_one_by_one(self):
self.env.create( cluster1 = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{"roles": ["compute"], "status": "ready", "progress": 100}, {"roles": ["compute"], "status": "ready", "progress": 100},
{"roles": ["compute"], "status": "ready", "progress": 100}, {"roles": ["compute"], "status": "ready", "progress": 100},
@ -587,15 +582,13 @@ class TestTaskManagers(BaseIntegrationTest):
{"roles": ["cinder"], "status": "ready", "progress": 100}, {"roles": ["cinder"], "status": "ready", "progress": 100},
] ]
) )
cluster1_id = self.env.clusters[0].id cluster2 = self.env.create_cluster(api=True)
self.env.create_cluster(api=True)
cluster2_id = self.env.clusters[1].id
cluster_names = [cluster.name for cluster in self.env.clusters] cluster_names = [cluster.name for cluster in self.env.clusters]
resp = self.app.delete( resp = self.app.delete(
reverse( reverse(
'ClusterHandler', 'ClusterHandler',
kwargs={'obj_id': cluster1_id}), kwargs={'obj_id': cluster1.id}),
headers=self.default_headers headers=self.default_headers
) )
self.assertEqual(202, resp.status_code) self.assertEqual(202, resp.status_code)
@ -603,7 +596,7 @@ class TestTaskManagers(BaseIntegrationTest):
resp = self.app.delete( resp = self.app.delete(
reverse( reverse(
'ClusterHandler', 'ClusterHandler',
kwargs={'obj_id': cluster2_id}), kwargs={'obj_id': cluster2.id}),
headers=self.default_headers headers=self.default_headers
) )
self.assertEqual(202, resp.status_code) self.assertEqual(202, resp.status_code)
@ -611,8 +604,8 @@ class TestTaskManagers(BaseIntegrationTest):
timer = time.time() timer = time.time()
timeout = 15 timeout = 15
clstr1 = self.db.query(models.Cluster).get(cluster1_id) clstr1 = self.db.query(models.Cluster).get(cluster1.id)
clstr2 = self.db.query(models.Cluster).get(cluster2_id) clstr2 = self.db.query(models.Cluster).get(cluster2.id)
while clstr1 or clstr2: while clstr1 or clstr2:
time.sleep(1) time.sleep(1)
try: try:
@ -637,16 +630,15 @@ class TestTaskManagers(BaseIntegrationTest):
@fake_tasks(recover_nodes=False, fake_rpc=False) @fake_tasks(recover_nodes=False, fake_rpc=False)
def test_deletion_during_deployment(self, mock_rpc): def test_deletion_during_deployment(self, mock_rpc):
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{"status": "ready", "pending_addition": True}, {"status": "ready", "pending_addition": True},
] ]
) )
cluster_id = self.env.clusters[0].id
resp = self.app.put( resp = self.app.put(
reverse( reverse(
'ClusterChangesHandler', 'ClusterChangesHandler',
kwargs={'cluster_id': cluster_id}), kwargs={'cluster_id': cluster.id}),
headers=self.default_headers headers=self.default_headers
) )
deploy_uuid = resp.json_body['uuid'] deploy_uuid = resp.json_body['uuid']
@ -659,7 +651,7 @@ class TestTaskManagers(BaseIntegrationTest):
resp = self.app.delete( resp = self.app.delete(
reverse( reverse(
'ClusterHandler', 'ClusterHandler',
kwargs={'obj_id': cluster_id}), kwargs={'obj_id': cluster.id}),
headers=self.default_headers headers=self.default_headers
) )
task_delete = self.db.query(models.Task).filter_by( task_delete = self.db.query(models.Task).filter_by(
@ -676,14 +668,14 @@ class TestTaskManagers(BaseIntegrationTest):
).first() ).first()
self.assertIsNone(task_deploy) self.assertIsNone(task_deploy)
task_delete = self.db.query(models.Task).filter_by( task_delete = self.db.query(models.Task).filter_by(
cluster_id=cluster_id, cluster_id=cluster.id,
name="cluster_deletion" name="cluster_deletion"
).first() ).first()
self.assertIsNone(task_delete) self.assertIsNone(task_delete)
@fake_tasks(override_state={"progress": 100, "status": "ready"}) @fake_tasks(override_state={"progress": 100, "status": "ready"})
def test_deletion_cluster_ha_3x3(self): def test_deletion_cluster_ha_3x3(self):
self.env.create( cluster = self.env.create(
cluster_kwargs={ cluster_kwargs={
"api": True, "api": True,
}, },
@ -692,22 +684,20 @@ class TestTaskManagers(BaseIntegrationTest):
{"roles": ["compute"], "pending_addition": True} {"roles": ["compute"], "pending_addition": True}
] * 3 ] * 3
) )
cluster_id = self.env.clusters[0].id
cluster_name = self.env.clusters[0].name
supertask = self.env.launch_deployment() supertask = self.env.launch_deployment()
self.assertEqual(supertask.status, consts.TASK_STATUSES.ready) self.assertEqual(supertask.status, consts.TASK_STATUSES.ready)
resp = self.app.delete( resp = self.app.delete(
reverse( reverse(
'ClusterHandler', 'ClusterHandler',
kwargs={'obj_id': cluster_id}), kwargs={'obj_id': cluster.id}),
headers=self.default_headers headers=self.default_headers
) )
self.assertEqual(202, resp.status_code) self.assertEqual(202, resp.status_code)
timer = time.time() timer = time.time()
timeout = 15 timeout = 15
clstr = self.db.query(models.Cluster).get(cluster_id) clstr = self.db.query(models.Cluster).get(cluster.id)
while clstr: while clstr:
time.sleep(1) time.sleep(1)
try: try:
@ -720,7 +710,7 @@ class TestTaskManagers(BaseIntegrationTest):
notification = self.db.query(models.Notification)\ notification = self.db.query(models.Notification)\
.filter(models.Notification.topic == "done")\ .filter(models.Notification.topic == "done")\
.filter(models.Notification.message == "Environment '%s' and all " .filter(models.Notification.message == "Environment '%s' and all "
"its nodes are deleted" % cluster_name).first() "its nodes are deleted" % cluster.name).first()
self.assertIsNotNone(notification) self.assertIsNotNone(notification)
tasks = self.db.query(models.Task).all() tasks = self.db.query(models.Task).all()
@ -752,16 +742,15 @@ class TestTaskManagers(BaseIntegrationTest):
@mock.patch.object(task.DeletionTask, 'execute') @mock.patch.object(task.DeletionTask, 'execute')
def test_deletion_task_called(self, mdeletion_execute): def test_deletion_task_called(self, mdeletion_execute):
cluster = self.env.create_cluster() cluster = self.env.create_cluster()
cluster_id = cluster['id']
node_db = self.env.create_node( node_db = self.env.create_node(
api=False, api=False,
cluster_id=cluster['id'], cluster_id=cluster.id,
pending_addition=False, pending_addition=False,
pending_deletion=True, pending_deletion=True,
status=NODE_STATUSES.ready, status=NODE_STATUSES.ready,
roles=['controller']) roles=['controller'])
manager_ = manager.ApplyChangesTaskManager(cluster_id) manager_ = manager.ApplyChangesTaskManager(cluster.id)
manager_.execute() manager_.execute()
self.assertEqual(mdeletion_execute.call_count, 1) self.assertEqual(mdeletion_execute.call_count, 1)
@ -782,16 +771,15 @@ class TestTaskManagers(BaseIntegrationTest):
@mock.patch.object(task.DeletionTask, 'execute') @mock.patch.object(task.DeletionTask, 'execute')
def test_deletion_task_w_check_ceph(self, mdeletion_execute): def test_deletion_task_w_check_ceph(self, mdeletion_execute):
cluster = self.env.create_cluster() cluster = self.env.create_cluster()
cluster_id = cluster['id']
self.env.create_node( self.env.create_node(
api=False, api=False,
cluster_id=cluster['id'], cluster_id=cluster.id,
pending_addition=False, pending_addition=False,
pending_deletion=True, pending_deletion=True,
status=NODE_STATUSES.ready, status=NODE_STATUSES.ready,
roles=['controller']) roles=['controller'])
manager_ = manager.ApplyChangesTaskManager(cluster_id) manager_ = manager.ApplyChangesTaskManager(cluster.id)
manager_.execute() manager_.execute()
self.assertEqual(mdeletion_execute.call_count, 1) self.assertEqual(mdeletion_execute.call_count, 1)
@ -846,12 +834,11 @@ class TestTaskManagers(BaseIntegrationTest):
@fake_tasks() @fake_tasks()
@mock.patch('nailgun.task.manager.tasks.DeletionTask.execute') @mock.patch('nailgun.task.manager.tasks.DeletionTask.execute')
def test_apply_changes_exception_caught(self, mdeletion_execute): def test_apply_changes_exception_caught(self, mdeletion_execute):
self.env.create( cluster_db = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{"pending_deletion": True, "status": NODE_STATUSES.ready}, {"pending_deletion": True, "status": NODE_STATUSES.ready},
] ]
) )
cluster_db = self.env.clusters[0]
objects.Cluster.clear_pending_changes(cluster_db) objects.Cluster.clear_pending_changes(cluster_db)
manager_ = manager.ApplyChangesTaskManager(cluster_db.id) manager_ = manager.ApplyChangesTaskManager(cluster_db.id)
mdeletion_execute.side_effect = Exception('exception') mdeletion_execute.side_effect = Exception('exception')
@ -860,14 +847,14 @@ class TestTaskManagers(BaseIntegrationTest):
@fake_tasks(recover_offline_nodes=False) @fake_tasks(recover_offline_nodes=False)
def test_deletion_offline_node(self): def test_deletion_offline_node(self):
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{"online": False, "pending_deletion": True}, {"online": False, "pending_deletion": True},
{"status": "ready"} {"status": "ready"}
] ]
) )
to_delete = TaskHelper.nodes_to_delete(self.env.clusters[0]) to_delete = TaskHelper.nodes_to_delete(cluster)
to_delete_ids = [node.id for node in to_delete] to_delete_ids = [node.id for node in to_delete]
self.assertEqual(len(to_delete_ids), 1) self.assertEqual(len(to_delete_ids), 1)
@ -942,7 +929,7 @@ class TestTaskManagers(BaseIntegrationTest):
@fake_tasks(recover_offline_nodes=False) @fake_tasks(recover_offline_nodes=False)
def test_deletion_offline_node_when_cluster_has_only_one_node(self): def test_deletion_offline_node_when_cluster_has_only_one_node(self):
cluster = self.env.create_cluster() cluster = self.env.create_cluster()
objects.Cluster.clear_pending_changes(self.env.clusters[0]) objects.Cluster.clear_pending_changes(cluster)
self.env.create_node( self.env.create_node(
cluster_id=cluster['id'], cluster_id=cluster['id'],
online=False, online=False,
@ -957,12 +944,11 @@ class TestTaskManagers(BaseIntegrationTest):
@fake_tasks(recover_nodes=False) @fake_tasks(recover_nodes=False)
def test_node_deletion_task_manager(self): def test_node_deletion_task_manager(self):
self.env.create( cluster_db = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{"pending_deletion": True, "status": "ready"} {"pending_deletion": True, "status": "ready"}
] ]
) )
cluster_db = self.env.clusters[0]
objects.Cluster.clear_pending_changes(cluster_db) objects.Cluster.clear_pending_changes(cluster_db)
manager_ = manager.NodeDeletionTaskManager(cluster_id=cluster_db.id) manager_ = manager.NodeDeletionTaskManager(cluster_id=cluster_db.id)
task = manager_.execute(cluster_db.nodes) task = manager_.execute(cluster_db.nodes)
@ -973,12 +959,11 @@ class TestTaskManagers(BaseIntegrationTest):
@fake_tasks(recover_nodes=False) @fake_tasks(recover_nodes=False)
def test_node_deletion_task_mgr_works_for_nodes_not_in_cluster(self): def test_node_deletion_task_mgr_works_for_nodes_not_in_cluster(self):
self.env.create( cluster_db = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{"pending_deletion": True, "status": "ready"} {"pending_deletion": True, "status": "ready"}
] ]
) )
cluster_db = self.env.clusters[0]
objects.Cluster.clear_pending_changes(cluster_db) objects.Cluster.clear_pending_changes(cluster_db)
node = cluster_db.nodes[0] node = cluster_db.nodes[0]
objects.Node.update(node, {'cluster_id': None}) objects.Node.update(node, {'cluster_id': None})
@ -993,12 +978,11 @@ class TestTaskManagers(BaseIntegrationTest):
@fake_tasks(recover_nodes=False) @fake_tasks(recover_nodes=False)
def test_node_deletion_task_manager_invalid_cluster(self): def test_node_deletion_task_manager_invalid_cluster(self):
self.env.create( cluster_db = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{"pending_deletion": True, "status": "ready"} {"pending_deletion": True, "status": "ready"}
] ]
) )
cluster_db = self.env.clusters[0]
objects.Cluster.clear_pending_changes(cluster_db) objects.Cluster.clear_pending_changes(cluster_db)
manager_ = manager.NodeDeletionTaskManager() manager_ = manager.NodeDeletionTaskManager()
@ -1008,13 +992,12 @@ class TestTaskManagers(BaseIntegrationTest):
@mock.patch('nailgun.task.manager.rpc.cast') @mock.patch('nailgun.task.manager.rpc.cast')
def test_node_deletion_redeploy_started_for_proper_controllers(self, def test_node_deletion_redeploy_started_for_proper_controllers(self,
mcast): mcast):
self.env.create(nodes_kwargs=[ cluster_db = self.env.create(nodes_kwargs=[
{'roles': ['controller'], {'roles': ['controller'],
'status': consts.NODE_STATUSES.provisioned}, 'status': consts.NODE_STATUSES.provisioned},
{'roles': ['controller'], {'roles': ['controller'],
'status': consts.NODE_STATUSES.discover}, 'status': consts.NODE_STATUSES.discover},
]) ])
cluster_db = self.env.clusters[0]
node_to_delete = self.env.create_node( node_to_delete = self.env.create_node(
cluster_id=cluster_db.id, cluster_id=cluster_db.id,
@ -1036,11 +1019,10 @@ class TestTaskManagers(BaseIntegrationTest):
self.assertEqual(node_to_deploy.uid, depl_info[0]['uid']) self.assertEqual(node_to_deploy.uid, depl_info[0]['uid'])
def test_node_deletion_task_failed_with_controller_in_error(self): def test_node_deletion_task_failed_with_controller_in_error(self):
self.env.create(nodes_kwargs=[ cluster_db = self.env.create(nodes_kwargs=[
{'roles': ['controller'], {'roles': ['controller'],
'status': consts.NODE_STATUSES.error}, 'status': consts.NODE_STATUSES.error},
]) ])
cluster_db = self.env.clusters[0]
node_to_delete = self.env.create_node( node_to_delete = self.env.create_node(
cluster_id=cluster_db.id, cluster_id=cluster_db.id,
@ -1054,7 +1036,7 @@ class TestTaskManagers(BaseIntegrationTest):
@fake_tasks() @fake_tasks()
def test_deployment_on_controller_removal_via_apply_changes(self): def test_deployment_on_controller_removal_via_apply_changes(self):
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{'roles': ['controller'], {'roles': ['controller'],
'pending_deletion': True}, 'pending_deletion': True},
@ -1071,7 +1053,6 @@ class TestTaskManagers(BaseIntegrationTest):
] ]
) )
cluster = self.env.clusters[0]
expected_nodes_to_deploy = filter(lambda n: 'controller' in n.roles expected_nodes_to_deploy = filter(lambda n: 'controller' in n.roles
and not n.pending_deletion, and not n.pending_deletion,
cluster.nodes) cluster.nodes)
@ -1086,7 +1067,7 @@ class TestTaskManagers(BaseIntegrationTest):
@fake_tasks() @fake_tasks()
def test_deployment_on_controller_removal_via_node_deletion(self): def test_deployment_on_controller_removal_via_node_deletion(self):
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{'roles': ['controller'], {'roles': ['controller'],
'status': consts.NODE_STATUSES.ready}, 'status': consts.NODE_STATUSES.ready},
@ -1101,7 +1082,6 @@ class TestTaskManagers(BaseIntegrationTest):
] ]
) )
cluster = self.env.clusters[0]
controllers = filter(lambda n: 'controller' in n.roles controllers = filter(lambda n: 'controller' in n.roles
and not n.pending_deletion, and not n.pending_deletion,
cluster.nodes) cluster.nodes)
@ -1124,10 +1104,10 @@ class TestTaskManagers(BaseIntegrationTest):
@mock.patch('nailgun.rpc.cast') @mock.patch('nailgun.rpc.cast')
def test_delete_nodes_do_not_run_if_there_is_deletion_running(self, _): def test_delete_nodes_do_not_run_if_there_is_deletion_running(self, _):
self.env.create( cluster = self.env.create(
nodes_kwargs=[{'roles': ['controller']}] * 3) nodes_kwargs=[{'roles': ['controller']}] * 3)
self.task_manager = manager.NodeDeletionTaskManager( self.task_manager = manager.NodeDeletionTaskManager(
cluster_id=self.env.clusters[0].id) cluster_id=cluster.id)
self.task_manager.execute(self.env.nodes) self.task_manager.execute(self.env.nodes)
self.assertRaisesRegexp( self.assertRaisesRegexp(
@ -1138,10 +1118,9 @@ class TestTaskManagers(BaseIntegrationTest):
@mock.patch('nailgun.rpc.cast') @mock.patch('nailgun.rpc.cast')
def test_delete_nodes_reelection_if_primary_for_deletion(self, _): def test_delete_nodes_reelection_if_primary_for_deletion(self, _):
self.env.create( cluster = self.env.create(
nodes_kwargs=[{'roles': ['controller'], nodes_kwargs=[{'roles': ['controller'],
'status': consts.NODE_STATUSES.ready}] * 3) 'status': consts.NODE_STATUSES.ready}] * 3)
cluster = self.env.clusters[0]
task_manager = manager.NodeDeletionTaskManager(cluster_id=cluster.id) task_manager = manager.NodeDeletionTaskManager(cluster_id=cluster.id)
objects.Cluster.set_primary_roles(cluster, self.env.nodes) objects.Cluster.set_primary_roles(cluster, self.env.nodes)
primary_node = filter( primary_node = filter(

View File

@ -39,14 +39,13 @@ class TestVerifyNetworkTaskManagers(BaseIntegrationTest):
mac1 = meta1['interfaces'][0]['mac'] mac1 = meta1['interfaces'][0]['mac']
mac2 = meta2['interfaces'][0]['mac'] mac2 = meta2['interfaces'][0]['mac']
self.env.create( self.cluster = self.env.create(
cluster_kwargs={ cluster_kwargs={
'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron}, 'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron},
nodes_kwargs=[ nodes_kwargs=[
{"api": True, "meta": meta1, "mac": mac1}, {"api": True, "meta": meta1, "mac": mac1},
{"api": True, "meta": meta2, "mac": mac2}, {"api": True, "meta": meta2, "mac": mac2},
]) ])
self.cluster = self.env.clusters[0]
@fake_tasks() @fake_tasks()
def test_network_verify_task_managers_dhcp_on_master(self): def test_network_verify_task_managers_dhcp_on_master(self):
@ -151,12 +150,11 @@ class TestVerifyNetworkTaskManagers(BaseIntegrationTest):
@fake_tasks() @fake_tasks()
def test_network_verify_when_env_not_ready(self): def test_network_verify_when_env_not_ready(self):
cluster_db = self.env.clusters[0]
blocking_statuses = ( blocking_statuses = (
consts.CLUSTER_STATUSES.deployment, consts.CLUSTER_STATUSES.deployment,
) )
for status in blocking_statuses: for status in blocking_statuses:
cluster_db.status = status self.cluster.status = status
self.db.flush() self.db.flush()
resp = self.env.neutron_networks_get(self.cluster.id) resp = self.env.neutron_networks_get(self.cluster.id)
@ -204,9 +202,9 @@ class TestVerifyNetworkTaskManagers(BaseIntegrationTest):
@unittest2.skip('Multicast is always disabled.') @unittest2.skip('Multicast is always disabled.')
@fake_tasks(fake_rpc=False) @fake_tasks(fake_rpc=False)
def test_multicast_disabled_when_corosync_is_not_present(self, mocked_rpc): def test_multicast_disabled_when_corosync_is_not_present(self, mocked_rpc):
editable = copy.deepcopy(self.env.clusters[0].attributes.editable) editable = copy.deepcopy(self.cluster.attributes.editable)
del editable['corosync'] del editable['corosync']
self.env.clusters[0].attributes.editable = editable self.cluster.attributes.editable = editable
self.env.launch_verify_networks() self.env.launch_verify_networks()
self.assertIn('subtasks', mocked_rpc.call_args[0][1]) self.assertIn('subtasks', mocked_rpc.call_args[0][1])
subtasks = mocked_rpc.call_args[0][1]['subtasks'] subtasks = mocked_rpc.call_args[0][1]['subtasks']
@ -235,7 +233,7 @@ class TestVerifyNetworksDisabled(BaseIntegrationTest):
"max_speed": 1000, "max_speed": 1000,
"name": "eth2", "name": "eth2",
"current_speed": None}]) "current_speed": None}])
self.env.create( self.cluster = self.env.create(
cluster_kwargs={'status': consts.CLUSTER_STATUSES.operational, cluster_kwargs={'status': consts.CLUSTER_STATUSES.operational,
'net_provider': 'neutron', 'net_provider': 'neutron',
'net_segment_type': 'vlan'}, 'net_segment_type': 'vlan'},
@ -248,7 +246,6 @@ class TestVerifyNetworksDisabled(BaseIntegrationTest):
}, },
] ]
) )
self.cluster = self.env.clusters[0]
@fake_tasks() @fake_tasks()
def test_network_verification_neutron_with_vlan_segmentation(self): def test_network_verification_neutron_with_vlan_segmentation(self):
@ -274,7 +271,7 @@ class TestNetworkVerificationWithBonds(BaseIntegrationTest):
{"name": "eth1", "mac": "00:00:00:00:22:77", "current_speed": 100}, {"name": "eth1", "mac": "00:00:00:00:22:77", "current_speed": 100},
{"name": "eth2", "mac": "00:00:00:00:33:88", "current_speed": 100}] {"name": "eth2", "mac": "00:00:00:00:33:88", "current_speed": 100}]
) )
self.env.create( self.cluster = self.env.create(
cluster_kwargs={ cluster_kwargs={
'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron, 'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron,
'net_segment_type': 'gre' 'net_segment_type': 'gre'
@ -380,14 +377,14 @@ class TestNetworkVerificationWithBonds(BaseIntegrationTest):
resp = self.app.get( resp = self.app.get(
reverse( reverse(
'NeutronNetworkConfigurationHandler', 'NeutronNetworkConfigurationHandler',
kwargs={'cluster_id': self.env.clusters[0].id} kwargs={'cluster_id': self.cluster.id}
), ),
headers=self.default_headers headers=self.default_headers
) )
resp = self.app.put( resp = self.app.put(
reverse( reverse(
'NeutronNetworkConfigurationVerifyHandler', 'NeutronNetworkConfigurationVerifyHandler',
kwargs={'cluster_id': self.env.clusters[0].id}), kwargs={'cluster_id': self.cluster.id}),
resp.body, resp.body,
headers=self.default_headers, headers=self.default_headers,
expect_errors=True expect_errors=True
@ -789,12 +786,11 @@ class TestVerifyNovaFlatDHCP(BaseIntegrationTest):
} }
) )
self.env.create( self.cluster = self.env.create(
cluster_kwargs={ cluster_kwargs={
'net_provider': consts.CLUSTER_NET_PROVIDERS.nova_network}, 'net_provider': consts.CLUSTER_NET_PROVIDERS.nova_network},
nodes_kwargs=nodes_kwargs, nodes_kwargs=nodes_kwargs,
) )
self.cluster = self.env.clusters[0]
@fake_tasks() @fake_tasks()
def test_flat_dhcp_verify(self): def test_flat_dhcp_verify(self):
@ -828,7 +824,7 @@ class TestVerifyNeutronVlan(BaseIntegrationTest):
{"name": "eth0", "mac": "00:00:00:00:01:66"}, {"name": "eth0", "mac": "00:00:00:00:01:66"},
{"name": "eth1", "mac": "00:00:00:00:01:77"}, {"name": "eth1", "mac": "00:00:00:00:01:77"},
{"name": "eth2", "mac": "00:00:00:00:01:88"}]) {"name": "eth2", "mac": "00:00:00:00:01:88"}])
self.env.create( self.cluster = self.env.create(
cluster_kwargs={ cluster_kwargs={
'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron, 'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron,
'net_segment_type': 'vlan' 'net_segment_type': 'vlan'
@ -850,23 +846,22 @@ class TestVerifyNeutronVlan(BaseIntegrationTest):
@fake_tasks() @fake_tasks()
def test_verify_networks_after_stop(self): def test_verify_networks_after_stop(self):
cluster = self.env.clusters[0]
deploy_task = self.env.launch_deployment() deploy_task = self.env.launch_deployment()
self.assertEqual(deploy_task.status, consts.TASK_STATUSES.ready) self.assertEqual(deploy_task.status, consts.TASK_STATUSES.ready)
# FIXME(aroma): remove when stop action will be reworked for ha # FIXME(aroma): remove when stop action will be reworked for ha
# cluster. To get more details, please, refer to [1] # cluster. To get more details, please, refer to [1]
# [1]: https://bugs.launchpad.net/fuel/+bug/1529691 # [1]: https://bugs.launchpad.net/fuel/+bug/1529691
objects.Cluster.set_deployed_before_flag(cluster, value=False) objects.Cluster.set_deployed_before_flag(self.cluster, value=False)
stop_task = self.env.stop_deployment() stop_task = self.env.stop_deployment()
self.assertEqual(stop_task.status, consts.TASK_STATUSES.ready) self.assertEqual(stop_task.status, consts.TASK_STATUSES.ready)
self.db.refresh(cluster) self.db.refresh(self.cluster)
self.assertEqual(cluster.status, consts.CLUSTER_STATUSES.stopped) self.assertEqual(self.cluster.status, consts.CLUSTER_STATUSES.stopped)
self.assertFalse(cluster.is_locked) self.assertFalse(self.cluster.is_locked)
# Moving nodes online by hands. Our fake threads do this with # Moving nodes online by hands. Our fake threads do this with
# random success # random success
for node in sorted(cluster.nodes, key=lambda n: n.id): for node in sorted(self.cluster.nodes, key=lambda n: n.id):
node.online = True node.online = True
self.db.commit() self.db.commit()
verify_task = self.env.launch_verify_networks() verify_task = self.env.launch_verify_networks()
@ -876,7 +871,7 @@ class TestVerifyNeutronVlan(BaseIntegrationTest):
def test_network_verification_neutron_with_vlan_segmentation( def test_network_verification_neutron_with_vlan_segmentation(
self, mocked_rpc): self, mocked_rpc):
# get Neutron L2 VLAN ID range # get Neutron L2 VLAN ID range
vlan_rng_be = self.env.clusters[0].network_config.vlan_range vlan_rng_be = self.cluster.network_config.vlan_range
vlan_rng = set(range(vlan_rng_be[0], vlan_rng_be[1] + 1)) vlan_rng = set(range(vlan_rng_be[0], vlan_rng_be[1] + 1))
# get nodes NICs for private network # get nodes NICs for private network
@ -902,13 +897,13 @@ class TestVerifyNeutronVlan(BaseIntegrationTest):
@fake_tasks() @fake_tasks()
def test_network_verification_parameters_w_one_node_having_public(self): def test_network_verification_parameters_w_one_node_having_public(self):
# Decrease VLAN range and set public VLAN # Decrease VLAN range and set public VLAN
resp = self.env.neutron_networks_get(self.env.clusters[0].id) resp = self.env.neutron_networks_get(self.cluster.id)
nets = resp.json_body nets = resp.json_body
nets['networking_parameters']['vlan_range'] = [1000, 1004] nets['networking_parameters']['vlan_range'] = [1000, 1004]
for net in nets['networks']: for net in nets['networks']:
if net['name'] == consts.NETWORKS.public: if net['name'] == consts.NETWORKS.public:
net['vlan_start'] = 333 net['vlan_start'] = 333
resp = self.env.neutron_networks_put(self.env.clusters[0].id, nets) resp = self.env.neutron_networks_put(self.cluster.id, nets)
self.assertEqual(resp.status_code, 200) self.assertEqual(resp.status_code, 200)
task = self.env.launch_verify_networks() task = self.env.launch_verify_networks()
@ -928,15 +923,15 @@ class TestVerifyNeutronVlan(BaseIntegrationTest):
api=True, api=True,
pending_addition=True, pending_addition=True,
roles=['controller'], roles=['controller'],
cluster_id=self.env.clusters[0].id) cluster_id=self.cluster.id)
# Decrease VLAN range and set public VLAN # Decrease VLAN range and set public VLAN
resp = self.env.neutron_networks_get(self.env.clusters[0].id) resp = self.env.neutron_networks_get(self.cluster.id)
nets = resp.json_body nets = resp.json_body
nets['networking_parameters']['vlan_range'] = [1000, 1004] nets['networking_parameters']['vlan_range'] = [1000, 1004]
for net in nets['networks']: for net in nets['networks']:
if net['name'] == consts.NETWORKS.public: if net['name'] == consts.NETWORKS.public:
net['vlan_start'] = 333 net['vlan_start'] = 333
resp = self.env.neutron_networks_put(self.env.clusters[0].id, nets) resp = self.env.neutron_networks_put(self.cluster.id, nets)
self.assertEqual(resp.status_code, 200) self.assertEqual(resp.status_code, 200)
task = self.env.launch_verify_networks() task = self.env.launch_verify_networks()
@ -974,7 +969,7 @@ class TestVerifyNeutronVlan(BaseIntegrationTest):
@fake_tasks() @fake_tasks()
def test_repo_availability_tasks_are_not_created(self): def test_repo_availability_tasks_are_not_created(self):
self.env.clusters[0].release.version = '2014.1-6.0' self.cluster.release.version = '2014.1-6.0'
self.db.flush() self.db.flush()
task = self.env.launch_verify_networks() task = self.env.launch_verify_networks()

View File

@ -91,10 +91,9 @@ class TestOpenStackClientProvider(BaseTestCase):
get_kc_mock.assert_called_once_with(auth_kwargs) get_kc_mock.assert_called_once_with(auth_kwargs)
def test_fail_if_no_online_controllers(self): def test_fail_if_no_online_controllers(self):
self.env.create( cluster = self.env.create(
nodes_kwargs=[{"online": False, "roles": ["controller"]}] nodes_kwargs=[{"online": False, "roles": ["controller"]}]
) )
cluster = self.env.clusters[0]
client_provider = helpers.ClientProvider(cluster) client_provider = helpers.ClientProvider(cluster)
with self.assertRaises(errors.NoOnlineControllers): with self.assertRaises(errors.NoOnlineControllers):

View File

@ -139,15 +139,15 @@ class TestInstallationInfo(BaseTestCase):
{'roles': ['compute']}, {'roles': ['compute']},
{'roles': ['controller']} {'roles': ['controller']}
] ]
self.env.create( cluster = self.env.create(
cluster_kwargs={ cluster_kwargs={
'release_id': release[0].id, 'release_id': release[0].id,
'mode': consts.CLUSTER_MODES.ha_compact, 'mode': consts.CLUSTER_MODES.ha_compact,
'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron}, 'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron},
nodes_kwargs=nodes_params nodes_kwargs=nodes_params
) )
self.env.create_node({'status': consts.NODE_STATUSES.discover}) self.env.create_node(
cluster = self.env.clusters[0] {'status': consts.NODE_STATUSES.discover})
VmwareAttributes.delete(cluster.vmware_attributes) VmwareAttributes.delete(cluster.vmware_attributes)
self.env.db.flush() self.env.db.flush()
self.assertNotRaises(AttributeError, info.get_clusters_info) self.assertNotRaises(AttributeError, info.get_clusters_info)
@ -162,16 +162,16 @@ class TestInstallationInfo(BaseTestCase):
{'roles': ['compute']}, {'roles': ['compute']},
{'roles': ['controller']} {'roles': ['controller']}
] ]
self.env.create( cluster = self.env.create(
cluster_kwargs={ cluster_kwargs={
'release_id': release[0].id, 'release_id': release[0].id,
'mode': consts.CLUSTER_MODES.ha_compact, 'mode': consts.CLUSTER_MODES.ha_compact,
'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron}, 'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron},
nodes_kwargs=nodes_params nodes_kwargs=nodes_params
) )
self.env.create_node({'status': consts.NODE_STATUSES.discover}) self.env.create_node(
{'status': consts.NODE_STATUSES.discover})
clusters_info = info.get_clusters_info() clusters_info = info.get_clusters_info()
cluster = self.env.clusters[0]
self.assertEquals(1, len(clusters_info)) self.assertEquals(1, len(clusters_info))
cluster_info = clusters_info[0] cluster_info = clusters_info[0]
@ -187,7 +187,7 @@ class TestInstallationInfo(BaseTestCase):
self.assertEquals(False, self.assertEquals(False,
cluster_info['is_customized']) cluster_info['is_customized'])
self.assertEquals(cluster.id, self.assertEquals(cluster['id'],
cluster_info['id']) cluster_info['id'])
self.assertEquals(cluster.fuel_version, self.assertEquals(cluster.fuel_version,
cluster_info['fuel_version']) cluster_info['fuel_version'])
@ -442,10 +442,9 @@ class TestInstallationInfo(BaseTestCase):
return filter(lambda x: x not in private_paths, leafs_paths) return filter(lambda x: x not in private_paths, leafs_paths)
def test_all_cluster_attributes_in_white_list(self): def test_all_cluster_attributes_in_white_list(self):
self.env.create(nodes_kwargs=[{'roles': ['compute']}]) cluster = self.env.create(nodes_kwargs=[{'roles': ['compute']}])
self.env.create_node(status=consts.NODE_STATUSES.discover) self.env.create_node(status=consts.NODE_STATUSES.discover)
cluster = self.env.clusters[0]
expected_paths = self._find_leafs_paths(cluster.attributes.editable) expected_paths = self._find_leafs_paths(cluster.attributes.editable)
# Removing 'value' from expected paths # Removing 'value' from expected paths
@ -460,10 +459,9 @@ class TestInstallationInfo(BaseTestCase):
self.assertIn(path, actual_paths) self.assertIn(path, actual_paths)
def test_all_cluster_vmware_attributes_in_white_list(self): def test_all_cluster_vmware_attributes_in_white_list(self):
self.env.create(nodes_kwargs=[{'roles': ['compute']}]) cluster = self.env.create(nodes_kwargs=[{'roles': ['compute']}])
self.env.create_node(status=consts.NODE_STATUSES.discover) self.env.create_node(status=consts.NODE_STATUSES.discover)
cluster = self.env.clusters[0]
expected_paths = self._find_leafs_paths( expected_paths = self._find_leafs_paths(
cluster.vmware_attributes.editable, cluster.vmware_attributes.editable,
leafs_names=('vsphere_cluster', 'enable')) leafs_names=('vsphere_cluster', 'enable'))

View File

@ -276,8 +276,7 @@ class TestOSWLServerInfoSaving(BaseTestCase):
self.assertEqual(False, last_changed.is_sent) self.assertEqual(False, last_changed.is_sent)
def test_oswl_statistics_save_version_info(self): def test_oswl_statistics_save_version_info(self):
self.env.create() cluster = self.env.create()
cluster = self.env.clusters[0]
# Without version info # Without version info
oswl_statistics_save(cluster.id, consts.OSWL_RESOURCE_TYPES.vm, []) oswl_statistics_save(cluster.id, consts.OSWL_RESOURCE_TYPES.vm, [])

View File

@ -99,13 +99,12 @@ class TestUtilsFunctions(BaseTestCase):
def test_get_online_controller(self): def test_get_online_controller(self):
node_name = "test" node_name = "test"
self.env.create( cluster = self.env.create(
nodes_kwargs=[{"online": True, nodes_kwargs=[{"online": True,
"roles": ["controller"], "roles": ["controller"],
"name": node_name}] "name": node_name}]
) )
cluster = self.env.clusters[0]
online_controller = utils.get_online_controller(cluster) online_controller = utils.get_online_controller(cluster)
self.assertIsNotNone(online_controller) self.assertIsNotNone(online_controller)
self.assertEqual(online_controller.name, node_name) self.assertEqual(online_controller.name, node_name)
@ -140,8 +139,7 @@ class TestUtilsFunctions(BaseTestCase):
def test_get_version_info(self): def test_get_version_info(self):
self.assertIsNone(utils.get_version_info(None)) self.assertIsNone(utils.get_version_info(None))
self.env.create() cluster = self.env.create()
cluster = self.env.clusters[0]
version_info = utils.get_version_info(cluster) version_info = utils.get_version_info(cluster)
self.assertItemsEqual( self.assertItemsEqual(
('release_os', 'release_name', 'release_version', ('release_os', 'release_name', 'release_version',

View File

@ -25,7 +25,7 @@ class TestRepoAvailability(BaseTestCase):
def setUp(self): def setUp(self):
super(TestRepoAvailability, self).setUp() super(TestRepoAvailability, self).setUp()
self.env.create( self.cluster = self.env.create(
cluster_kwargs={ cluster_kwargs={
'net_provider': 'neutron', 'net_provider': 'neutron',
'net_segment_type': 'gre' 'net_segment_type': 'gre'
@ -35,7 +35,6 @@ class TestRepoAvailability(BaseTestCase):
{'roles': ['compute']}, {'roles': ['compute']},
{'roles': ['compute'], 'online': False}]) {'roles': ['compute'], 'online': False}])
self.cluster = self.env.clusters[0]
self.public_ng = next(ng for ng in self.cluster.network_groups self.public_ng = next(ng for ng in self.cluster.network_groups
if ng.name == 'public') if ng.name == 'public')
self.free_ips = NetworkManager.get_free_ips(self.public_ng, 2) self.free_ips = NetworkManager.get_free_ips(self.public_ng, 2)

View File

@ -250,7 +250,7 @@ class TestDeadlockDetector(BaseTestCase):
db().query(models.Node).with_lockmode('update').get(nodes[1].id) db().query(models.Node).with_lockmode('update').get(nodes[1].id)
def test_updating_already_locked_object(self): def test_updating_already_locked_object(self):
self.env.create_cluster() cluster = self.env.create_cluster()
self.env.create_nodes(2) self.env.create_nodes(2)
self.env.create_cluster() self.env.create_cluster()
self.assertGreater(len(self.env.clusters), 1) self.assertGreater(len(self.env.clusters), 1)
@ -261,17 +261,15 @@ class TestDeadlockDetector(BaseTestCase):
db().query(models.Node).order_by('id').with_lockmode('update').all() db().query(models.Node).order_by('id').with_lockmode('update').all()
# Lock is allowed # Lock is allowed
cluster = self.env.clusters[0]
cluster.status = consts.CLUSTER_STATUSES.error cluster.status = consts.CLUSTER_STATUSES.error
def test_id_traced_on_updating_object(self): def test_id_traced_on_updating_object(self):
self.env.create_cluster() cluster = self.env.create_cluster()
self.env.create_nodes(2) self.env.create_nodes(2)
self.env.create_cluster() self.env.create_cluster()
self.assertGreater(len(self.env.clusters), 1) self.assertGreater(len(self.env.clusters), 1)
# Updating cluster # Updating cluster
cluster = self.env.clusters[0]
cluster.status = consts.CLUSTER_STATUSES.error cluster.status = consts.CLUSTER_STATUSES.error
# Checking locked id trace # Checking locked id trace
@ -296,13 +294,12 @@ class TestDeadlockDetector(BaseTestCase):
node.status = consts.NODE_STATUSES.error node.status = consts.NODE_STATUSES.error
def test_lock_ids_in_non_last_lock_failed(self): def test_lock_ids_in_non_last_lock_failed(self):
self.env.create_cluster() cluster = self.env.create_cluster()
self.env.create_nodes(2) self.env.create_nodes(2)
self.env.create_cluster() another_cluster = self.env.create_cluster()
self.assertGreater(len(self.env.clusters), 1) self.assertGreater(len(self.env.clusters), 1)
# Tracing cluster modification # Tracing cluster modification
cluster = self.env.clusters[0]
cluster.status = consts.CLUSTER_STATUSES.error cluster.status = consts.CLUSTER_STATUSES.error
cluster_lock = dd.find_lock(models.Cluster.__tablename__) cluster_lock = dd.find_lock(models.Cluster.__tablename__)
@ -314,7 +311,6 @@ class TestDeadlockDetector(BaseTestCase):
# Trying to lock ids in non last lock # Trying to lock ids in non last lock
last_lock = dd.context.locks[-1] last_lock = dd.context.locks[-1]
self.assertNotEqual(cluster_lock, last_lock) self.assertNotEqual(cluster_lock, last_lock)
another_cluster = self.env.clusters[1]
with self.assertRaises(dd.TablesLockingOrderViolation): with self.assertRaises(dd.TablesLockingOrderViolation):
another_cluster.status = consts.CLUSTER_STATUSES.error another_cluster.status = consts.CLUSTER_STATUSES.error
@ -433,12 +429,10 @@ class TestDeadlockDetector(BaseTestCase):
db().delete(nodes[1]) db().delete(nodes[1])
def test_deletion_with_non_last_lock_failed(self): def test_deletion_with_non_last_lock_failed(self):
self.env.create_cluster() old_cluster = self.env.create_cluster()
self.env.create_nodes(2) self.env.create_nodes(2)
old_cluster = self.env.clusters[0] new_cluster = self.env.create_cluster()
self.env.create_cluster()
new_cluster = self.env.clusters[1]
# Locking clusters and nodes # Locking clusters and nodes
db().query(models.Cluster).with_lockmode('update').\ db().query(models.Cluster).with_lockmode('update').\

View File

@ -181,8 +181,7 @@ class TestDeploymentGraphModel(
expected_updated_tasks, serialized['tasks']) expected_updated_tasks, serialized['tasks'])
def test_deployment_graph_delete(self): def test_deployment_graph_delete(self):
self.env.create() cluster = self.env.create()
cluster = self.env.clusters[-1]
deployment_graph.DeploymentGraph.create_for_model( deployment_graph.DeploymentGraph.create_for_model(
{'tasks': JSON_TASKS, 'name': 'test_graph'}, cluster, 'test_graph' {'tasks': JSON_TASKS, 'name': 'test_graph'}, cluster, 'test_graph'
) )
@ -197,8 +196,7 @@ class TestDeploymentGraphModel(
self.assertIsNone(dg) self.assertIsNone(dg)
def test_deployment_graph_create_for_model(self): def test_deployment_graph_create_for_model(self):
self.env.create() cluster = self.env.create()
cluster = self.env.clusters[-1]
deployment_graph.DeploymentGraph.create_for_model( deployment_graph.DeploymentGraph.create_for_model(
{'tasks': JSON_TASKS, 'name': 'test_graph'}, cluster, 'test_graph' {'tasks': JSON_TASKS, 'name': 'test_graph'}, cluster, 'test_graph'
) )

View File

@ -26,11 +26,10 @@ class BaseTestNeutronDeploymentSerializer(base.BaseTestCase):
def setUp(self): def setUp(self):
super(BaseTestNeutronDeploymentSerializer, self).setUp() super(BaseTestNeutronDeploymentSerializer, self).setUp()
self.env.create( self.cluster = self.env.create(
cluster_kwargs={'net_provider': 'neutron'}, cluster_kwargs={'net_provider': 'neutron'},
release_kwargs={'version': self.env_version} release_kwargs={'version': self.env_version}
) )
self.cluster = self.env.clusters[0]
def check_shared_attrs_of_external_network(self, external_net): def check_shared_attrs_of_external_network(self, external_net):
self.assertEqual( self.assertEqual(

View File

@ -22,7 +22,7 @@ class TestDeploymentNodesFiltering(BaseIntegrationTest):
def setUp(self): def setUp(self):
super(TestDeploymentNodesFiltering, self).setUp() super(TestDeploymentNodesFiltering, self).setUp()
self.env.create( self.cluster = self.env.create(
release_kwargs={ release_kwargs={
'version': 'liberty-8.0' 'version': 'liberty-8.0'
}, },
@ -37,13 +37,13 @@ class TestDeploymentNodesFiltering(BaseIntegrationTest):
) )
def test_related_pending_deletion_nodes_not_present(self): def test_related_pending_deletion_nodes_not_present(self):
cluster = self.env.clusters[0] cluster = self.cluster
controllers = [n for n in cluster.nodes if 'controller' in n.all_roles] controllers = [n for n in cluster.nodes if 'controller' in n.all_roles]
nodes_to_deploy = TaskHelper.nodes_to_deploy(cluster) nodes_to_deploy = TaskHelper.nodes_to_deploy(cluster)
self.assertItemsEqual(controllers, nodes_to_deploy) self.assertItemsEqual(controllers, nodes_to_deploy)
def test_related_pending_deletion_nodes_not_present_with_force(self): def test_related_pending_deletion_nodes_not_present_with_force(self):
cluster = self.env.clusters[0] cluster = self.cluster
controllers = [n for n in cluster.nodes if 'controller' in n.all_roles] controllers = [n for n in cluster.nodes if 'controller' in n.all_roles]
nodes_to_deploy = TaskHelper.nodes_to_deploy(cluster, force=True) nodes_to_deploy = TaskHelper.nodes_to_deploy(cluster, force=True)
self.assertItemsEqual(controllers, nodes_to_deploy) self.assertItemsEqual(controllers, nodes_to_deploy)

View File

@ -211,11 +211,10 @@ class TestPipeline(BaseExtensionCase):
{'roles': ['controller'], 'pending_addition': True}, {'roles': ['controller'], 'pending_addition': True},
] ]
self.env.create( cluster = self.env.create(
cluster_kwargs={'api': False}, cluster_kwargs={'api': False},
nodes_kwargs=nodes_kwargs) nodes_kwargs=nodes_kwargs)
cluster = self.env.clusters[0]
cluster.extensions = [self.extension.name, 'volume_manager'] cluster.extensions = [self.extension.name, 'volume_manager']
self.db.flush() self.db.flush()
@ -352,11 +351,10 @@ class TestPipeline(BaseExtensionCase):
mfire_callback.call_args[0][2], cluster.nodes[1:]) mfire_callback.call_args[0][2], cluster.nodes[1:])
def test_pipeline_change_data(self): def test_pipeline_change_data(self):
self.env.create( cluster = self.env.create(
cluster_kwargs={'api': False}, cluster_kwargs={'api': False},
nodes_kwargs=[{'roles': ['controller'], 'pending_addition': True}] nodes_kwargs=[{'roles': ['controller'], 'pending_addition': True}]
) )
cluster = self.env.clusters[0]
cluster.extensions = [self.extension.name] cluster.extensions = [self.extension.name]
self.db.flush() self.db.flush()

View File

@ -152,8 +152,7 @@ class TestLogs(BaseIntegrationTest):
'text2', 'text2',
], ],
] ]
self.env.create_cluster() cluster = self.env.create_cluster()
cluster = self.env.clusters[0]
node = self.env.create_node(cluster_id=cluster.id, ip=node_ip) node = self.env.create_node(cluster_id=cluster.id, ip=node_ip)
self._create_logfile_for_node(settings.LOGS[0], log_entries) self._create_logfile_for_node(settings.LOGS[0], log_entries)
self._create_logfile_for_node(settings.LOGS[1], log_entries, node) self._create_logfile_for_node(settings.LOGS[1], log_entries, node)

View File

@ -20,13 +20,12 @@ class TestReplacedDataMigration(BaseIntegrationTest):
def setUp(self): def setUp(self):
super(TestReplacedDataMigration, self).setUp() super(TestReplacedDataMigration, self).setUp()
self.env.create( self.cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{'roles': ['controller'], 'pending_addition': True}, {'roles': ['controller'], 'pending_addition': True},
{'roles': ['controller', 'cinder'], 'pending_addition': True}, {'roles': ['controller', 'cinder'], 'pending_addition': True},
] ]
) )
self.cluster = self.env.clusters[0]
self.nodes = self.env.nodes self.nodes = self.env.nodes
self.deployment_info = [] self.deployment_info = []
self.provisioning_info = {'nodes': [], 'engine': {'custom': 'type'}} self.provisioning_info = {'nodes': [], 'engine': {'custom': 'type'}}

View File

@ -24,8 +24,7 @@ class TestMigrationPluginsClusterAttrs(BaseTestCase):
def setUp(self): def setUp(self):
super(TestMigrationPluginsClusterAttrs, self).setUp() super(TestMigrationPluginsClusterAttrs, self).setUp()
self.env.create() self.cluster = self.env.create()
self.cluster = self.env.clusters[0]
self.plugins = [ self.plugins = [
Plugin.create(self.env.get_default_plugin_metadata( Plugin.create(self.env.get_default_plugin_metadata(
name='plugin_name_1')), name='plugin_name_1')),

View File

@ -37,14 +37,14 @@ class TestNetworkCheck(BaseIntegrationTest):
def setUp(self): def setUp(self):
super(TestNetworkCheck, self).setUp() super(TestNetworkCheck, self).setUp()
self.env.create( cluster = self.env.create(
cluster_kwargs={}, cluster_kwargs={},
nodes_kwargs=[ nodes_kwargs=[
{"api": True, {"api": True,
"pending_addition": True}, "pending_addition": True},
] ]
) )
self.task = FakeTask(self.env.clusters[0]) self.task = FakeTask(cluster)
@patch.object(helpers, 'db') @patch.object(helpers, 'db')
def test_check_untagged_intersection_failed(self, mocked_db): def test_check_untagged_intersection_failed(self, mocked_db):
@ -717,7 +717,7 @@ class TestCheckVIPsNames(BaseIntegrationTest):
def setUp(self): def setUp(self):
super(TestCheckVIPsNames, self).setUp() super(TestCheckVIPsNames, self).setUp()
self.env.create( self.cluster = self.env.create(
release_kwargs={'version': 'liberty-8.0'}, release_kwargs={'version': 'liberty-8.0'},
cluster_kwargs={ cluster_kwargs={
'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron, 'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron,
@ -726,7 +726,6 @@ class TestCheckVIPsNames(BaseIntegrationTest):
nodes_kwargs=[{'roles': ['controller']}] nodes_kwargs=[{'roles': ['controller']}]
) )
self.cluster = self.env.clusters[0]
self.plugin = self.env.create_plugin(cluster=self.cluster) self.plugin = self.env.create_plugin(cluster=self.cluster)
self.task = Task(cluster_id=self.cluster.id) self.task = Task(cluster_id=self.cluster.id)
self.db.add(self.task) self.db.add(self.task)

View File

@ -339,7 +339,7 @@ class TestNetworkConfigurationValidator(base.BaseIntegrationTest):
str(netaddr.IPAddress(node_ip) + 10)]] str(netaddr.IPAddress(node_ip) + 10)]]
resp_neutron_net = self.env.neutron_networks_put( resp_neutron_net = self.env.neutron_networks_put(
self.env.clusters[0].id, self.cluster.id,
self.config, self.config,
expect_errors=True) expect_errors=True)
self.assertEqual(400, resp_neutron_net.status_code) self.assertEqual(400, resp_neutron_net.status_code)
@ -353,7 +353,7 @@ class TestNetworkConfigurationValidator(base.BaseIntegrationTest):
str(netaddr.IPAddress(node_ip) + 10)]] str(netaddr.IPAddress(node_ip) + 10)]]
resp_neutron_net = self.env.neutron_networks_put( resp_neutron_net = self.env.neutron_networks_put(
self.env.clusters[0].id, self.cluster.id,
self.config) self.config)
self.assertEqual(200, resp_neutron_net.status_code) self.assertEqual(200, resp_neutron_net.status_code)
@ -363,7 +363,7 @@ class TestNetworkConfigurationValidator(base.BaseIntegrationTest):
str(netaddr.IPAddress(node_ip) + 9)]] str(netaddr.IPAddress(node_ip) + 9)]]
resp_neutron_net = self.env.neutron_networks_put( resp_neutron_net = self.env.neutron_networks_put(
self.env.clusters[0].id, self.cluster.id,
self.config, self.config,
expect_errors=True) expect_errors=True)
self.assertEqual(400, resp_neutron_net.status_code) self.assertEqual(400, resp_neutron_net.status_code)
@ -549,7 +549,7 @@ class TestNeutronNetworkConfigurationValidatorProtocol(
) )
self.nc['networking_parameters'].pop('segmentation_type') self.nc['networking_parameters'].pop('segmentation_type')
serialized_data = self.serialize(self.nc) serialized_data = self.serialize(self.nc)
self.validator(serialized_data, self.env.clusters[0]) self.validator(serialized_data, self.cluster)
# networking parameters # networking parameters
def test_networking_parameters_additional_property(self): def test_networking_parameters_additional_property(self):

View File

@ -38,7 +38,7 @@ class TestAssignmentHandlers(BaseIntegrationTest):
) )
def test_assignment(self): def test_assignment(self):
self.env.create( self.cluster = self.env.create(
cluster_kwargs={"api": True}, cluster_kwargs={"api": True},
nodes_kwargs=[ nodes_kwargs=[
{ {
@ -47,7 +47,6 @@ class TestAssignmentHandlers(BaseIntegrationTest):
} }
] ]
) )
self.cluster = self.env.clusters[0]
node = self.env.nodes[0] node = self.env.nodes[0]
assignment_data = [ assignment_data = [
{ {
@ -162,7 +161,7 @@ class TestAssignmentHandlers(BaseIntegrationTest):
self.assertEquals(404, resp.status_code) self.assertEquals(404, resp.status_code)
def test_assign_conflicting_roles(self): def test_assign_conflicting_roles(self):
self.env.create( self.cluster = self.env.create(
cluster_kwargs={"api": True}, cluster_kwargs={"api": True},
nodes_kwargs=[ nodes_kwargs=[
{ {
@ -171,7 +170,6 @@ class TestAssignmentHandlers(BaseIntegrationTest):
} }
] ]
) )
self.cluster = self.env.clusters[0]
node = self.env.nodes[0] node = self.env.nodes[0]
assignment_data = [ assignment_data = [
{ {
@ -197,7 +195,7 @@ class TestAssignmentHandlers(BaseIntegrationTest):
release = self.env.create_release() release = self.env.create_release()
resp = self.env.create_role(release.id, ROLE) resp = self.env.create_role(release.id, ROLE)
self.env.create( self.cluster = self.env.create(
cluster_kwargs={ cluster_kwargs={
"api": True, "api": True,
"release_id": release.id "release_id": release.id
@ -209,7 +207,6 @@ class TestAssignmentHandlers(BaseIntegrationTest):
} }
] ]
) )
self.cluster = self.env.clusters[0]
node = self.env.nodes[0] node = self.env.nodes[0]
assignment_data = [ assignment_data = [
{ {

View File

@ -495,8 +495,7 @@ class TestNodeGroups(BaseIntegrationTest):
self.assertRegexpMatches(message, 'Cannot assign node group') self.assertRegexpMatches(message, 'Cannot assign node group')
def test_default_group_created_at_cluster_creation(self): def test_default_group_created_at_cluster_creation(self):
self.env.create_cluster() cluster = self.env.create_cluster()
cluster = self.env.clusters[0]
self.assertTrue(cluster.node_groups[0].is_default) self.assertTrue(cluster.node_groups[0].is_default)
@patch('nailgun.task.task.rpc.cast') @patch('nailgun.task.task.rpc.cast')
@ -524,7 +523,7 @@ class TestNodeGroups(BaseIntegrationTest):
def test_intersecting_ip_deleted_after_nodegroup_is_created(self): def test_intersecting_ip_deleted_after_nodegroup_is_created(self):
net_roles = copy.copy( net_roles = copy.copy(
self.env.clusters[0].release.network_roles_metadata) self.cluster.release.network_roles_metadata)
net_roles.append({ net_roles.append({
'id': 'stor/vip', 'id': 'stor/vip',
'default_mapping': consts.NETWORKS.storage, 'default_mapping': consts.NETWORKS.storage,
@ -536,7 +535,7 @@ class TestNodeGroups(BaseIntegrationTest):
'node_roles': ['controller'], 'node_roles': ['controller'],
}] }]
}}) }})
self.env.clusters[0].release.network_roles_metadata = net_roles self.cluster.release.network_roles_metadata = net_roles
self.db.flush() self.db.flush()
# allocate VIPs # allocate VIPs

View File

@ -48,7 +48,7 @@ class ExtraFunctions(base.BaseTestCase):
return plugin_ids return plugin_ids
def _create_test_cluster(self): def _create_test_cluster(self):
self.env.create( return self.env.create(
cluster_kwargs={'mode': consts.CLUSTER_MODES.multinode}, cluster_kwargs={'mode': consts.CLUSTER_MODES.multinode},
release_kwargs={ release_kwargs={
'name': uuid.uuid4().get_hex(), 'name': uuid.uuid4().get_hex(),
@ -57,8 +57,6 @@ class ExtraFunctions(base.BaseTestCase):
'modes': [consts.CLUSTER_MODES.multinode, 'modes': [consts.CLUSTER_MODES.multinode,
consts.CLUSTER_MODES.ha_compact]}) consts.CLUSTER_MODES.ha_compact]})
return self.env.clusters[0]
class TestPluginCollection(ExtraFunctions): class TestPluginCollection(ExtraFunctions):

View File

@ -278,13 +278,13 @@ class TestNodeObject(BaseIntegrationTest):
def test_get_kernel_params_overwriten(self): def test_get_kernel_params_overwriten(self):
"""Test verifies that overwriten kernel params will be returned.""" """Test verifies that overwriten kernel params will be returned."""
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{"role": "controller"} {"role": "controller"}
]) ])
additional_kernel_params = 'intel_iommu=true' additional_kernel_params = 'intel_iommu=true'
default_kernel_params = objects.Cluster.get_default_kernel_params( default_kernel_params = objects.Cluster.get_default_kernel_params(
self.env.clusters[0]) cluster)
kernel_params = '{0} {1}'.format(default_kernel_params, kernel_params = '{0} {1}'.format(default_kernel_params,
additional_kernel_params) additional_kernel_params)
self.env.nodes[0].kernel_params = kernel_params self.env.nodes[0].kernel_params = kernel_params
@ -305,13 +305,12 @@ class TestNodeObject(BaseIntegrationTest):
'pending_addition': True}, 'pending_addition': True},
{'roles': [], 'pending_roles': ['controller'], {'roles': [], 'pending_roles': ['controller'],
'pending_addition': True}] 'pending_addition': True}]
self.env.create( cluster = self.env.create(
cluster_kwargs={ cluster_kwargs={
'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron, 'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron,
}, },
nodes_kwargs=nodes) nodes_kwargs=nodes)
cluster = self.env.clusters[0]
cluster.release.roles_metadata['mongo']['public_ip_required'] = True cluster.release.roles_metadata['mongo']['public_ip_required'] = True
attrs = cluster.attributes.editable attrs = cluster.attributes.editable
self.assertFalse( self.assertFalse(
@ -336,14 +335,14 @@ class TestNodeObject(BaseIntegrationTest):
self.assertEqual(nodes_w_public_count, len(nodes)) self.assertEqual(nodes_w_public_count, len(nodes))
def test_should_have_public_with_ip_with_given_metadata(self): def test_should_have_public_with_ip_with_given_metadata(self):
self.env.create( cluster = self.env.create(
cluster_kwargs={ cluster_kwargs={
'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron, 'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron,
}, },
nodes_kwargs=[{}, {}]) nodes_kwargs=[{}, {}])
node = self.env.nodes[0] node = self.env.nodes[0]
roles_metadata = objects.Cluster.get_roles(self.env.clusters[0]) roles_metadata = objects.Cluster.get_roles(cluster)
with mock.patch.object(objects.Cluster, 'get_roles') as get_roles_mock: with mock.patch.object(objects.Cluster, 'get_roles') as get_roles_mock:
get_roles_mock.return_value = roles_metadata get_roles_mock.return_value = roles_metadata
objects.Node.should_have_public_with_ip(node) objects.Node.should_have_public_with_ip(node)
@ -363,13 +362,12 @@ class TestNodeObject(BaseIntegrationTest):
'pending_addition': True}, 'pending_addition': True},
{'roles': [], 'pending_roles': ['controller'], {'roles': [], 'pending_roles': ['controller'],
'pending_addition': True}] 'pending_addition': True}]
self.env.create( cluster = self.env.create(
cluster_kwargs={ cluster_kwargs={
'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron, 'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron,
}, },
nodes_kwargs=nodes) nodes_kwargs=nodes)
cluster = self.env.clusters[0]
attrs = copy.deepcopy(cluster.attributes.editable) attrs = copy.deepcopy(cluster.attributes.editable)
attrs['neutron_advanced_configuration']['neutron_dvr']['value'] = True attrs['neutron_advanced_configuration']['neutron_dvr']['value'] = True
resp = self.app.patch( resp = self.app.patch(
@ -693,7 +691,7 @@ class TestTaskObject(BaseIntegrationTest):
def setUp(self): def setUp(self):
super(TestTaskObject, self).setUp() super(TestTaskObject, self).setUp()
self.env.create( self.cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{'roles': ['controller']}, {'roles': ['controller']},
{'roles': ['compute']}, {'roles': ['compute']},
@ -708,10 +706,6 @@ class TestTaskObject(BaseIntegrationTest):
for node in nodes: for node in nodes:
self.assertEquals(node.status, consts.NODE_STATUSES.discover) self.assertEquals(node.status, consts.NODE_STATUSES.discover)
@property
def cluster(self):
return self.env.clusters[0]
def test_update_nodes_to_error_if_deployment_task_failed(self): def test_update_nodes_to_error_if_deployment_task_failed(self):
self.cluster.nodes[0].status = consts.NODE_STATUSES.deploying self.cluster.nodes[0].status = consts.NODE_STATUSES.deploying
self.cluster.nodes[0].progress = 12 self.cluster.nodes[0].progress = 12
@ -914,12 +908,11 @@ class TestTaskObject(BaseIntegrationTest):
class TestTransactionObject(BaseIntegrationTest): class TestTransactionObject(BaseIntegrationTest):
def setUp(self): def setUp(self):
super(TestTransactionObject, self).setUp() super(TestTransactionObject, self).setUp()
self.env.create( self.cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{'roles': ['controller']}, {'roles': ['controller']},
{'roles': ['compute']}, {'roles': ['compute']},
{'roles': ['cinder']}]) {'roles': ['cinder']}])
self.cluster = self.env.clusters[-1]
def test_get_last_success_run(self): def test_get_last_success_run(self):
objects.Transaction.create({ objects.Transaction.create({
@ -1094,14 +1087,13 @@ class TestClusterObject(BaseTestCase):
def setUp(self): def setUp(self):
super(TestClusterObject, self).setUp() super(TestClusterObject, self).setUp()
self.env.create( self.cluster = self.env.create(
cluster_kwargs={'net_provider': 'neutron'}, cluster_kwargs={'net_provider': 'neutron'},
nodes_kwargs=[ nodes_kwargs=[
{'roles': ['controller']}, {'roles': ['controller']},
{'roles': ['controller']}, {'roles': ['controller']},
{'roles': ['compute']}, {'roles': ['compute']},
{'roles': ['cinder']}]) {'roles': ['cinder']}])
self.cluster = self.env.clusters[0]
def _create_cluster_with_plugins(self, plugins_kw_list): def _create_cluster_with_plugins(self, plugins_kw_list):
cluster = self.env.create_cluster(api=False) cluster = self.env.create_cluster(api=False)
@ -1188,17 +1180,16 @@ class TestClusterObject(BaseTestCase):
'nailgun.objects.cluster.' 'nailgun.objects.cluster.'
'fire_callback_on_node_collection_delete') 'fire_callback_on_node_collection_delete')
def test_delete(self, mock_node_coll_delete_cb, mock_cluster_delete_cb): def test_delete(self, mock_node_coll_delete_cb, mock_cluster_delete_cb):
cluster = self.env.clusters[0] ids = [node.id for node in self.cluster.nodes]
ids = [node.id for node in cluster.nodes] objects.Cluster.delete(self.cluster)
objects.Cluster.delete(cluster)
mock_node_coll_delete_cb.assert_called_once_with(ids) mock_node_coll_delete_cb.assert_called_once_with(ids)
mock_cluster_delete_cb.assert_called_once_with(cluster) mock_cluster_delete_cb.assert_called_once_with(self.cluster)
self.assertEqual(self.db.query(objects.Node.model).count(), 0) self.assertEqual(self.db.query(objects.Node.model).count(), 0)
self.assertEqual(self.db.query(objects.Cluster.model).count(), 0) self.assertEqual(self.db.query(objects.Cluster.model).count(), 0)
def test_all_controllers(self): def test_all_controllers(self):
self.assertEqual(len(objects.Cluster.get_nodes_by_role( self.assertEqual(len(objects.Cluster.get_nodes_by_role(
self.env.clusters[0], 'controller')), 2) self.cluster, 'controller')), 2)
def test_put_delete_template_after_deployment(self): def test_put_delete_template_after_deployment(self):
allowed = [consts.CLUSTER_STATUSES.new, allowed = [consts.CLUSTER_STATUSES.new,
@ -1207,19 +1198,19 @@ class TestClusterObject(BaseTestCase):
consts.CLUSTER_STATUSES.error, consts.CLUSTER_STATUSES.error,
consts.CLUSTER_STATUSES.partially_deployed] consts.CLUSTER_STATUSES.partially_deployed]
for status in consts.CLUSTER_STATUSES: for status in consts.CLUSTER_STATUSES:
self.env.clusters[0].status = status self.cluster.status = status
self.db.flush() self.db.flush()
self.assertEqual( self.assertEqual(
objects.Cluster.is_network_modification_locked( objects.Cluster.is_network_modification_locked(
self.env.clusters[0]), self.cluster),
status not in allowed status not in allowed
) )
def test_get_controller_group_id(self): def test_get_controller_group_id(self):
controllers = objects.Cluster.get_nodes_by_role( controllers = objects.Cluster.get_nodes_by_role(
self.env.clusters[0], 'controller') self.cluster, 'controller')
group_id = objects.Cluster.get_controllers_group_id( group_id = objects.Cluster.get_controllers_group_id(
self.env.clusters[0]) self.cluster)
self.assertEqual(controllers[0].group_id, group_id) self.assertEqual(controllers[0].group_id, group_id)
def test_get_node_group(self): def test_get_node_group(self):
@ -1284,7 +1275,7 @@ class TestClusterObject(BaseTestCase):
for inf in node.nic_interfaces: for inf in node.nic_interfaces:
interfaces.append(inf) interfaces.append(inf)
nic_interfaces = objects.Cluster.get_nic_interfaces_for_all_nodes( nic_interfaces = objects.Cluster.get_nic_interfaces_for_all_nodes(
self.env.clusters[0]) self.cluster)
self.assertEqual(len(nic_interfaces), len(interfaces)) self.assertEqual(len(nic_interfaces), len(interfaces))
def test_get_bond_interfaces_for_all_nodes(self): def test_get_bond_interfaces_for_all_nodes(self):
@ -1294,14 +1285,13 @@ class TestClusterObject(BaseTestCase):
slaves=node.nic_interfaces)) slaves=node.nic_interfaces))
self.db.flush() self.db.flush()
bond_interfaces = objects.Cluster.get_bond_interfaces_for_all_nodes( bond_interfaces = objects.Cluster.get_bond_interfaces_for_all_nodes(
self.env.clusters[0]) self.cluster)
self.assertEqual(len(bond_interfaces), 1) self.assertEqual(len(bond_interfaces), 1)
def test_get_network_roles(self): def test_get_network_roles(self):
cluster = self.env.clusters[0]
self.assertItemsEqual( self.assertItemsEqual(
objects.Cluster.get_network_roles(cluster), objects.Cluster.get_network_roles(self.cluster),
cluster.release.network_roles_metadata) self.cluster.release.network_roles_metadata)
def test_get_deployment_tasks(self): def test_get_deployment_tasks(self):
deployment_tasks = self.env.get_default_plugin_deployment_tasks() deployment_tasks = self.env.get_default_plugin_deployment_tasks()
@ -1445,17 +1435,16 @@ class TestClusterObject(BaseTestCase):
plugin_volumes_metadata['volumes']) plugin_volumes_metadata['volumes'])
volumes_metadata = objects.Cluster.get_volumes_metadata( volumes_metadata = objects.Cluster.get_volumes_metadata(
self.env.clusters[0]) self.cluster)
self.assertDictEqual( self.assertDictEqual(
volumes_metadata, expected_volumes_metadata) volumes_metadata, expected_volumes_metadata)
def test_cluster_is_component_enabled(self): def test_cluster_is_component_enabled(self):
cluster = self.env.clusters[0] self.assertFalse(objects.Cluster.is_component_enabled(self.cluster,
self.assertFalse(objects.Cluster.is_component_enabled(cluster,
'ironic')) 'ironic'))
self.env._set_additional_component(cluster, 'ironic', True) self.env._set_additional_component(self.cluster, 'ironic', True)
self.assertTrue(objects.Cluster.is_component_enabled(cluster, self.assertTrue(objects.Cluster.is_component_enabled(self.cluster,
'ironic')) 'ironic'))
def test_get_cluster_attributes_by_components(self): def test_get_cluster_attributes_by_components(self):
@ -1621,7 +1610,7 @@ class TestClusterObjectVirtRoles(BaseTestCase):
def setUp(self): def setUp(self):
super(TestClusterObjectVirtRoles, self).setUp() super(TestClusterObjectVirtRoles, self).setUp()
self.env.create( self.cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{'roles': ['virt']}, {'roles': ['virt']},
{'roles': ['virt']}, {'roles': ['virt']},
@ -1640,14 +1629,14 @@ class TestClusterObjectVirtRoles(BaseTestCase):
] ]
def test_set_vms_created_state(self): def test_set_vms_created_state(self):
objects.Cluster.set_vms_created_state(self.env.clusters[0]) objects.Cluster.set_vms_created_state(self.cluster)
for node in self.env.nodes: for node in self.env.nodes:
for conf in node.vms_conf: for conf in node.vms_conf:
self.assertTrue(conf['created']) self.assertTrue(conf['created'])
def test_reset_vms_created_state(self): def test_reset_vms_created_state(self):
objects.Cluster.set_vms_created_state(self.env.clusters[0]) objects.Cluster.set_vms_created_state(self.cluster)
objects.Node.reset_vms_created_state(self.env.nodes[0]) objects.Node.reset_vms_created_state(self.env.nodes[0])
@ -1663,7 +1652,7 @@ class TestClusterObjectGetRoles(BaseTestCase):
def setUp(self): def setUp(self):
super(TestClusterObjectGetRoles, self).setUp() super(TestClusterObjectGetRoles, self).setUp()
self.env.create( self.cluster = self.env.create(
release_kwargs={ release_kwargs={
'roles_metadata': { 'roles_metadata': {
'role_a': { 'role_a': {
@ -1672,7 +1661,6 @@ class TestClusterObjectGetRoles(BaseTestCase):
'name': 'Role B', 'description': 'Role B is ...', }, 'name': 'Role B', 'description': 'Role B is ...', },
} }
}) })
self.cluster = self.env.clusters[0]
def create_plugin(self, roles_metadata): def create_plugin(self, roles_metadata):
plugin = objects.Plugin.create(self.env.get_default_plugin_metadata( plugin = objects.Plugin.create(self.env.get_default_plugin_metadata(
@ -1756,7 +1744,8 @@ class TestClusterObjectGetRoles(BaseTestCase):
class TestClusterObjectGetNetworkManager(BaseTestCase): class TestClusterObjectGetNetworkManager(BaseTestCase):
def setUp(self): def setUp(self):
super(TestClusterObjectGetNetworkManager, self).setUp() super(TestClusterObjectGetNetworkManager, self).setUp()
self.env.create(cluster_kwargs={'net_provider': 'neutron'}) self.cluster = self.env.create(
cluster_kwargs={'net_provider': 'neutron'})
def test_get_default(self): def test_get_default(self):
nm = objects.Cluster.get_network_manager() nm = objects.Cluster.get_network_manager()
@ -1764,19 +1753,17 @@ class TestClusterObjectGetNetworkManager(BaseTestCase):
def check_neutron_network_manager( def check_neutron_network_manager(
self, net_provider, version, expected_manager): self, net_provider, version, expected_manager):
cluster = self.env.clusters[0] self.cluster.net_provider = net_provider
cluster.net_provider = net_provider self.cluster.release.version = version
cluster.release.version = version nm = objects.Cluster.get_network_manager(self.cluster)
nm = objects.Cluster.get_network_manager(cluster)
self.assertIs(expected_manager, nm) self.assertIs(expected_manager, nm)
def test_raise_if_unknown(self): def test_raise_if_unknown(self):
cluster = self.env.clusters[0] self.cluster.net_provider = "invalid_data"
cluster.net_provider = "invalid_data"
self.assertRaisesWithMessage( self.assertRaisesWithMessage(
Exception, Exception,
'The network provider "invalid_data" is not supported.', 'The network provider "invalid_data" is not supported.',
objects.Cluster.get_network_manager, cluster objects.Cluster.get_network_manager, self.cluster
) )
def test_neutron_network_managers_by_version(self): def test_neutron_network_managers_by_version(self):
@ -1803,8 +1790,8 @@ class TestClusterObjectGetNetworkManager(BaseTestCase):
) )
def test_get_neutron_80(self): def test_get_neutron_80(self):
self.env.clusters[0].release.version = '2014.2.2-8.0' self.cluster.release.version = '2014.2.2-8.0'
nm = objects.Cluster.get_network_manager(self.env.clusters[0]) nm = objects.Cluster.get_network_manager(self.cluster)
self.assertEqual(nm, neutron.NeutronManager80) self.assertEqual(nm, neutron.NeutronManager80)
@ -1934,14 +1921,13 @@ class TestOpenstackConfig(BaseTestCase):
def setUp(self): def setUp(self):
super(TestOpenstackConfig, self).setUp() super(TestOpenstackConfig, self).setUp()
self.env.create( self.cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{'role': 'controller', 'status': 'ready'}, {'role': 'controller', 'status': 'ready'},
{'role': 'compute', 'status': 'ready'}, {'role': 'compute', 'status': 'ready'},
{'role': 'cinder', 'status': 'ready'}, {'role': 'cinder', 'status': 'ready'},
]) ])
self.cluster = self.env.clusters[0]
self.nodes = self.env.nodes self.nodes = self.env.nodes
def test_create(self): def test_create(self):
@ -2007,14 +1993,13 @@ class TestOpenstackConfigCollection(BaseTestCase):
def setUp(self): def setUp(self):
super(TestOpenstackConfigCollection, self).setUp() super(TestOpenstackConfigCollection, self).setUp()
self.env.create( self.cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{'role': 'controller', 'status': 'ready'}, {'role': 'controller', 'status': 'ready'},
{'role': 'compute', 'status': 'ready'}, {'role': 'compute', 'status': 'ready'},
{'role': 'cinder', 'status': 'ready'}, {'role': 'cinder', 'status': 'ready'},
]) ])
self.cluster = self.env.clusters[0]
self.nodes = self.env.nodes self.nodes = self.env.nodes
def test_create(self): def test_create(self):
@ -2093,10 +2078,9 @@ class TestNICObject(BaseTestCase):
def setUp(self): def setUp(self):
super(TestNICObject, self).setUp() super(TestNICObject, self).setUp()
self.env.create( self.cluster = self.env.create(
cluster_kwargs={'api': False}, cluster_kwargs={'api': False},
nodes_kwargs=[{'role': 'controller'}]) nodes_kwargs=[{'role': 'controller'}])
self.cluster = self.env.clusters[0]
def test_replace_assigned_networks(self): def test_replace_assigned_networks(self):
node = self.env.nodes[0] node = self.env.nodes[0]
@ -2133,10 +2117,9 @@ class TestIPAddrObject(BaseTestCase):
def setUp(self): def setUp(self):
super(TestIPAddrObject, self).setUp() super(TestIPAddrObject, self).setUp()
self.env.create( self.cluster = self.env.create(
cluster_kwargs={'api': False}, cluster_kwargs={'api': False},
nodes_kwargs=[{'role': 'controller'}]) nodes_kwargs=[{'role': 'controller'}])
self.cluster = self.env.clusters[0]
def test_get_ips_except_admin(self): def test_get_ips_except_admin(self):
node = self.env.nodes[0] node = self.env.nodes[0]

View File

@ -63,14 +63,13 @@ class TestPluginBase(base.BaseTestCase):
} }
) )
self.plugin = Plugin.create(self.plugin_metadata) self.plugin = Plugin.create(self.plugin_metadata)
self.env.create( self.cluster = self.env.create(
cluster_kwargs={'mode': consts.CLUSTER_MODES.multinode}, cluster_kwargs={'mode': consts.CLUSTER_MODES.multinode},
release_kwargs={ release_kwargs={
'version': '2015.1-8.0', 'version': '2015.1-8.0',
'operating_system': 'Ubuntu', 'operating_system': 'Ubuntu',
'modes': [consts.CLUSTER_MODES.multinode, 'modes': [consts.CLUSTER_MODES.multinode,
consts.CLUSTER_MODES.ha_compact]}) consts.CLUSTER_MODES.ha_compact]})
self.cluster = self.env.clusters[0]
self.plugin_adapter = adapters.wrap_plugin(self.plugin) self.plugin_adapter = adapters.wrap_plugin(self.plugin)
self.env_config = self.env.get_default_plugin_env_config() self.env_config = self.env.get_default_plugin_env_config()
self.get_config = lambda *args: mock.mock_open( self.get_config = lambda *args: mock.mock_open(

View File

@ -38,7 +38,7 @@ class BasePrimaryRolesAssignmentTestCase(base.BaseTestCase):
pass pass
def test_primary_controllers_assigned_for_pendings_roles(self): def test_primary_controllers_assigned_for_pendings_roles(self):
self.env.create( cluster = self.env.create(
cluster_kwargs={'mode': consts.CLUSTER_MODES.ha_compact}, cluster_kwargs={'mode': consts.CLUSTER_MODES.ha_compact},
release_kwargs={'version': '2014.2-6.0', release_kwargs={'version': '2014.2-6.0',
'operating_system': 'Ubuntu'}, 'operating_system': 'Ubuntu'},
@ -49,7 +49,6 @@ class BasePrimaryRolesAssignmentTestCase(base.BaseTestCase):
{'pending_roles': [self.role_name], {'pending_roles': [self.role_name],
'status': consts.NODE_STATUSES.discover, 'status': consts.NODE_STATUSES.discover,
'pending_addition': True}]) 'pending_addition': True}])
cluster = self.env.clusters[0]
objects.Cluster.set_primary_roles(cluster, cluster.nodes) objects.Cluster.set_primary_roles(cluster, cluster.nodes)
nodes = sorted(cluster.nodes, key=lambda node: node.id) nodes = sorted(cluster.nodes, key=lambda node: node.id)
# with lowest uid is assigned as primary # with lowest uid is assigned as primary
@ -59,7 +58,7 @@ class BasePrimaryRolesAssignmentTestCase(base.BaseTestCase):
objects.Node.all_roles(nodes[1]), [self.role_name]) objects.Node.all_roles(nodes[1]), [self.role_name])
def test_primary_controller_assigned_for_ready_node(self): def test_primary_controller_assigned_for_ready_node(self):
self.env.create( cluster = self.env.create(
cluster_kwargs={'mode': consts.CLUSTER_MODES.ha_compact}, cluster_kwargs={'mode': consts.CLUSTER_MODES.ha_compact},
release_kwargs={'version': '2014.2-6.0', release_kwargs={'version': '2014.2-6.0',
'operating_system': 'Ubuntu'}, 'operating_system': 'Ubuntu'},
@ -70,7 +69,6 @@ class BasePrimaryRolesAssignmentTestCase(base.BaseTestCase):
{'roles': [self.role_name], {'roles': [self.role_name],
'status': consts.NODE_STATUSES.ready, 'status': consts.NODE_STATUSES.ready,
'pending_addition': True}]) 'pending_addition': True}])
cluster = self.env.clusters[0]
objects.Cluster.set_primary_roles(cluster, cluster.nodes) objects.Cluster.set_primary_roles(cluster, cluster.nodes)
# primary assigned to node with ready status # primary assigned to node with ready status
nodes = sorted(cluster.nodes, key=lambda node: node.id) nodes = sorted(cluster.nodes, key=lambda node: node.id)
@ -84,7 +82,7 @@ class BasePrimaryRolesAssignmentTestCase(base.BaseTestCase):
def test_primary_assignment_multinode(self): def test_primary_assignment_multinode(self):
"""Primary should not be assigned in multinode env.""" """Primary should not be assigned in multinode env."""
self.env.create( cluster = self.env.create(
cluster_kwargs={'mode': consts.CLUSTER_MODES.multinode}, cluster_kwargs={'mode': consts.CLUSTER_MODES.multinode},
release_kwargs={'version': '2014.2-6.0', release_kwargs={'version': '2014.2-6.0',
'operating_system': 'Ubuntu', 'operating_system': 'Ubuntu',
@ -97,7 +95,6 @@ class BasePrimaryRolesAssignmentTestCase(base.BaseTestCase):
{'roles': [self.role_name], {'roles': [self.role_name],
'status': consts.NODE_STATUSES.ready, 'status': consts.NODE_STATUSES.ready,
'pending_addition': True}]) 'pending_addition': True}])
cluster = self.env.clusters[0]
objects.Cluster.set_primary_roles(cluster, cluster.nodes) objects.Cluster.set_primary_roles(cluster, cluster.nodes)
self.assertEqual( self.assertEqual(
objects.Node.all_roles(cluster.nodes[0]), [self.role_name]) objects.Node.all_roles(cluster.nodes[0]), [self.role_name])
@ -105,7 +102,7 @@ class BasePrimaryRolesAssignmentTestCase(base.BaseTestCase):
objects.Node.all_roles(cluster.nodes[1]), [self.role_name]) objects.Node.all_roles(cluster.nodes[1]), [self.role_name])
def test_primary_not_assigned_to_pending_deletion(self): def test_primary_not_assigned_to_pending_deletion(self):
self.env.create( cluster = self.env.create(
cluster_kwargs={'mode': consts.CLUSTER_MODES.ha_compact}, cluster_kwargs={'mode': consts.CLUSTER_MODES.ha_compact},
release_kwargs={'version': '2014.2-6.0', release_kwargs={'version': '2014.2-6.0',
'operating_system': 'Ubuntu'}, 'operating_system': 'Ubuntu'},
@ -113,14 +110,13 @@ class BasePrimaryRolesAssignmentTestCase(base.BaseTestCase):
{'roles': [self.role_name], {'roles': [self.role_name],
'status': consts.NODE_STATUSES.ready, 'status': consts.NODE_STATUSES.ready,
'pending_deletion': True}]) 'pending_deletion': True}])
cluster = self.env.clusters[0]
objects.Cluster.set_primary_roles(cluster, cluster.nodes) objects.Cluster.set_primary_roles(cluster, cluster.nodes)
self.assertEqual( self.assertEqual(
objects.Node.all_roles(cluster.nodes[0]), [self.role_name]) objects.Node.all_roles(cluster.nodes[0]), [self.role_name])
@contextmanager @contextmanager
def assert_node_reassigned(self): def assert_node_reassigned(self):
self.env.create( cluster = self.env.create(
cluster_kwargs={'mode': consts.CLUSTER_MODES.ha_compact}, cluster_kwargs={'mode': consts.CLUSTER_MODES.ha_compact},
release_kwargs={'version': '2014.2-6.0', release_kwargs={'version': '2014.2-6.0',
'operating_system': 'Ubuntu'}, 'operating_system': 'Ubuntu'},
@ -131,7 +127,6 @@ class BasePrimaryRolesAssignmentTestCase(base.BaseTestCase):
{'roles': [self.role_name], {'roles': [self.role_name],
'status': consts.NODE_STATUSES.ready, 'status': consts.NODE_STATUSES.ready,
'pending_addition': True}]) 'pending_addition': True}])
cluster = self.env.clusters[0]
objects.Cluster.set_primary_roles(cluster, cluster.nodes) objects.Cluster.set_primary_roles(cluster, cluster.nodes)
nodes = sorted(cluster.nodes, key=lambda node: node.id) nodes = sorted(cluster.nodes, key=lambda node: node.id)
self.assertEqual( self.assertEqual(

View File

@ -30,12 +30,11 @@ class TestNailgunReceiver(base.BaseTestCase):
def setUp(self): def setUp(self):
super(TestNailgunReceiver, self).setUp() super(TestNailgunReceiver, self).setUp()
self.env.create( self.cluster = self.env.create(
status=consts.CLUSTER_STATUSES.operational, status=consts.CLUSTER_STATUSES.operational,
nodes_kwargs=[ nodes_kwargs=[
{'roles': ['controller'], {'roles': ['controller'],
'status': consts.NODE_STATUSES.ready}]) 'status': consts.NODE_STATUSES.ready}])
self.cluster = self.env.clusters[0]
for i in range(2): for i in range(2):
meta = self.env.get_default_plugin_metadata( meta = self.env.get_default_plugin_metadata(

View File

@ -32,45 +32,42 @@ class TestClusterRedeploymentScenario(base.BaseTestCase):
nodes_kwargs=nodes_kwargs) nodes_kwargs=nodes_kwargs)
def test_cluster_deployed_with_computes(self): def test_cluster_deployed_with_computes(self):
self.create_env( cluster = self.create_env(
nodes_kwargs=[ nodes_kwargs=[
{'pending_roles': ['controller'], {'pending_roles': ['controller'],
'status': 'discover', 'status': 'discover',
'pending_addition': True}, 'pending_addition': True},
{'roles': ['compute'], {'roles': ['compute'],
'status': 'ready'}]) 'status': 'ready'}])
cluster = self.env.clusters[0]
nodes = helpers.TaskHelper.nodes_to_deploy(cluster) nodes = helpers.TaskHelper.nodes_to_deploy(cluster)
self.assertEqual(cluster.nodes, nodes) self.assertEqual(cluster.nodes, nodes)
def test_cluster_deployed_with_cinder(self): def test_cluster_deployed_with_cinder(self):
self.create_env( cluster = self.create_env(
nodes_kwargs=[ nodes_kwargs=[
{'pending_roles': ['controller'], {'pending_roles': ['controller'],
'status': 'discover', 'status': 'discover',
'pending_addition': True}, 'pending_addition': True},
{'roles': ['cinder'], {'roles': ['cinder'],
'status': 'ready'}]) 'status': 'ready'}])
cluster = self.env.clusters[0]
nodes = helpers.TaskHelper.nodes_to_deploy(cluster) nodes = helpers.TaskHelper.nodes_to_deploy(cluster)
self.assertEqual(cluster.nodes, nodes) self.assertEqual(cluster.nodes, nodes)
def test_ceph_osd_is_not_affected(self): def test_ceph_osd_is_not_affected(self):
self.create_env( cluster = self.create_env(
nodes_kwargs=[ nodes_kwargs=[
{'pending_roles': ['controller'], {'pending_roles': ['controller'],
'status': 'discover', 'status': 'discover',
'pending_addition': True}, 'pending_addition': True},
{'roles': ['ceph-osd'], {'roles': ['ceph-osd'],
'status': 'ready'}]) 'status': 'ready'}])
cluster = self.env.clusters[0]
nodes = helpers.TaskHelper.nodes_to_deploy(cluster) nodes = helpers.TaskHelper.nodes_to_deploy(cluster)
self.assertNotEqual(cluster.nodes, nodes) self.assertNotEqual(cluster.nodes, nodes)
self.assertEqual(len(nodes), 1) self.assertEqual(len(nodes), 1)
self.assertEqual(nodes[0].pending_roles, ['controller']) self.assertEqual(nodes[0].pending_roles, ['controller'])
def test_cinder_is_not_affected_when_add_compute(self): def test_cinder_is_not_affected_when_add_compute(self):
self.create_env( cluster = self.create_env(
nodes_kwargs=[ nodes_kwargs=[
{'roles': ['controller'], {'roles': ['controller'],
'status': 'ready'}, 'status': 'ready'},
@ -79,27 +76,25 @@ class TestClusterRedeploymentScenario(base.BaseTestCase):
'pending_addition': True}, 'pending_addition': True},
{'roles': ['cinder'], {'roles': ['cinder'],
'status': 'ready'}]) 'status': 'ready'}])
cluster = self.env.clusters[0]
nodes = helpers.TaskHelper.nodes_to_deploy(cluster) nodes = helpers.TaskHelper.nodes_to_deploy(cluster)
self.assertNotEqual(cluster.nodes, nodes) self.assertNotEqual(cluster.nodes, nodes)
self.assertEqual(len(nodes), 1) self.assertEqual(len(nodes), 1)
self.assertEqual(nodes[0].pending_roles, ['compute']) self.assertEqual(nodes[0].pending_roles, ['compute'])
def test_controllers_redeployed_if_ceph_added(self): def test_controllers_redeployed_if_ceph_added(self):
self.create_env( cluster = self.create_env(
nodes_kwargs=[ nodes_kwargs=[
{'roles': ['controller'], {'roles': ['controller'],
'status': 'ready'}, 'status': 'ready'},
{'pending_roles': ['ceph-osd'], {'pending_roles': ['ceph-osd'],
'status': 'discover', 'status': 'discover',
'pending_addition': True}]) 'pending_addition': True}])
cluster = self.env.clusters[0]
nodes = helpers.TaskHelper.nodes_to_deploy(cluster) nodes = helpers.TaskHelper.nodes_to_deploy(cluster)
self.assertEqual(len(nodes), 2) self.assertEqual(len(nodes), 2)
self.assertEqual(sorted(cluster.nodes), sorted(nodes)) self.assertEqual(sorted(cluster.nodes), sorted(nodes))
def test_controllers_not_redeployed_if_ceph_previously_in_cluster(self): def test_controllers_not_redeployed_if_ceph_previously_in_cluster(self):
self.create_env( cluster = self.create_env(
nodes_kwargs=[ nodes_kwargs=[
{'roles': ['controller'], {'roles': ['controller'],
'status': 'ready'}, 'status': 'ready'},
@ -108,7 +103,6 @@ class TestClusterRedeploymentScenario(base.BaseTestCase):
{'pending_roles': ['ceph-osd'], {'pending_roles': ['ceph-osd'],
'status': 'discover', 'status': 'discover',
'pending_addition': True}]) 'pending_addition': True}])
cluster = self.env.clusters[0]
nodes = helpers.TaskHelper.nodes_to_deploy(cluster) nodes = helpers.TaskHelper.nodes_to_deploy(cluster)
self.assertEqual(len(nodes), 1) self.assertEqual(len(nodes), 1)
self.assertEqual(nodes[0].pending_roles, ['ceph-osd']) self.assertEqual(nodes[0].pending_roles, ['ceph-osd'])

View File

@ -32,7 +32,7 @@ class TestClusterDeletionTask(BaseTestCase):
def create_cluster_and_execute_deletion_task( def create_cluster_and_execute_deletion_task(
self, attributes=None, os=consts.RELEASE_OS.centos): self, attributes=None, os=consts.RELEASE_OS.centos):
self.env.create( cluster = self.env.create(
cluster_kwargs={ cluster_kwargs={
'editable_attributes': attributes, 'editable_attributes': attributes,
}, },
@ -42,7 +42,7 @@ class TestClusterDeletionTask(BaseTestCase):
}, },
) )
self.fake_task = Task(name=consts.TASK_NAMES.cluster_deletion, self.fake_task = Task(name=consts.TASK_NAMES.cluster_deletion,
cluster=self.env.clusters[0]) cluster=cluster)
task.ClusterDeletionTask.execute(self.fake_task) task.ClusterDeletionTask.execute(self.fake_task)
@mock.patch('nailgun.task.task.DeletionTask', autospec=True) @mock.patch('nailgun.task.task.DeletionTask', autospec=True)
@ -133,7 +133,7 @@ class TestHelperUpdateClusterStatus(BaseTestCase):
def setUp(self): def setUp(self):
super(TestHelperUpdateClusterStatus, self).setUp() super(TestHelperUpdateClusterStatus, self).setUp()
self.env.create( self.cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{'roles': ['controller']}, {'roles': ['controller']},
{'roles': ['compute', 'virt']}, {'roles': ['compute', 'virt']},
@ -148,10 +148,6 @@ class TestHelperUpdateClusterStatus(BaseTestCase):
for node in nodes: for node in nodes:
self.assertEqual(node.status, 'discover') self.assertEqual(node.status, 'discover')
@property
def cluster(self):
return self.env.clusters[0]
def test_update_nodes_to_error_if_deployment_task_failed(self): def test_update_nodes_to_error_if_deployment_task_failed(self):
self.cluster.nodes[0].status = 'deploying' self.cluster.nodes[0].status = 'deploying'
self.cluster.nodes[0].progress = 12 self.cluster.nodes[0].progress = 12
@ -288,7 +284,7 @@ class TestCheckBeforeDeploymentTask(BaseTestCase):
def setUp(self): def setUp(self):
super(TestCheckBeforeDeploymentTask, self).setUp() super(TestCheckBeforeDeploymentTask, self).setUp()
self.env.create( self.cluster = self.env.create(
release_kwargs={'version': '1111-8.0'}, release_kwargs={'version': '1111-8.0'},
cluster_kwargs={ cluster_kwargs={
'net_provider': 'neutron', 'net_provider': 'neutron',
@ -305,8 +301,7 @@ class TestCheckBeforeDeploymentTask(BaseTestCase):
self.env.create_node() self.env.create_node()
self.node = self.env.nodes[0] self.node = self.env.nodes[0]
self.cluster = self.env.clusters[0] self.task = Task(cluster_id=self.cluster.id)
self.task = Task(cluster_id=self.env.clusters[0].id)
self.env.db.add(self.task) self.env.db.add(self.task)
self.env.db.commit() self.env.db.commit()
@ -530,7 +525,7 @@ class TestCheckBeforeDeploymentTask(BaseTestCase):
) )
def test_check_public_networks(self): def test_check_public_networks(self):
cluster = self.env.clusters[0] cluster = self.cluster
self.env.create_nodes( self.env.create_nodes(
2, api=True, roles=['controller'], cluster_id=cluster.id) 2, api=True, roles=['controller'], cluster_id=cluster.id)
self.env.create_nodes( self.env.create_nodes(
@ -674,8 +669,7 @@ class TestCheckBeforeDeploymentTask(BaseTestCase):
class TestDeployTask(BaseTestCase): class TestDeployTask(BaseTestCase):
def create_deploy_tasks(self): def create_deploy_tasks(self):
self.env.create() cluster = self.env.create()
cluster = self.env.clusters[0]
deploy_task = Task(name=consts.TASK_NAMES.deploy, deploy_task = Task(name=consts.TASK_NAMES.deploy,
cluster_id=cluster.id, cluster_id=cluster.id,

View File

@ -24,12 +24,11 @@ class TestTaskHandlers(BaseTestCase):
def setUp(self): def setUp(self):
super(TestTaskHandlers, self).setUp() super(TestTaskHandlers, self).setUp()
self.env.create( self.cluster_db = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{"roles": ["controller"]} {"roles": ["controller"]}
] ]
) )
self.cluster_db = self.env.clusters[0]
def test_task_deletion(self): def test_task_deletion(self):
task = Task( task = Task(

View File

@ -208,12 +208,11 @@ class TestTaskHelpers(BaseTestCase):
self.assertDictEqual(expected, actual) self.assertDictEqual(expected, actual)
def test_prepare_action_log_kwargs_with_web_ctx(self): def test_prepare_action_log_kwargs_with_web_ctx(self):
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{'roles': ['compute'], 'provisioning': True}, {'roles': ['compute'], 'provisioning': True},
] ]
) )
cluster = self.env.clusters[0]
task = Task(name='provision', cluster_id=cluster.id) task = Task(name='provision', cluster_id=cluster.id)
self.db.add(task) self.db.add(task)
self.db.flush() self.db.flush()
@ -231,13 +230,12 @@ class TestTaskHelpers(BaseTestCase):
self.assertIsNone(kwargs['actor_id']) self.assertIsNone(kwargs['actor_id'])
def test_prepare_action_log_kwargs_without_web_ctx(self): def test_prepare_action_log_kwargs_without_web_ctx(self):
self.env.create( cluster = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{'roles': ['compute'], 'pending_addition': True}, {'roles': ['compute'], 'pending_addition': True},
{'roles': ['controller'], 'pending_addition': True}, {'roles': ['controller'], 'pending_addition': True},
] ]
) )
cluster = self.env.clusters[0]
deployment_task = Task(name='deployment', cluster_id=cluster.id) deployment_task = Task(name='deployment', cluster_id=cluster.id)
self.db.add(deployment_task) self.db.add(deployment_task)
self.db.flush() self.db.flush()

View File

@ -27,8 +27,7 @@ class TestDeploymentCheckMixin(BaseTestCase):
def setUp(self): def setUp(self):
super(TestDeploymentCheckMixin, self).setUp() super(TestDeploymentCheckMixin, self).setUp()
self.env.create() self.cluster = self.env.create()
self.cluster = self.env.clusters[0]
def test_fails_if_there_is_task(self): def test_fails_if_there_is_task(self):
for task_name in DeploymentCheckMixin.deployment_tasks: for task_name in DeploymentCheckMixin.deployment_tasks:

View File

@ -25,12 +25,11 @@ class TestTransactionHandlers(BaseTestCase):
def setUp(self): def setUp(self):
super(TestTransactionHandlers, self).setUp() super(TestTransactionHandlers, self).setUp()
self.env.create( self.cluster_db = self.env.create(
nodes_kwargs=[ nodes_kwargs=[
{"roles": ["controller"]} {"roles": ["controller"]}
] ]
) )
self.cluster_db = self.env.clusters[0]
def test_transaction_deletion(self): def test_transaction_deletion(self):
task = Task( task = Task(

View File

@ -26,7 +26,7 @@ from nailgun.test.base import BaseTestCase
class TestAttributesValidator(BaseTestCase): class TestAttributesValidator(BaseTestCase):
def setUp(self): def setUp(self):
super(TestAttributesValidator, self).setUp() super(TestAttributesValidator, self).setUp()
self.env.create( self.cluster = self.env.create(
cluster_kwargs={ cluster_kwargs={
"api": False, "api": False,
"vmware_attributes": { "vmware_attributes": {
@ -38,7 +38,6 @@ class TestAttributesValidator(BaseTestCase):
"status": consts.NODE_STATUSES.ready "status": consts.NODE_STATUSES.ready
}] }]
) )
self.cluster = self.env.clusters[0]
self.ready_compute_node = self.env.create_node( self.ready_compute_node = self.env.create_node(
hostname="node-1", hostname="node-1",
name="Node 1", name="Node 1",