Blackify openstack.instance_ha
Black used with the '-l 79 -S' flags. A future change will ignore this
commit in git-blame history by adding a 'git-blame-ignore-revs' file.

Change-Id: Ifa4d0af2f9de7bd0635f3f87e2d241f2fe26ddf8
Signed-off-by: Stephen Finucane <stephenfin@redhat.com>
commit 33bed57501
parent 10018dbf5b
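For context, a minimal sketch of how a reformatting run like this and the follow-up blame exclusion are typically wired up; the target path and the '.git-blame-ignore-revs' file name are assumptions for illustration, not part of this commit:

    import subprocess

    # Reformat the package the way the commit message describes:
    # 79-character line length (-l 79), no quote normalization (-S).
    subprocess.run(
        ['black', '-l', '79', '-S', 'openstack/instance_ha'], check=True
    )

    # Once a follow-up change adds a .git-blame-ignore-revs file listing this
    # commit's SHA, git blame can be told to skip it:
    subprocess.run(
        ['git', 'config', 'blame.ignoreRevsFile', '.git-blame-ignore-revs'],
        check=True,
    )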
@@ -122,8 +122,9 @@ class Proxy(proxy.Proxy):
             attempting to delete a nonexistent segment.
         :returns: ``None``
         """
-        return self._delete(_segment.Segment, segment,
-                            ignore_missing=ignore_missing)
+        return self._delete(
+            _segment.Segment, segment, ignore_missing=ignore_missing
+        )
 
     def hosts(self, segment_id, **query):
         """Return a generator of hosts.
@@ -182,8 +183,9 @@ class Proxy(proxy.Proxy):
             when segment_id is None.
         """
        host_id = resource.Resource._get_id(host)
-        return self._update(_host.Host, host_id, segment_id=segment_id,
-                            **attrs)
+        return self._update(
+            _host.Host, host_id, segment_id=segment_id, **attrs
+        )
 
     def delete_host(self, host, segment_id=None, ignore_missing=True):
         """Delete the host.
@@ -208,5 +210,9 @@ class Proxy(proxy.Proxy):
             raise exceptions.InvalidRequest("'segment_id' must be specified.")
 
         host_id = resource.Resource._get_id(host)
-        return self._delete(_host.Host, host_id, segment_id=segment_id,
-                            ignore_missing=ignore_missing)
+        return self._delete(
+            _host.Host,
+            host_id,
+            segment_id=segment_id,
+            ignore_missing=ignore_missing,
+        )
@@ -56,5 +56,10 @@ class Host(resource.Resource):
     failover_segment_id = resource.Body("failover_segment_id")
 
     _query_mapping = resource.QueryParameters(
-        "sort_key", "sort_dir", failover_segment_id="failover_segment_id",
-        type="type", on_maintenance="on_maintenance", reserved="reserved")
+        "sort_key",
+        "sort_dir",
+        failover_segment_id="failover_segment_id",
+        type="type",
+        on_maintenance="on_maintenance",
+        reserved="reserved",
+    )
@@ -33,7 +33,8 @@ class RecoveryWorkflowDetailItem(resource.Resource):
     state = resource.Body("state")
     #: The progress details of this recovery workflow.
     progress_details = resource.Body(
-        "progress_details", type=list, list_type=ProgressDetailsItem)
+        "progress_details", type=list, list_type=ProgressDetailsItem
+    )
 
 
 class Notification(resource.Resource):
@@ -75,8 +76,15 @@ class Notification(resource.Resource):
     #: The recovery workflow details of this notification.
     recovery_workflow_details = resource.Body(
         "recovery_workflow_details",
-        type=list, list_type=RecoveryWorkflowDetailItem)
+        type=list,
+        list_type=RecoveryWorkflowDetailItem,
+    )
 
     _query_mapping = resource.QueryParameters(
-        "sort_key", "sort_dir", source_host_uuid="source_host_uuid",
-        type="type", status="status", generated_since="generated-since")
+        "sort_key",
+        "sort_dir",
+        source_host_uuid="source_host_uuid",
+        type="type",
+        status="status",
+        generated_since="generated-since",
+    )
@@ -55,5 +55,9 @@ class Segment(resource.Resource):
     is_enabled = resource.Body("enabled", type=bool)
 
     _query_mapping = resource.QueryParameters(
-        "sort_key", "sort_dir", recovery_method="recovery_method",
-        service_type="service_type", is_enabled="enabled")
+        "sort_key",
+        "sort_dir",
+        recovery_method="recovery_method",
+        service_type="service_type",
+        is_enabled="enabled",
+    )
@@ -25,48 +25,62 @@ def hypervisors():
     if HYPERVISORS:
         return True
     HYPERVISORS = connection.Connection.list_hypervisors(
-        connection.from_config(cloud_name=base.TEST_CLOUD_NAME))
+        connection.from_config(cloud_name=base.TEST_CLOUD_NAME)
+    )
     return bool(HYPERVISORS)
 
 
 class TestHost(base.BaseFunctionalTest):
-
     def setUp(self):
         super(TestHost, self).setUp()
         self.require_service('instance-ha')
         self.NAME = self.getUniqueString()
 
         if not hypervisors():
-            self.skipTest("Skip TestHost as there are no hypervisors "
-                          "configured in nova")
+            self.skipTest(
+                "Skip TestHost as there are no hypervisors "
+                "configured in nova"
+            )
 
         # Create segment
         self.segment = self.conn.ha.create_segment(
-            name=self.NAME, recovery_method='auto',
-            service_type='COMPUTE')
+            name=self.NAME, recovery_method='auto', service_type='COMPUTE'
+        )
 
         # Create valid host
         self.NAME = HYPERVISORS[0].name
         self.host = self.conn.ha.create_host(
-            segment_id=self.segment.uuid, name=self.NAME, type='COMPUTE',
-            control_attributes='SSH')
+            segment_id=self.segment.uuid,
+            name=self.NAME,
+            type='COMPUTE',
+            control_attributes='SSH',
+        )
 
         # Delete host
-        self.addCleanup(self.conn.ha.delete_host, self.segment.uuid,
-                        self.host.uuid)
+        self.addCleanup(
+            self.conn.ha.delete_host, self.segment.uuid, self.host.uuid
+        )
         # Delete segment
         self.addCleanup(self.conn.ha.delete_segment, self.segment.uuid)
 
     def test_list(self):
-        names = [o.name for o in self.conn.ha.hosts(
-            self.segment.uuid, failover_segment_id=self.segment.uuid,
-            type='COMPUTE')]
+        names = [
+            o.name
+            for o in self.conn.ha.hosts(
+                self.segment.uuid,
+                failover_segment_id=self.segment.uuid,
+                type='COMPUTE',
+            )
+        ]
         self.assertIn(self.NAME, names)
 
     def test_update(self):
-        updated_host = self.conn.ha.update_host(self.host['uuid'],
-                                                segment_id=self.segment.uuid,
-                                                on_maintenance='True')
-        get_host = self.conn.ha.get_host(updated_host.uuid,
-                                         updated_host.segment_id)
+        updated_host = self.conn.ha.update_host(
+            self.host['uuid'],
+            segment_id=self.segment.uuid,
+            on_maintenance='True',
+        )
+        get_host = self.conn.ha.get_host(
+            updated_host.uuid, updated_host.segment_id
+        )
         self.assertEqual(True, get_host.on_maintenance)
@@ -17,7 +17,6 @@ from openstack.tests.functional import base
 
 
 class TestSegment(base.BaseFunctionalTest):
-
     def setUp(self):
         super(TestSegment, self).setUp()
         self.require_service('instance-ha')
@@ -25,19 +24,19 @@ class TestSegment(base.BaseFunctionalTest):
 
         # Create segment
         self.segment = self.conn.ha.create_segment(
-            name=self.NAME, recovery_method='auto',
-            service_type='COMPUTE')
+            name=self.NAME, recovery_method='auto', service_type='COMPUTE'
+        )
 
         # Delete segment
         self.addCleanup(self.conn.ha.delete_segment, self.segment['uuid'])
 
     def test_list(self):
-        names = [o.name for o in self.conn.ha.segments(
-            recovery_method='auto')]
+        names = [o.name for o in self.conn.ha.segments(recovery_method='auto')]
         self.assertIn(self.NAME, names)
 
     def test_update(self):
-        updated_segment = self.conn.ha.update_segment(self.segment['uuid'],
-                                                      name='UPDATED-NAME')
+        updated_segment = self.conn.ha.update_segment(
+            self.segment['uuid'], name='UPDATED-NAME'
+        )
         get_updated_segment = self.conn.ha.get_segment(updated_segment.uuid)
         self.assertEqual('UPDATED-NAME', get_updated_segment.name)
@@ -18,10 +18,7 @@ from openstack.tests.unit import base
 FAKE_ID = "1c2f1795-ce78-4d4c-afd0-ce141fdb3952"
 FAKE_UUID = "11f7597f-87d2-4057-b754-ba611f989807"
 FAKE_HOST_ID = "c27dec16-ed4d-4ebe-8e77-f1e28ec32417"
-FAKE_CONTROL_ATTRIBUTES = {
-    "mcastaddr": "239.255.1.1",
-    "mcastport": "5405"
-}
+FAKE_CONTROL_ATTRIBUTES = {"mcastaddr": "239.255.1.1", "mcastport": "5405"}
 HOST = {
     "id": FAKE_ID,
     "uuid": FAKE_UUID,
@@ -33,12 +30,11 @@ HOST = {
     "control_attributes": FAKE_CONTROL_ATTRIBUTES,
     "on_maintenance": False,
     "reserved": False,
-    "failover_segment_id": FAKE_HOST_ID
+    "failover_segment_id": FAKE_HOST_ID,
 }
 
 
 class TestHost(base.TestCase):
-
     def test_basic(self):
         sot = host.Host(HOST)
         self.assertEqual("host", sot.resource_key)
@@ -50,15 +46,19 @@ class TestHost(base.TestCase):
         self.assertTrue(sot.allow_commit)
         self.assertTrue(sot.allow_delete)
 
-        self.assertDictEqual({"failover_segment_id": "failover_segment_id",
-                              "limit": "limit",
-                              "marker": "marker",
-                              "on_maintenance": "on_maintenance",
-                              "reserved": "reserved",
-                              "sort_dir": "sort_dir",
-                              "sort_key": "sort_key",
-                              "type": "type"},
-                             sot._query_mapping._mapping)
+        self.assertDictEqual(
+            {
+                "failover_segment_id": "failover_segment_id",
+                "limit": "limit",
+                "marker": "marker",
+                "on_maintenance": "on_maintenance",
+                "reserved": "reserved",
+                "sort_dir": "sort_dir",
+                "sort_key": "sort_key",
+                "type": "type",
+            },
+            sot._query_mapping._mapping,
+        )
 
     def test_create(self):
         sot = host.Host(**HOST)
@@ -21,17 +21,26 @@ FAKE_HOST_UUID = "cad9ff01-c354-4414-ba3c-31b925be67f1"
 PAYLOAD = {
     "instance_uuid": "4032bc1d-d723-47f6-b5ac-b9b3e6dbb795",
     "vir_domain_event": "STOPPED_FAILED",
-    "event": "LIFECYCLE"
+    "event": "LIFECYCLE",
 }
 
-PROGRESS_DETAILS = [{"timestamp": "2019-02-28 07:21:33.291810",
-                     "progress": 1.0,
-                     "message": "Skipping recovery for process "
-                                "nova-compute as it is already disabled"}]
+PROGRESS_DETAILS = [
+    {
+        "timestamp": "2019-02-28 07:21:33.291810",
+        "progress": 1.0,
+        "message": "Skipping recovery for process "
+        "nova-compute as it is already disabled",
+    }
+]
 
-RECOVERY_WORKFLOW_DETAILS = [{"progress": 1.0, "state": "SUCCESS",
-                              "name": "DisableComputeNodeTask",
-                              "progress_details": PROGRESS_DETAILS}]
+RECOVERY_WORKFLOW_DETAILS = [
+    {
+        "progress": 1.0,
+        "state": "SUCCESS",
+        "name": "DisableComputeNodeTask",
+        "progress_details": PROGRESS_DETAILS,
+    }
+]
 
 NOTIFICATION = {
     "id": FAKE_ID,
@@ -44,12 +53,11 @@ NOTIFICATION = {
     "generated_time": "2018-03-21T00:00:00.000000",
     "payload": PAYLOAD,
     "source_host_uuid": FAKE_HOST_UUID,
-    "recovery_workflow_details": RECOVERY_WORKFLOW_DETAILS
+    "recovery_workflow_details": RECOVERY_WORKFLOW_DETAILS,
 }
 
 
 class TestNotification(base.TestCase):
-
     def test_basic(self):
         sot = notification.Notification(NOTIFICATION)
         self.assertEqual("notification", sot.resource_key)
@@ -61,22 +69,27 @@ class TestNotification(base.TestCase):
         self.assertFalse(sot.allow_commit)
         self.assertFalse(sot.allow_delete)
 
-        self.assertDictEqual({"generated_since": "generated-since",
-                              "limit": "limit",
-                              "marker": "marker",
-                              "sort_dir": "sort_dir",
-                              "sort_key": "sort_key",
-                              "source_host_uuid": "source_host_uuid",
-                              "status": "status",
-                              "type": "type"},
-                             sot._query_mapping._mapping)
+        self.assertDictEqual(
+            {
+                "generated_since": "generated-since",
+                "limit": "limit",
+                "marker": "marker",
+                "sort_dir": "sort_dir",
+                "sort_key": "sort_key",
+                "source_host_uuid": "source_host_uuid",
+                "status": "status",
+                "type": "type",
+            },
+            sot._query_mapping._mapping,
+        )
 
     def test_create(self):
         sot = notification.Notification(**NOTIFICATION)
         rec_workflow_details = NOTIFICATION["recovery_workflow_details"][0]
         self.assertEqual(NOTIFICATION["id"], sot.id)
         self.assertEqual(
-            NOTIFICATION["notification_uuid"], sot.notification_uuid)
+            NOTIFICATION["notification_uuid"], sot.notification_uuid
+        )
         self.assertEqual(NOTIFICATION["created_at"], sot.created_at)
         self.assertEqual(NOTIFICATION["updated_at"], sot.updated_at)
         self.assertEqual(NOTIFICATION["type"], sot.type)
@@ -85,27 +98,40 @@
         self.assertEqual(NOTIFICATION["generated_time"], sot.generated_time)
         self.assertEqual(NOTIFICATION["payload"], sot.payload)
         self.assertEqual(
-            NOTIFICATION["source_host_uuid"], sot.source_host_uuid)
-        self.assertEqual(rec_workflow_details["name"],
-                         sot.recovery_workflow_details[0].name)
-        self.assertEqual(rec_workflow_details["state"],
-                         sot.recovery_workflow_details[0].state)
-        self.assertEqual(rec_workflow_details["progress"],
-                         sot.recovery_workflow_details[0].progress)
+            NOTIFICATION["source_host_uuid"], sot.source_host_uuid
+        )
+        self.assertEqual(
+            rec_workflow_details["name"], sot.recovery_workflow_details[0].name
+        )
+        self.assertEqual(
+            rec_workflow_details["state"],
+            sot.recovery_workflow_details[0].state,
+        )
+        self.assertEqual(
+            rec_workflow_details["progress"],
+            sot.recovery_workflow_details[0].progress,
+        )
         self.assertEqual(
             rec_workflow_details["progress_details"][0]['progress'],
-            sot.recovery_workflow_details[0].progress_details[0].progress)
+            sot.recovery_workflow_details[0].progress_details[0].progress,
+        )
         self.assertEqual(
             rec_workflow_details["progress_details"][0]['message'],
-            sot.recovery_workflow_details[0].progress_details[0].message)
+            sot.recovery_workflow_details[0].progress_details[0].message,
+        )
         self.assertEqual(
             rec_workflow_details["progress_details"][0]['timestamp'],
-            sot.recovery_workflow_details[0].progress_details[0].timestamp)
+            sot.recovery_workflow_details[0].progress_details[0].timestamp,
+        )
         self.assertIsInstance(sot.recovery_workflow_details, list)
         self.assertIsInstance(
-            sot.recovery_workflow_details[0].progress_details, list)
-        self.assertIsInstance(sot.recovery_workflow_details[0],
-                              notification.RecoveryWorkflowDetailItem)
+            sot.recovery_workflow_details[0].progress_details, list
+        )
+        self.assertIsInstance(
+            sot.recovery_workflow_details[0],
+            notification.RecoveryWorkflowDetailItem,
+        )
         self.assertIsInstance(
             sot.recovery_workflow_details[0].progress_details[0],
-            notification.ProgressDetailsItem)
+            notification.ProgressDetailsItem,
+        )
@@ -30,38 +30,48 @@ class TestInstanceHaProxy(test_proxy_base.TestProxyBase):
 
 class TestInstanceHaHosts(TestInstanceHaProxy):
     def test_hosts(self):
-        self.verify_list(self.proxy.hosts,
-                         host.Host,
-                         method_args=[SEGMENT_ID],
-                         expected_args=[],
-                         expected_kwargs={"segment_id": SEGMENT_ID})
+        self.verify_list(
+            self.proxy.hosts,
+            host.Host,
+            method_args=[SEGMENT_ID],
+            expected_args=[],
+            expected_kwargs={"segment_id": SEGMENT_ID},
+        )
 
     def test_host_get(self):
-        self.verify_get(self.proxy.get_host,
-                        host.Host,
-                        method_args=[HOST_ID],
-                        method_kwargs={"segment_id": SEGMENT_ID},
-                        expected_kwargs={"segment_id": SEGMENT_ID})
+        self.verify_get(
+            self.proxy.get_host,
+            host.Host,
+            method_args=[HOST_ID],
+            method_kwargs={"segment_id": SEGMENT_ID},
+            expected_kwargs={"segment_id": SEGMENT_ID},
+        )
 
     def test_host_create(self):
-        self.verify_create(self.proxy.create_host,
-                           host.Host,
-                           method_args=[SEGMENT_ID],
-                           method_kwargs={},
-                           expected_args=[],
-                           expected_kwargs={"segment_id": SEGMENT_ID})
+        self.verify_create(
+            self.proxy.create_host,
+            host.Host,
+            method_args=[SEGMENT_ID],
+            method_kwargs={},
+            expected_args=[],
+            expected_kwargs={"segment_id": SEGMENT_ID},
+        )
 
     def test_host_update(self):
-        self.verify_update(self.proxy.update_host,
-                           host.Host,
-                           method_kwargs={"segment_id": SEGMENT_ID})
+        self.verify_update(
+            self.proxy.update_host,
+            host.Host,
+            method_kwargs={"segment_id": SEGMENT_ID},
+        )
 
     def test_host_delete(self):
-        self.verify_delete(self.proxy.delete_host,
-                           host.Host,
-                           True,
-                           method_kwargs={"segment_id": SEGMENT_ID},
-                           expected_kwargs={"segment_id": SEGMENT_ID})
+        self.verify_delete(
+            self.proxy.delete_host,
+            host.Host,
+            True,
+            method_kwargs={"segment_id": SEGMENT_ID},
+            expected_kwargs={"segment_id": SEGMENT_ID},
+        )
 
 
 class TestInstanceHaNotifications(TestInstanceHaProxy):
@@ -69,12 +79,12 @@ class TestInstanceHaNotifications(TestInstanceHaProxy):
         self.verify_list(self.proxy.notifications, notification.Notification)
 
     def test_notification_get(self):
-        self.verify_get(self.proxy.get_notification,
-                        notification.Notification)
+        self.verify_get(self.proxy.get_notification, notification.Notification)
 
     def test_notification_create(self):
-        self.verify_create(self.proxy.create_notification,
-                           notification.Notification)
+        self.verify_create(
+            self.proxy.create_notification, notification.Notification
+        )
 
 
 class TestInstanceHaSegments(TestInstanceHaProxy):
@@ -31,7 +31,6 @@ SEGMENT = {
 
 
 class TestSegment(base.TestCase):
-
     def test_basic(self):
         sot = segment.Segment(SEGMENT)
         self.assertEqual("segment", sot.resource_key)
@@ -43,14 +42,18 @@ class TestSegment(base.TestCase):
         self.assertTrue(sot.allow_commit)
         self.assertTrue(sot.allow_delete)
 
-        self.assertDictEqual({"limit": "limit",
-                              "marker": "marker",
-                              "recovery_method": "recovery_method",
-                              "service_type": "service_type",
-                              "is_enabled": "enabled",
-                              "sort_dir": "sort_dir",
-                              "sort_key": "sort_key"},
-                             sot._query_mapping._mapping)
+        self.assertDictEqual(
+            {
+                "limit": "limit",
+                "marker": "marker",
+                "recovery_method": "recovery_method",
+                "service_type": "service_type",
+                "is_enabled": "enabled",
+                "sort_dir": "sort_dir",
+                "sort_key": "sort_key",
+            },
+            sot._query_mapping._mapping,
+        )
 
     def test_create(self):
         sot = segment.Segment(**SEGMENT)