Blackify openstack.clustering

Black was used with the '-l 79 -S' flags.
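
For reference, a minimal sketch of the invocation (the target paths are illustrative, not taken from this change):

    black -l 79 -S openstack/clustering openstack/tests/  # line length 79, keep existing quotes

'-l 79' caps lines at 79 characters and '-S' (--skip-string-normalization) leaves the existing single-quoted strings untouched.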

A future change will ignore this commit in git-blame history by adding a
'git-blame-ignore-revs' file.
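
A sketch of how that usually works (the exact file name and revision list are up to that follow-up change): the hash of this commit is recorded in the ignore file and git is told to consult it, e.g.

    echo c2ff7336ce >> .git-blame-ignore-revs   # the full 40-character SHA in practice
    git config blame.ignoreRevsFile .git-blame-ignore-revs

'git blame' then attributes the reformatted lines to their pre-Black authors.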

Change-Id: I4f7bb54ac0e751ab73479cf17f19593f6bc73014
Signed-off-by: Stephen Finucane <stephenfin@redhat.com>
Stephen Finucane 2023-05-05 11:23:59 +01:00
parent 073abda5a9
commit c2ff7336ce
26 changed files with 296 additions and 267 deletions


@ -16,7 +16,6 @@ from openstack import resource
class AsyncResource(resource.Resource):
def delete(self, session, error_message=None):
"""Delete the remote resource based on this instance.
@ -39,6 +38,6 @@ class AsyncResource(resource.Resource):
location = response.headers['Location']
action_id = location.split('/')[-1]
action = _action.Action.existing(
id=action_id,
connection=self._connection)
id=action_id, connection=self._connection
)
return action


@ -132,8 +132,9 @@ class Proxy(proxy.Proxy):
:returns: One :class:`~openstack.clustering.v1.profile.Profile` object
or None
"""
return self._find(_profile.Profile, name_or_id,
ignore_missing=ignore_missing)
return self._find(
_profile.Profile, name_or_id, ignore_missing=ignore_missing
)
def get_profile(self, profile):
"""Get a single profile.
@ -231,8 +232,9 @@ class Proxy(proxy.Proxy):
server = self._get_resource(_cluster.Cluster, cluster)
return server.force_delete(self)
else:
return self._delete(_cluster.Cluster, cluster,
ignore_missing=ignore_missing)
return self._delete(
_cluster.Cluster, cluster, ignore_missing=ignore_missing
)
def find_cluster(self, name_or_id, ignore_missing=True):
"""Find a single cluster.
@ -246,8 +248,9 @@ class Proxy(proxy.Proxy):
:returns: One :class:`~openstack.clustering.v1.cluster.Cluster` object
or None
"""
return self._find(_cluster.Cluster, name_or_id,
ignore_missing=ignore_missing)
return self._find(
_cluster.Cluster, name_or_id, ignore_missing=ignore_missing
)
def get_cluster(self, cluster):
"""Get a single cluster.
@ -495,8 +498,9 @@ class Proxy(proxy.Proxy):
:returns: A dictionary containing the list of attribute values.
"""
return self._list(_cluster_attr.ClusterAttr, cluster_id=cluster,
path=path)
return self._list(
_cluster_attr.ClusterAttr, cluster_id=cluster, path=path
)
def check_cluster(self, cluster, **params):
"""Check a cluster.
@ -569,8 +573,9 @@ class Proxy(proxy.Proxy):
server = self._get_resource(_node.Node, node)
return server.force_delete(self)
else:
return self._delete(_node.Node, node,
ignore_missing=ignore_missing)
return self._delete(
_node.Node, node, ignore_missing=ignore_missing
)
def find_node(self, name_or_id, ignore_missing=True):
"""Find a single node.
@ -584,8 +589,9 @@ class Proxy(proxy.Proxy):
:returns: One :class:`~openstack.clustering.v1.node.Node` object
or None.
"""
return self._find(_node.Node, name_or_id,
ignore_missing=ignore_missing)
return self._find(
_node.Node, name_or_id, ignore_missing=ignore_missing
)
def get_node(self, node, details=False):
"""Get a single node.
@ -755,8 +761,9 @@ class Proxy(proxy.Proxy):
:returns: A policy object or None.
:rtype: :class:`~openstack.clustering.v1.policy.Policy`
"""
return self._find(_policy.Policy, name_or_id,
ignore_missing=ignore_missing)
return self._find(
_policy.Policy, name_or_id, ignore_missing=ignore_missing
)
def get_policy(self, policy):
"""Get a single policy.
@ -834,8 +841,9 @@ class Proxy(proxy.Proxy):
:returns: A generator of cluster-policy binding instances.
"""
cluster_id = resource.Resource._get_id(cluster)
return self._list(_cluster_policy.ClusterPolicy, cluster_id=cluster_id,
**query)
return self._list(
_cluster_policy.ClusterPolicy, cluster_id=cluster_id, **query
)
def get_cluster_policy(self, cluster_policy, cluster):
"""Get a cluster-policy binding.
@ -851,8 +859,9 @@ class Proxy(proxy.Proxy):
:raises: :class:`~openstack.exceptions.ResourceNotFound` when no
cluster-policy binding matching the criteria could be found.
"""
return self._get(_cluster_policy.ClusterPolicy, cluster_policy,
cluster_id=cluster)
return self._get(
_cluster_policy.ClusterPolicy, cluster_policy, cluster_id=cluster
)
def create_receiver(self, **attrs):
"""Create a new receiver from attributes.
@ -890,8 +899,9 @@ class Proxy(proxy.Proxy):
:returns: ``None``
"""
self._delete(_receiver.Receiver, receiver,
ignore_missing=ignore_missing)
self._delete(
_receiver.Receiver, receiver, ignore_missing=ignore_missing
)
def find_receiver(self, name_or_id, ignore_missing=True):
"""Find a single receiver.
@ -905,8 +915,9 @@ class Proxy(proxy.Proxy):
:returns: A receiver object or None.
:rtype: :class:`~openstack.clustering.v1.receiver.Receiver`
"""
return self._find(_receiver.Receiver, name_or_id,
ignore_missing=ignore_missing)
return self._find(
_receiver.Receiver, name_or_id, ignore_missing=ignore_missing
)
def get_receiver(self, receiver):
"""Get a single receiver.
@ -1035,8 +1046,9 @@ class Proxy(proxy.Proxy):
"""
return self._list(_event.Event, **query)
def wait_for_status(self, res, status, failures=None, interval=2,
wait=120):
def wait_for_status(
self, res, status, failures=None, interval=2, wait=120
):
"""Wait for a resource to be in a particular status.
:param res: The resource to wait on to reach the specified status.
@ -1059,7 +1071,8 @@ class Proxy(proxy.Proxy):
"""
failures = [] if failures is None else failures
return resource.wait_for_status(
self, res, status, failures, interval, wait)
self, res, status, failures, interval, wait
)
def wait_for_delete(self, res, interval=2, wait=120):
"""Wait for a resource to be deleted.


@ -27,8 +27,14 @@ class Action(resource.Resource):
commit_method = 'PATCH'
_query_mapping = resource.QueryParameters(
'name', 'action', 'status', 'sort', 'global_project',
'cluster_id', target_id='target')
'name',
'action',
'status',
'sort',
'global_project',
'cluster_id',
target_id='target',
)
# Properties
#: Name of the action.


@ -29,7 +29,8 @@ class Cluster(_async_resource.AsyncResource, metadata.MetadataMixin):
commit_method = 'PATCH'
_query_mapping = resource.QueryParameters(
'name', 'status', 'sort', 'global_project')
'name', 'status', 'sort', 'global_project'
)
# Properties
#: The name of the cluster.
@ -96,9 +97,7 @@ class Cluster(_async_resource.AsyncResource, metadata.MetadataMixin):
def del_nodes(self, session, nodes, **params):
data = {'nodes': nodes}
data.update(params)
body = {
'del_nodes': data
}
body = {'del_nodes': data}
return self.action(session, body)
def replace_nodes(self, session, nodes):
@ -126,17 +125,13 @@ class Cluster(_async_resource.AsyncResource, metadata.MetadataMixin):
return self.action(session, body)
def resize(self, session, **params):
body = {
'resize': params
}
body = {'resize': params}
return self.action(session, body)
def policy_attach(self, session, policy_id, **params):
data = {'policy_id': policy_id}
data.update(params)
body = {
'policy_attach': data
}
body = {'policy_attach': data}
return self.action(session, body)
def policy_detach(self, session, policy_id):
@ -150,21 +145,15 @@ class Cluster(_async_resource.AsyncResource, metadata.MetadataMixin):
def policy_update(self, session, policy_id, **params):
data = {'policy_id': policy_id}
data.update(params)
body = {
'policy_update': data
}
body = {'policy_update': data}
return self.action(session, body)
def check(self, session, **params):
body = {
'check': params
}
body = {'check': params}
return self.action(session, body)
def recover(self, session, **params):
body = {
'recover': params
}
body = {'recover': params}
return self.action(session, body)
def op(self, session, operation, **params):
@ -177,8 +166,7 @@ class Cluster(_async_resource.AsyncResource, metadata.MetadataMixin):
:returns: A dictionary containing the action ID.
"""
url = utils.urljoin(self.base_path, self.id, 'ops')
resp = session.post(url,
json={operation: params})
resp = session.post(url, json={operation: params})
return resp.json()
def force_delete(self, session):


@ -23,7 +23,8 @@ class ClusterPolicy(resource.Resource):
allow_fetch = True
_query_mapping = resource.QueryParameters(
'sort', 'policy_name', 'policy_type', is_enabled='enabled')
'sort', 'policy_name', 'policy_type', is_enabled='enabled'
)
# Properties
#: ID of the policy object.


@ -24,8 +24,14 @@ class Event(resource.Resource):
allow_fetch = True
_query_mapping = resource.QueryParameters(
'cluster_id', 'action', 'level', 'sort', 'global_project',
obj_id='oid', obj_name='oname', obj_type='otype',
'cluster_id',
'action',
'level',
'sort',
'global_project',
obj_id='oid',
obj_name='oname',
obj_type='otype',
)
# Properties


@ -30,8 +30,13 @@ class Node(_async_resource.AsyncResource):
commit_method = 'PATCH'
_query_mapping = resource.QueryParameters(
'show_details', 'name', 'sort', 'global_project', 'cluster_id',
'status')
'show_details',
'name',
'sort',
'global_project',
'cluster_id',
'status',
)
# Properties
#: The name of the node.
@ -98,9 +103,7 @@ class Node(_async_resource.AsyncResource):
:param session: A session object used for sending request.
:returns: A dictionary containing the action ID.
"""
body = {
'check': params
}
body = {'check': params}
return self._action(session, body)
def recover(self, session, **params):
@ -109,9 +112,7 @@ class Node(_async_resource.AsyncResource):
:param session: A session object used for sending request.
:returns: A dictionary containing the action ID.
"""
body = {
'recover': params
}
body = {'recover': params}
return self._action(session, body)
def op(self, session, operation, **params):
@ -124,8 +125,7 @@ class Node(_async_resource.AsyncResource):
:returns: A dictionary containing the action ID.
"""
url = utils.urljoin(self.base_path, self.id, 'ops')
resp = session.post(url,
json={operation: params})
resp = session.post(url, json={operation: params})
return resp.json()
def adopt(self, session, preview=False, **params):
@ -143,7 +143,7 @@ class Node(_async_resource.AsyncResource):
'identity': params.get('identity'),
'overrides': params.get('overrides'),
'type': params.get('type'),
'snapshot': params.get('snapshot')
'snapshot': params.get('snapshot'),
}
else:
path = 'adopt'


@ -28,7 +28,8 @@ class Policy(resource.Resource):
commit_method = 'PATCH'
_query_mapping = resource.QueryParameters(
'name', 'type', 'sort', 'global_project')
'name', 'type', 'sort', 'global_project'
)
# Properties
#: The name of the policy.


@ -28,7 +28,8 @@ class Profile(resource.Resource):
commit_method = 'PATCH'
_query_mapping = resource.QueryParameters(
'sort', 'global_project', 'type', 'name')
'sort', 'global_project', 'type', 'name'
)
# Properties
#: The name of the profile


@ -28,8 +28,14 @@ class Receiver(resource.Resource):
commit_method = 'PATCH'
_query_mapping = resource.QueryParameters(
'name', 'type', 'cluster_id', 'action', 'sort', 'global_project',
user_id='user')
'name',
'type',
'cluster_id',
'action',
'sort',
'global_project',
user_id='user',
)
# Properties
#: The name of the receiver.


@ -28,9 +28,8 @@ class TestCluster(base.BaseFunctionalTest):
self.cidr = '10.99.99.0/16'
self.network, self.subnet = test_network.create_network(
self.conn,
self.getUniqueString(),
self.cidr)
self.conn, self.getUniqueString(), self.cidr
)
self.assertIsNotNone(self.network)
profile_attrs = {
@ -42,8 +41,10 @@ class TestCluster(base.BaseFunctionalTest):
'name': self.getUniqueString(),
'flavor': self.flavor.name,
'image': self.image.name,
'networks': [{'network': self.network.id}]
}}}
'networks': [{'network': self.network.id}],
},
},
}
self.profile = self.conn.clustering.create_profile(**profile_attrs)
self.assertIsNotNone(self.profile)
@ -59,15 +60,16 @@ class TestCluster(base.BaseFunctionalTest):
self.cluster = self.conn.clustering.create_cluster(**cluster_spec)
self.conn.clustering.wait_for_status(
self.cluster, 'ACTIVE',
wait=self._wait_for_timeout)
self.cluster, 'ACTIVE', wait=self._wait_for_timeout
)
assert isinstance(self.cluster, cluster.Cluster)
def tearDown(self):
if self.cluster:
self.conn.clustering.delete_cluster(self.cluster.id)
self.conn.clustering.wait_for_delete(
self.cluster, wait=self._wait_for_timeout)
self.cluster, wait=self._wait_for_timeout
)
test_network.delete_network(self.conn, self.network, self.subnet)
@ -90,7 +92,8 @@ class TestCluster(base.BaseFunctionalTest):
def test_update(self):
new_cluster_name = self.getUniqueString()
sot = self.conn.clustering.update_cluster(
self.cluster, name=new_cluster_name, profile_only=False)
self.cluster, name=new_cluster_name, profile_only=False
)
time.sleep(2)
sot = self.conn.clustering.get_cluster(self.cluster)
@ -98,10 +101,12 @@ class TestCluster(base.BaseFunctionalTest):
def test_delete(self):
cluster_delete_action = self.conn.clustering.delete_cluster(
self.cluster.id)
self.cluster.id
)
self.conn.clustering.wait_for_delete(self.cluster,
wait=self._wait_for_timeout)
self.conn.clustering.wait_for_delete(
self.cluster, wait=self._wait_for_timeout
)
action = self.conn.clustering.get_action(cluster_delete_action.id)
self.assertEqual(action.target_id, self.cluster.id)
@ -112,10 +117,12 @@ class TestCluster(base.BaseFunctionalTest):
def test_force_delete(self):
cluster_delete_action = self.conn.clustering.delete_cluster(
self.cluster.id, False, True)
self.cluster.id, False, True
)
self.conn.clustering.wait_for_delete(self.cluster,
wait=self._wait_for_timeout)
self.conn.clustering.wait_for_delete(
self.cluster, wait=self._wait_for_timeout
)
action = self.conn.clustering.get_action(cluster_delete_action.id)
self.assertEqual(action.target_id, self.cluster.id)


@ -23,7 +23,6 @@ EXAMPLE = {
class TestVersion(base.TestCase):
def test_basic(self):
sot = version.Version()
self.assertEqual('version', sot.resource_key)


@ -45,7 +45,6 @@ FAKE = {
class TestAction(base.TestCase):
def setUp(self):
super(TestAction, self).setUp()


@ -20,12 +20,11 @@ FAKE = {
},
'engine': {
'revision': '1.0.0',
}
},
}
class TestBuildInfo(base.TestCase):
def setUp(self):
super(TestBuildInfo, self).setUp()


@ -65,7 +65,6 @@ FAKE_CREATE_RESP = {
class TestCluster(base.TestCase):
def setUp(self):
super(TestCluster, self).setUp()
@ -102,13 +101,17 @@ class TestCluster(base.TestCase):
self.assertEqual(FAKE['dependents'], sot.dependents)
self.assertTrue(sot.is_profile_only)
self.assertDictEqual({"limit": "limit",
"marker": "marker",
"name": "name",
"status": "status",
"sort": "sort",
"global_project": "global_project"},
sot._query_mapping._mapping)
self.assertDictEqual(
{
"limit": "limit",
"marker": "marker",
"name": "name",
"status": "status",
"sort": "sort",
"global_project": "global_project",
},
sot._query_mapping._mapping,
)
def test_scale_in(self):
sot = cluster.Cluster(**FAKE)
@ -120,8 +123,7 @@ class TestCluster(base.TestCase):
self.assertEqual('', sot.scale_in(sess, 3))
url = 'clusters/%s/actions' % sot.id
body = {'scale_in': {'count': 3}}
sess.post.assert_called_once_with(url,
json=body)
sess.post.assert_called_once_with(url, json=body)
def test_scale_out(self):
sot = cluster.Cluster(**FAKE)
@ -133,8 +135,7 @@ class TestCluster(base.TestCase):
self.assertEqual('', sot.scale_out(sess, 3))
url = 'clusters/%s/actions' % sot.id
body = {'scale_out': {'count': 3}}
sess.post.assert_called_once_with(url,
json=body)
sess.post.assert_called_once_with(url, json=body)
def test_resize(self):
sot = cluster.Cluster(**FAKE)
@ -146,8 +147,7 @@ class TestCluster(base.TestCase):
self.assertEqual('', sot.resize(sess, foo='bar', zoo=5))
url = 'clusters/%s/actions' % sot.id
body = {'resize': {'foo': 'bar', 'zoo': 5}}
sess.post.assert_called_once_with(url,
json=body)
sess.post.assert_called_once_with(url, json=body)
def test_add_nodes(self):
sot = cluster.Cluster(**FAKE)
@ -159,8 +159,7 @@ class TestCluster(base.TestCase):
self.assertEqual('', sot.add_nodes(sess, ['node-33']))
url = 'clusters/%s/actions' % sot.id
body = {'add_nodes': {'nodes': ['node-33']}}
sess.post.assert_called_once_with(url,
json=body)
sess.post.assert_called_once_with(url, json=body)
def test_del_nodes(self):
sot = cluster.Cluster(**FAKE)
@ -172,8 +171,7 @@ class TestCluster(base.TestCase):
self.assertEqual('', sot.del_nodes(sess, ['node-11']))
url = 'clusters/%s/actions' % sot.id
body = {'del_nodes': {'nodes': ['node-11']}}
sess.post.assert_called_once_with(url,
json=body)
sess.post.assert_called_once_with(url, json=body)
def test_del_nodes_with_params(self):
sot = cluster.Cluster(**FAKE)
@ -193,8 +191,7 @@ class TestCluster(base.TestCase):
'destroy_after_deletion': True,
}
}
sess.post.assert_called_once_with(url,
json=body)
sess.post.assert_called_once_with(url, json=body)
def test_replace_nodes(self):
sot = cluster.Cluster(**FAKE)
@ -206,8 +203,7 @@ class TestCluster(base.TestCase):
self.assertEqual('', sot.replace_nodes(sess, {'node-22': 'node-44'}))
url = 'clusters/%s/actions' % sot.id
body = {'replace_nodes': {'nodes': {'node-22': 'node-44'}}}
sess.post.assert_called_once_with(url,
json=body)
sess.post.assert_called_once_with(url, json=body)
def test_policy_attach(self):
sot = cluster.Cluster(**FAKE)
@ -228,8 +224,7 @@ class TestCluster(base.TestCase):
'enabled': True,
}
}
sess.post.assert_called_once_with(url,
json=body)
sess.post.assert_called_once_with(url, json=body)
def test_policy_detach(self):
sot = cluster.Cluster(**FAKE)
@ -242,8 +237,7 @@ class TestCluster(base.TestCase):
url = 'clusters/%s/actions' % sot.id
body = {'policy_detach': {'policy_id': 'POLICY'}}
sess.post.assert_called_once_with(url,
json=body)
sess.post.assert_called_once_with(url, json=body)
def test_policy_update(self):
sot = cluster.Cluster(**FAKE)
@ -252,20 +246,12 @@ class TestCluster(base.TestCase):
resp.json = mock.Mock(return_value='')
sess = mock.Mock()
sess.post = mock.Mock(return_value=resp)
params = {
'enabled': False
}
params = {'enabled': False}
self.assertEqual('', sot.policy_update(sess, 'POLICY', **params))
url = 'clusters/%s/actions' % sot.id
body = {
'policy_update': {
'policy_id': 'POLICY',
'enabled': False
}
}
sess.post.assert_called_once_with(url,
json=body)
body = {'policy_update': {'policy_id': 'POLICY', 'enabled': False}}
sess.post.assert_called_once_with(url, json=body)
def test_check(self):
sot = cluster.Cluster(**FAKE)
@ -277,8 +263,7 @@ class TestCluster(base.TestCase):
self.assertEqual('', sot.check(sess))
url = 'clusters/%s/actions' % sot.id
body = {'check': {}}
sess.post.assert_called_once_with(url,
json=body)
sess.post.assert_called_once_with(url, json=body)
def test_recover(self):
sot = cluster.Cluster(**FAKE)
@ -290,8 +275,7 @@ class TestCluster(base.TestCase):
self.assertEqual('', sot.recover(sess))
url = 'clusters/%s/actions' % sot.id
body = {'recover': {}}
sess.post.assert_called_once_with(url,
json=body)
sess.post.assert_called_once_with(url, json=body)
def test_operation(self):
sot = cluster.Cluster(**FAKE)
@ -303,8 +287,7 @@ class TestCluster(base.TestCase):
self.assertEqual('', sot.op(sess, 'dance', style='tango'))
url = 'clusters/%s/ops' % sot.id
body = {'dance': {'style': 'tango'}}
sess.post.assert_called_once_with(url,
json=body)
sess.post.assert_called_once_with(url, json=body)
def test_force_delete(self):
sot = cluster.Cluster(**FAKE)


@ -23,15 +23,15 @@ FAKE = {
class TestClusterAttr(base.TestCase):
def setUp(self):
super(TestClusterAttr, self).setUp()
def test_basic(self):
sot = ca.ClusterAttr()
self.assertEqual('cluster_attributes', sot.resources_key)
self.assertEqual('/clusters/%(cluster_id)s/attrs/%(path)s',
sot.base_path)
self.assertEqual(
'/clusters/%(cluster_id)s/attrs/%(path)s', sot.base_path
)
self.assertTrue(sot.allow_list)
def test_instantiate(self):


@ -26,7 +26,6 @@ FAKE = {
class TestClusterPolicy(base.TestCase):
def setUp(self):
super(TestClusterPolicy, self).setUp()
@ -34,18 +33,21 @@ class TestClusterPolicy(base.TestCase):
sot = cluster_policy.ClusterPolicy()
self.assertEqual('cluster_policy', sot.resource_key)
self.assertEqual('cluster_policies', sot.resources_key)
self.assertEqual('/clusters/%(cluster_id)s/policies',
sot.base_path)
self.assertEqual('/clusters/%(cluster_id)s/policies', sot.base_path)
self.assertTrue(sot.allow_fetch)
self.assertTrue(sot.allow_list)
self.assertDictEqual({"policy_name": "policy_name",
"policy_type": "policy_type",
"is_enabled": "enabled",
"sort": "sort",
"limit": "limit",
"marker": "marker"},
sot._query_mapping._mapping)
self.assertDictEqual(
{
"policy_name": "policy_name",
"policy_type": "policy_type",
"is_enabled": "enabled",
"sort": "sort",
"limit": "limit",
"marker": "marker",
},
sot._query_mapping._mapping,
)
def test_instantiate(self):
sot = cluster_policy.ClusterPolicy(**FAKE)


@ -28,16 +28,12 @@ FAKE = {
'timestamp': '2016-10-10T12:46:36.000000',
'user': '5e5bf8027826429c96af157f68dc9072',
'meta_data': {
"action": {
"created_at": "2019-07-13T13:18:18Z",
"outputs": {}
}
}
"action": {"created_at": "2019-07-13T13:18:18Z", "outputs": {}}
},
}
class TestEvent(base.TestCase):
def setUp(self):
super(TestEvent, self).setUp()


@ -33,12 +33,11 @@ FAKE = {
'created_at': '2015-10-10T12:46:36.000000',
'updated_at': '2016-10-10T12:46:36.000000',
'init_at': '2015-10-10T12:46:36.000000',
'tainted': True
'tainted': True,
}
class TestNode(base.TestCase):
def test_basic(self):
sot = node.Node()
self.assertEqual('node', sot.resource_key)
@ -78,8 +77,7 @@ class TestNode(base.TestCase):
self.assertEqual('', sot.check(sess))
url = 'nodes/%s/actions' % sot.id
body = {'check': {}}
sess.post.assert_called_once_with(url,
json=body)
sess.post.assert_called_once_with(url, json=body)
def test_recover(self):
sot = node.Node(**FAKE)
@ -91,8 +89,7 @@ class TestNode(base.TestCase):
self.assertEqual('', sot.recover(sess))
url = 'nodes/%s/actions' % sot.id
body = {'recover': {}}
sess.post.assert_called_once_with(url,
json=body)
sess.post.assert_called_once_with(url, json=body)
def test_operation(self):
sot = node.Node(**FAKE)
@ -103,8 +100,9 @@ class TestNode(base.TestCase):
sess.post = mock.Mock(return_value=resp)
self.assertEqual('', sot.op(sess, 'dance', style='tango'))
url = 'nodes/%s/ops' % sot.id
sess.post.assert_called_once_with(url,
json={'dance': {'style': 'tango'}})
sess.post.assert_called_once_with(
url, json={'dance': {'style': 'tango'}}
)
def test_adopt_preview(self):
sot = node.Node.new()
@ -118,12 +116,11 @@ class TestNode(base.TestCase):
'identity': 'fake-resource-id',
'overrides': {},
'type': 'os.nova.server-1.0',
'snapshot': False
'snapshot': False,
}
res = sot.adopt(sess, True, **attrs)
self.assertEqual({"foo": "bar"}, res)
sess.post.assert_called_once_with("nodes/adopt-preview",
json=attrs)
sess.post.assert_called_once_with("nodes/adopt-preview", json=attrs)
def test_adopt(self):
sot = node.Node.new()
@ -136,8 +133,9 @@ class TestNode(base.TestCase):
res = sot.adopt(sess, False, param="value")
self.assertEqual(sot, res)
sess.post.assert_called_once_with("nodes/adopt",
json={"param": "value"})
sess.post.assert_called_once_with(
"nodes/adopt", json={"param": "value"}
)
def test_force_delete(self):
sot = node.Node(**FAKE)
@ -158,7 +156,6 @@ class TestNode(base.TestCase):
class TestNodeDetail(base.TestCase):
def test_basic(self):
sot = node.NodeDetail()
self.assertEqual('/nodes/%(node_id)s?show_details=True', sot.base_path)


@ -28,7 +28,7 @@ FAKE = {
'grace_period': 60,
'reduce_desired_capacity': False,
'destroy_after_deletion': True,
}
},
},
'project': '42d9e9663331431f97b75e25136307ff',
'domain': '204ccccd267b40aea871750116b5b184',
@ -41,7 +41,6 @@ FAKE = {
class TestPolicy(base.TestCase):
def setUp(self):
super(TestPolicy, self).setUp()
@ -70,7 +69,6 @@ class TestPolicy(base.TestCase):
class TestPolicyValidate(base.TestCase):
def setUp(self):
super(TestPolicyValidate, self).setUp()


@ -16,20 +16,12 @@ from openstack.tests.unit import base
FAKE = {
'name': 'FAKE_POLICY_TYPE',
'schema': {
'foo': 'bar'
},
'support_status': {
'1.0': [{
'status': 'supported',
'since': '2016.10'
}]
}
'schema': {'foo': 'bar'},
'support_status': {'1.0': [{'status': 'supported', 'since': '2016.10'}]},
}
class TestPolicyType(base.TestCase):
def test_basic(self):
sot = policy_type.PolicyType()
self.assertEqual('policy_type', sot.resource_key)


@ -28,8 +28,8 @@ FAKE = {
'flavor': 1,
'image': 'cirros-0.3.2-x86_64-uec',
'key_name': 'oskey',
'name': 'cirros_server'
}
'name': 'cirros_server',
},
},
'project': '42d9e9663331431f97b75e25136307ff',
'domain': '204ccccd267b40aea871750116b5b184',
@ -41,7 +41,6 @@ FAKE = {
class TestProfile(base.TestCase):
def setUp(self):
super(TestProfile, self).setUp()
@ -72,7 +71,6 @@ class TestProfile(base.TestCase):
class TestProfileValidate(base.TestCase):
def setUp(self):
super(TestProfileValidate, self).setUp()


@ -17,20 +17,19 @@ from openstack.tests.unit import base
FAKE = {
'name': 'FAKE_PROFILE_TYPE',
'schema': {
'foo': 'bar'
},
'schema': {'foo': 'bar'},
'support_status': {
'1.0': [{
'status': 'supported',
'since': '2016.10',
}]
}
'1.0': [
{
'status': 'supported',
'since': '2016.10',
}
]
},
}
class TestProfileType(base.TestCase):
def test_basic(self):
sot = profile_type.ProfileType()
self.assertEqual('profile_type', sot.resource_key)


@ -36,17 +36,18 @@ class TestClusterProxy(test_proxy_base.TestProxyBase):
self.proxy = _proxy.Proxy(self.session)
def test_build_info_get(self):
self.verify_get(self.proxy.get_build_info, build_info.BuildInfo,
method_args=[],
expected_kwargs={'requires_id': False})
self.verify_get(
self.proxy.get_build_info,
build_info.BuildInfo,
method_args=[],
expected_kwargs={'requires_id': False},
)
def test_profile_types(self):
self.verify_list(self.proxy.profile_types,
profile_type.ProfileType)
self.verify_list(self.proxy.profile_types, profile_type.ProfileType)
def test_profile_type_get(self):
self.verify_get(self.proxy.get_profile_type,
profile_type.ProfileType)
self.verify_get(self.proxy.get_profile_type, profile_type.ProfileType)
def test_policy_types(self):
self.verify_list(self.proxy.policy_types, policy_type.PolicyType)
@ -58,8 +59,9 @@ class TestClusterProxy(test_proxy_base.TestProxyBase):
self.verify_create(self.proxy.create_profile, profile.Profile)
def test_profile_validate(self):
self.verify_create(self.proxy.validate_profile,
profile.ProfileValidate)
self.verify_create(
self.proxy.validate_profile, profile.ProfileValidate
)
def test_profile_delete(self):
self.verify_delete(self.proxy.delete_profile, profile.Profile, False)
@ -74,9 +76,12 @@ class TestClusterProxy(test_proxy_base.TestProxyBase):
self.verify_get(self.proxy.get_profile, profile.Profile)
def test_profiles(self):
self.verify_list(self.proxy.profiles, profile.Profile,
method_kwargs={'limit': 2},
expected_kwargs={'limit': 2})
self.verify_list(
self.proxy.profiles,
profile.Profile,
method_kwargs={'limit': 2},
expected_kwargs={'limit': 2},
)
def test_profile_update(self):
self.verify_update(self.proxy.update_profile, profile.Profile)
@ -95,7 +100,8 @@ class TestClusterProxy(test_proxy_base.TestProxyBase):
"openstack.clustering.v1.cluster.Cluster.force_delete",
self.proxy.delete_cluster,
method_args=["value", False, True],
expected_args=[self.proxy])
expected_args=[self.proxy],
)
def test_cluster_find(self):
self.verify_find(self.proxy.find_cluster, cluster.Cluster)
@ -104,16 +110,18 @@ class TestClusterProxy(test_proxy_base.TestProxyBase):
self.verify_get(self.proxy.get_cluster, cluster.Cluster)
def test_clusters(self):
self.verify_list(self.proxy.clusters, cluster.Cluster,
method_kwargs={'limit': 2},
expected_kwargs={'limit': 2})
self.verify_list(
self.proxy.clusters,
cluster.Cluster,
method_kwargs={'limit': 2},
expected_kwargs={'limit': 2},
)
def test_cluster_update(self):
self.verify_update(self.proxy.update_cluster, cluster.Cluster)
def test_services(self):
self.verify_list(self.proxy.services,
service.Service)
self.verify_list(self.proxy.services, service.Service)
@mock.patch.object(proxy_base.Proxy, '_find')
def test_resize_cluster(self, mock_find):
@ -125,9 +133,11 @@ class TestClusterProxy(test_proxy_base.TestProxyBase):
method_args=["FAKE_CLUSTER"],
method_kwargs={'k1': 'v1', 'k2': 'v2'},
expected_args=[self.proxy],
expected_kwargs={'k1': 'v1', 'k2': 'v2'})
mock_find.assert_called_once_with(cluster.Cluster, "FAKE_CLUSTER",
ignore_missing=False)
expected_kwargs={'k1': 'v1', 'k2': 'v2'},
)
mock_find.assert_called_once_with(
cluster.Cluster, "FAKE_CLUSTER", ignore_missing=False
)
def test_resize_cluster_with_obj(self):
mock_cluster = cluster.Cluster.new(id='FAKE_CLUSTER')
@ -137,15 +147,17 @@ class TestClusterProxy(test_proxy_base.TestProxyBase):
method_args=[mock_cluster],
method_kwargs={'k1': 'v1', 'k2': 'v2'},
expected_args=[self.proxy],
expected_kwargs={'k1': 'v1', 'k2': 'v2'})
expected_kwargs={'k1': 'v1', 'k2': 'v2'},
)
def test_collect_cluster_attrs(self):
self.verify_list(self.proxy.collect_cluster_attrs,
cluster_attr.ClusterAttr,
method_args=['FAKE_ID', 'path.to.attr'],
expected_args=[],
expected_kwargs={'cluster_id': 'FAKE_ID',
'path': 'path.to.attr'})
self.verify_list(
self.proxy.collect_cluster_attrs,
cluster_attr.ClusterAttr,
method_args=['FAKE_ID', 'path.to.attr'],
expected_args=[],
expected_kwargs={'cluster_id': 'FAKE_ID', 'path': 'path.to.attr'},
)
@mock.patch.object(proxy_base.Proxy, '_get_resource')
def test_cluster_check(self, mock_get):
@ -155,7 +167,8 @@ class TestClusterProxy(test_proxy_base.TestProxyBase):
"openstack.clustering.v1.cluster.Cluster.check",
self.proxy.check_cluster,
method_args=["FAKE_CLUSTER"],
expected_args=[self.proxy])
expected_args=[self.proxy],
)
mock_get.assert_called_once_with(cluster.Cluster, "FAKE_CLUSTER")
@mock.patch.object(proxy_base.Proxy, '_get_resource')
@ -166,7 +179,8 @@ class TestClusterProxy(test_proxy_base.TestProxyBase):
"openstack.clustering.v1.cluster.Cluster.recover",
self.proxy.recover_cluster,
method_args=["FAKE_CLUSTER"],
expected_args=[self.proxy])
expected_args=[self.proxy],
)
mock_get.assert_called_once_with(cluster.Cluster, "FAKE_CLUSTER")
def test_node_create(self):
@ -183,7 +197,8 @@ class TestClusterProxy(test_proxy_base.TestProxyBase):
"openstack.clustering.v1.node.Node.force_delete",
self.proxy.delete_node,
method_args=["value", False, True],
expected_args=[self.proxy])
expected_args=[self.proxy],
)
def test_node_find(self):
self.verify_find(self.proxy.find_node, node.Node)
@ -198,12 +213,16 @@ class TestClusterProxy(test_proxy_base.TestProxyBase):
method_args=['NODE_ID'],
method_kwargs={'details': True},
expected_args=[node.NodeDetail],
expected_kwargs={'node_id': 'NODE_ID', 'requires_id': False})
expected_kwargs={'node_id': 'NODE_ID', 'requires_id': False},
)
def test_nodes(self):
self.verify_list(self.proxy.nodes, node.Node,
method_kwargs={'limit': 2},
expected_kwargs={'limit': 2})
self.verify_list(
self.proxy.nodes,
node.Node,
method_kwargs={'limit': 2},
expected_kwargs={'limit': 2},
)
def test_node_update(self):
self.verify_update(self.proxy.update_node, node.Node)
@ -216,7 +235,8 @@ class TestClusterProxy(test_proxy_base.TestProxyBase):
"openstack.clustering.v1.node.Node.check",
self.proxy.check_node,
method_args=["FAKE_NODE"],
expected_args=[self.proxy])
expected_args=[self.proxy],
)
mock_get.assert_called_once_with(node.Node, "FAKE_NODE")
@mock.patch.object(proxy_base.Proxy, '_get_resource')
@ -227,7 +247,8 @@ class TestClusterProxy(test_proxy_base.TestProxyBase):
"openstack.clustering.v1.node.Node.recover",
self.proxy.recover_node,
method_args=["FAKE_NODE"],
expected_args=[self.proxy])
expected_args=[self.proxy],
)
mock_get.assert_called_once_with(node.Node, "FAKE_NODE")
@mock.patch.object(proxy_base.Proxy, '_get_resource')
@ -239,7 +260,8 @@ class TestClusterProxy(test_proxy_base.TestProxyBase):
self.proxy.adopt_node,
method_kwargs={"preview": False, "foo": "bar"},
expected_args=[self.proxy],
expected_kwargs={"preview": False, "foo": "bar"})
expected_kwargs={"preview": False, "foo": "bar"},
)
mock_get.assert_called_once_with(node.Node, None)
@ -252,7 +274,8 @@ class TestClusterProxy(test_proxy_base.TestProxyBase):
self.proxy.adopt_node,
method_kwargs={"preview": True, "foo": "bar"},
expected_args=[self.proxy],
expected_kwargs={"preview": True, "foo": "bar"})
expected_kwargs={"preview": True, "foo": "bar"},
)
mock_get.assert_called_once_with(node.Node, None)
@ -275,19 +298,24 @@ class TestClusterProxy(test_proxy_base.TestProxyBase):
self.verify_get(self.proxy.get_policy, policy.Policy)
def test_policies(self):
self.verify_list(self.proxy.policies, policy.Policy,
method_kwargs={'limit': 2},
expected_kwargs={'limit': 2})
self.verify_list(
self.proxy.policies,
policy.Policy,
method_kwargs={'limit': 2},
expected_kwargs={'limit': 2},
)
def test_policy_update(self):
self.verify_update(self.proxy.update_policy, policy.Policy)
def test_cluster_policies(self):
self.verify_list(self.proxy.cluster_policies,
cluster_policy.ClusterPolicy,
method_args=["FAKE_CLUSTER"],
expected_args=[],
expected_kwargs={"cluster_id": "FAKE_CLUSTER"})
self.verify_list(
self.proxy.cluster_policies,
cluster_policy.ClusterPolicy,
method_args=["FAKE_CLUSTER"],
expected_args=[],
expected_kwargs={"cluster_id": "FAKE_CLUSTER"},
)
def test_get_cluster_policy(self):
fake_policy = cluster_policy.ClusterPolicy.new(id="FAKE_POLICY")
@ -300,7 +328,8 @@ class TestClusterProxy(test_proxy_base.TestProxyBase):
method_args=[fake_policy, "FAKE_CLUSTER"],
expected_args=[cluster_policy.ClusterPolicy, fake_policy],
expected_kwargs={'cluster_id': 'FAKE_CLUSTER'},
expected_result=fake_policy)
expected_result=fake_policy,
)
# Policy ID as input
self._verify(
@ -308,7 +337,8 @@ class TestClusterProxy(test_proxy_base.TestProxyBase):
self.proxy.get_cluster_policy,
method_args=["FAKE_POLICY", "FAKE_CLUSTER"],
expected_args=[cluster_policy.ClusterPolicy, "FAKE_POLICY"],
expected_kwargs={"cluster_id": "FAKE_CLUSTER"})
expected_kwargs={"cluster_id": "FAKE_CLUSTER"},
)
# Cluster object as input
self._verify(
@ -316,7 +346,8 @@ class TestClusterProxy(test_proxy_base.TestProxyBase):
self.proxy.get_cluster_policy,
method_args=["FAKE_POLICY", fake_cluster],
expected_args=[cluster_policy.ClusterPolicy, "FAKE_POLICY"],
expected_kwargs={"cluster_id": fake_cluster})
expected_kwargs={"cluster_id": fake_cluster},
)
def test_receiver_create(self):
self.verify_create(self.proxy.create_receiver, receiver.Receiver)
@ -325,8 +356,9 @@ class TestClusterProxy(test_proxy_base.TestProxyBase):
self.verify_update(self.proxy.update_receiver, receiver.Receiver)
def test_receiver_delete(self):
self.verify_delete(self.proxy.delete_receiver, receiver.Receiver,
False)
self.verify_delete(
self.proxy.delete_receiver, receiver.Receiver, False
)
def test_receiver_delete_ignore(self):
self.verify_delete(self.proxy.delete_receiver, receiver.Receiver, True)
@ -338,17 +370,23 @@ class TestClusterProxy(test_proxy_base.TestProxyBase):
self.verify_get(self.proxy.get_receiver, receiver.Receiver)
def test_receivers(self):
self.verify_list(self.proxy.receivers, receiver.Receiver,
method_kwargs={'limit': 2},
expected_kwargs={'limit': 2})
self.verify_list(
self.proxy.receivers,
receiver.Receiver,
method_kwargs={'limit': 2},
expected_kwargs={'limit': 2},
)
def test_action_get(self):
self.verify_get(self.proxy.get_action, action.Action)
def test_actions(self):
self.verify_list(self.proxy.actions, action.Action,
method_kwargs={'limit': 2},
expected_kwargs={'limit': 2})
self.verify_list(
self.proxy.actions,
action.Action,
method_kwargs={'limit': 2},
expected_kwargs={'limit': 2},
)
def test_action_update(self):
self.verify_update(self.proxy.update_action, action.Action)
@ -357,9 +395,12 @@ class TestClusterProxy(test_proxy_base.TestProxyBase):
self.verify_get(self.proxy.get_event, event.Event)
def test_events(self):
self.verify_list(self.proxy.events, event.Event,
method_kwargs={'limit': 2},
expected_kwargs={'limit': 2})
self.verify_list(
self.proxy.events,
event.Event,
method_kwargs={'limit': 2},
expected_kwargs={'limit': 2},
)
@mock.patch("openstack.resource.wait_for_status")
def test_wait_for(self, mock_wait):
@ -368,8 +409,9 @@ class TestClusterProxy(test_proxy_base.TestProxyBase):
self.proxy.wait_for_status(mock_resource, 'ACTIVE')
mock_wait.assert_called_once_with(self.proxy, mock_resource,
'ACTIVE', [], 2, 120)
mock_wait.assert_called_once_with(
self.proxy, mock_resource, 'ACTIVE', [], 2, 120
)
@mock.patch("openstack.resource.wait_for_status")
def test_wait_for_params(self, mock_wait):
@ -378,8 +420,9 @@ class TestClusterProxy(test_proxy_base.TestProxyBase):
self.proxy.wait_for_status(mock_resource, 'ACTIVE', ['ERROR'], 1, 2)
mock_wait.assert_called_once_with(self.proxy, mock_resource,
'ACTIVE', ['ERROR'], 1, 2)
mock_wait.assert_called_once_with(
self.proxy, mock_resource, 'ACTIVE', ['ERROR'], 1, 2
)
@mock.patch("openstack.resource.wait_for_delete")
def test_wait_for_delete(self, mock_wait):
@ -405,7 +448,8 @@ class TestClusterProxy(test_proxy_base.TestProxyBase):
self.proxy.get_cluster_metadata,
method_args=["value"],
expected_args=[self.proxy],
expected_result=cluster.Cluster(id="value", metadata={}))
expected_result=cluster.Cluster(id="value", metadata={}),
)
def test_set_cluster_metadata(self):
kwargs = {"a": "1", "b": "2"}
@ -415,12 +459,11 @@ class TestClusterProxy(test_proxy_base.TestProxyBase):
self.proxy.set_cluster_metadata,
method_args=[id],
method_kwargs=kwargs,
method_result=cluster.Cluster.existing(
id=id, metadata=kwargs),
method_result=cluster.Cluster.existing(id=id, metadata=kwargs),
expected_args=[self.proxy],
expected_kwargs={'metadata': kwargs},
expected_result=cluster.Cluster.existing(
id=id, metadata=kwargs))
expected_result=cluster.Cluster.existing(id=id, metadata=kwargs),
)
def test_delete_cluster_metadata(self):
self._verify(
@ -428,4 +471,5 @@ class TestClusterProxy(test_proxy_base.TestProxyBase):
self.proxy.delete_cluster_metadata,
expected_result=None,
method_args=["value", ["key"]],
expected_args=[self.proxy, "key"])
expected_args=[self.proxy, "key"],
)


@ -26,10 +26,7 @@ FAKE = {
'created_at': '2015-10-10T12:46:36.000000',
'updated_at': '2016-10-10T12:46:36.000000',
'actor': {},
'params': {
'adjustment_type': 'CHANGE_IN_CAPACITY',
'adjustment': 2
},
'params': {'adjustment_type': 'CHANGE_IN_CAPACITY', 'adjustment': 2},
'channel': {
'alarm_url': 'http://host:port/webhooks/AN_ID/trigger?V=1',
},
@ -40,7 +37,6 @@ FAKE = {
class TestReceiver(base.TestCase):
def setUp(self):
super(TestReceiver, self).setUp()


@ -27,7 +27,6 @@ EXAMPLE = {
class TestService(base.TestCase):
def setUp(self):
super(TestService, self).setUp()
self.resp = mock.Mock()