Merge "Remove unnecessary quotes"
commit c70e5d7d96
Changed paths:
    openstack/
        baremetal/v1
        block_storage
        cloud
        compute/v2
        network/v2
        object_store/v1
        tests/
            functional/cloud
            unit
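Background: Python concatenates adjacent string literals at compile time, so each pair of split literals removed below is already equivalent to the single merged literal that replaces it; the change is purely cosmetic, plus the removal of a stray quote at the start of several docstrings. A minimal sketch reusing one of the messages from the first hunk:

    # Adjacent string literals are joined by the compiler, so both spellings are equal.
    split = 'Clean steps can only be provided with ' '"clean" target'
    joined = 'Clean steps can only be provided with "clean" target'
    assert split == joined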
@@ -453,7 +453,7 @@ class Node(_common.ListMixin, resource.Resource):
         if clean_steps is not None:
             if target != 'clean':
                 raise ValueError(
-                    'Clean steps can only be provided with ' '"clean" target'
+                    'Clean steps can only be provided with "clean" target'
                 )
             body['clean_steps'] = clean_steps
 
@@ -180,7 +180,7 @@ class Backup(resource.Resource):
             body['restore']['name'] = name
         if not (volume_id or name):
             raise exceptions.SDKException(
-                'Either of `name` or `volume_id`' ' must be specified.'
+                'Either of `name` or `volume_id` must be specified.'
             )
         response = session.post(url, json=body)
         self._translate_response(response, has_body=False)
@@ -194,7 +194,7 @@ class Backup(resource.Resource):
             body['restore']['name'] = name
         if not (volume_id or name):
             raise exceptions.SDKException(
-                'Either of `name` or `volume_id`' ' must be specified.'
+                'Either of `name` or `volume_id` must be specified.'
             )
         response = session.post(url, json=body)
         self._translate_response(response, has_body=False)
@@ -42,7 +42,7 @@ class Type(resource.Resource):
         for k, v in extra_specs.items():
             if not isinstance(v, str):
                 raise ValueError(
-                    "The value for %s (%s) must be " "a text string" % (k, v)
+                    "The value for %s (%s) must be a text string" % (k, v)
                 )
 
         if key is not None:
@@ -536,7 +536,7 @@ class FloatingIPCloudMixin:
         try:
             for count in utils.iterate_timeout(
                 timeout,
-                "Timeout waiting for the floating IP" " to be ACTIVE",
+                "Timeout waiting for the floating IP to be ACTIVE",
                 wait=self._FLOAT_AGE,
             ):
                 fip = self.get_floating_ip(fip_id)
@@ -107,14 +107,12 @@ class SecurityGroupCloudMixin:
             raise exc.OpenStackCloudUnavailableFeature(
                 "Unavailable feature: security groups"
             )
-        error_message = "Error getting security group with" " ID {id}".format(
-            id=id
-        )
+        error_message = f"Error getting security group with ID {id}"
         if self._use_neutron_secgroups():
             return self.network.get_security_group(id)
         else:
             data = proxy._json_response(
-                self.compute.get('/os-security-groups/{id}'.format(id=id)),
+                self.compute.get(f'/os-security-groups/{id}'),
                 error_message=error_message,
             )
             return self._normalize_secgroup(
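Besides joining the split literals, the hunk above also folds the .format(id=id) calls into f-strings; the rendered text is identical either way. A small sketch with a made-up ID (illustrative only, not from the commit):

    sg_id = 'abc123'  # hypothetical value, for illustration
    old_msg = "Error getting security group with ID {id}".format(id=sg_id)
    new_msg = f"Error getting security group with ID {sg_id}"
    assert old_msg == new_msg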
@@ -721,7 +721,7 @@ class _OpenStackCloudMixin:
             and self._disable_warnings[service_key]
         ):
             self.log.debug(
-                "Disabling %(service_key)s entry in catalog" " per config",
+                "Disabling %(service_key)s entry in catalog per config",
                 {'service_key': service_key},
             )
             self._disable_warnings[service_key] = True
@@ -53,7 +53,7 @@ class ServerRemoteConsole(resource.Resource):
             and self.type == 'webmks'
         ):
             raise ValueError(
-                'Console type webmks is not supported on ' 'server side'
+                'Console type webmks is not supported on server side'
             )
         return super(ServerRemoteConsole, self).create(
             session, prepend_key=prepend_key, base_path=base_path, **params
@@ -628,7 +628,7 @@ class Proxy(proxy.Proxy, Generic[T]):
         self._delete(_bgp_peer.BgpPeer, peer, ignore_missing=ignore_missing)
 
     def find_bgp_peer(self, name_or_id, ignore_missing=True, **query):
-        """ "Find a single BGP Peer"""
+        """Find a single BGP Peer"""
         return self._find(
             _bgp_peer.BgpPeer,
             name_or_id,
@@ -659,7 +659,7 @@ class Proxy(proxy.Proxy, Generic[T]):
         )
 
     def find_bgp_speaker(self, name_or_id, ignore_missing=True, **query):
-        """ "Find a single BGP Peer"""
+        """Find a single BGP Peer"""
         return self._find(
             _bgp_speaker.BgpSpeaker,
             name_or_id,
@@ -756,7 +756,7 @@ class Proxy(proxy.Proxy, Generic[T]):
         self._delete(_bgpvpn.BgpVpn, bgpvpn, ignore_missing=ignore_missing)
 
     def find_bgpvpn(self, name_or_id, ignore_missing=True, **query):
-        """ "Find a single BGPVPN
+        """Find a single BGPVPN
 
         :param name_or_id: The name or ID of a BGPVPN.
         :param bool ignore_missing: When set to ``False``
@@ -949,7 +949,7 @@ class Proxy(proxy.Proxy, Generic[T]):
     def find_bgpvpn_port_association(
         self, name_or_id, bgpvpn_id, ignore_missing=True, **query
     ):
-        """ "Find a single BGPVPN Port Association
+        """Find a single BGPVPN Port Association
 
         :param name_or_id: The name or ID of a BgpVpnNetworkAssociation.
         :param bgpvpn_id: The value can be the ID of a BGPVPN.
@@ -6151,7 +6151,7 @@ class Proxy(proxy.Proxy, Generic[T]):
         )
 
     def find_tap_flow(self, name_or_id, ignore_missing=True, **query):
-        """ "Find a single Tap Service"""
+        """Find a single Tap Service"""
         return self._find(
             _tap_flow.TapFlow,
             name_or_id,
@@ -6182,7 +6182,7 @@ class Proxy(proxy.Proxy, Generic[T]):
         )
 
     def find_tap_service(self, name_or_id, ignore_missing=True, **query):
-        """ "Find a single Tap Service"""
+        """Find a single Tap Service"""
         return self._find(
             _tap_service.TapService,
             name_or_id,
@@ -1030,7 +1030,7 @@ class Proxy(proxy.Proxy):
                 raise ValueError('path must at least contain /v1/a/c/')
             else:
                 raise ValueError(
-                    'path must be full path to an object' ' e.g. /v1/a/c/o'
+                    'path must be full path to an object e.g. /v1/a/c/o'
                 )
 
         standard_methods = ['GET', 'PUT', 'HEAD', 'POST', 'DELETE']
@@ -66,7 +66,7 @@ class TestQosPolicy(base.BaseFunctionalTest):
     def test_create_qos_policy_default(self):
         if not self.operator_cloud._has_neutron_extension('qos-default'):
             self.skipTest(
-                "'qos-default' network extension not supported " "by cloud"
+                "'qos-default' network extension not supported by cloud"
             )
         policy = self.operator_cloud.create_qos_policy(
             name=self.policy_name, default=True
@@ -18,7 +18,7 @@ FAKE_ID = '0725b527-e51a-41df-ad22-adad5f4546ad'
 FAKE_RP_UUID = 'f4b7fe6c-8ab4-4914-a113-547af022935b'
 FAKE_INSTANCE_UUID = '1ce4a597-9836-4e02-bea1-a3a6cbe7b9f9'
 FAKE_ATTACH_INFO_STR = (
-    '{"bus": "5e", ' '"device": "00", ' '"domain": "0000", ' '"function": "1"}'
+    '{"bus": "5e", "device": "00", "domain": "0000", "function": "1"}'
 )
 
 FAKE = {
@@ -57,7 +57,7 @@ _GroupData = collections.namedtuple(
 
 _DomainData = collections.namedtuple(
     'DomainData',
-    'domain_id, domain_name, description, json_response, ' 'json_request',
+    'domain_id, domain_name, description, json_response, json_request',
 )
 
 
@@ -223,7 +223,7 @@ class TestCase(base.TestCase):
             assert not (project or project_list)
         else:
             raise Exception(
-                'Must specify a project, project_list, ' 'or project_count'
+                'Must specify a project, project_list, or project_count'
             )
         assert list_get or id_get
 
@@ -362,7 +362,7 @@ class TestCase(base.TestCase):
         self.assertIs(
             0,
             len(kwargs),
-            message='extra key-word args received ' 'on _get_user_data',
+            message='extra key-word args received on _get_user_data',
         )
 
         return _UserData(
@@ -64,7 +64,7 @@ ARQ_UUID = uuid.uuid4().hex
 ARQ_DEV_RP_UUID = uuid.uuid4().hex
 ARQ_INSTANCE_UUID = uuid.uuid4().hex
 ARQ_ATTACH_INFO_STR = (
-    '{"bus": "5e", ' '"device": "00", ' '"domain": "0000", ' '"function": "1"}'
+    '{"bus": "5e", "device": "00", "domain": "0000", "function": "1"}'
 )
 ARQ_DICT = {
     'uuid': ARQ_UUID,
@@ -78,7 +78,7 @@ _TASK_PROPERTIES = {
     },
     "expires_at": {
         "description": _(
-            "Datetime when this resource would be" " subject to removal"
+            "Datetime when this resource would be subject to removal"
         ),
         "type": ["null", "string"],
     },
@@ -173,7 +173,7 @@ class TestNetworkAddressGroup(TestNetworkProxy):
         )
 
     @mock.patch(
-        'openstack.network.v2._proxy.Proxy.' 'add_addresses_to_address_group'
+        'openstack.network.v2._proxy.Proxy.add_addresses_to_address_group'
     )
     def test_add_addresses_to_address_group(self, add_addresses):
         data = mock.sentinel
@@ -353,7 +353,7 @@ class TestTempURL(TestObjectStoreProxy):
     expires_iso8601_format = '%Y-%m-%dT%H:%M:%SZ'
     short_expires_iso8601_format = '%Y-%m-%d'
     time_errmsg = (
-        'time must either be a whole number or in specific ' 'ISO 8601 format.'
+        'time must either be a whole number or in specific ISO 8601 format.'
     )
     path_errmsg = 'path must be full path to an object e.g. /v1/a/c/o'
     url = '/v1/AUTH_account/c/o'
@@ -361,7 +361,7 @@ class TestTempURL(TestObjectStoreProxy):
     key = 'correcthorsebatterystaple'
     method = 'GET'
     expected_url = url + (
-        '?temp_url_sig=temp_url_signature' '&temp_url_expires=1400003600'
+        '?temp_url_sig=temp_url_signature&temp_url_expires=1400003600'
     )
     expected_body = '\n'.join(
         [
@@ -397,7 +397,7 @@ class TestOrchestrationTemplate(TestOrchestrationProxy):
             template_url=None,
         )
         self.assertEqual(
-            "'template_url' must be specified when template is " "None",
+            "'template_url' must be specified when template is None",
             str(err),
         )
 
@@ -57,7 +57,7 @@ FAKE_UPDATE_PREVIEW_RESPONSE = {
         {
             'updated_time': 'datetime',
             'resource_name': '',
-            'physical_resource_id': '{resource id or ' '}',
+            'physical_resource_id': '{resource id or }',
             'resource_action': 'CREATE',
             'resource_status': 'COMPLETE',
             'resource_status_reason': '',
@@ -70,7 +70,7 @@ FAKE_UPDATE_PREVIEW_RESPONSE = {
         {
             'updated_time': 'datetime',
             'resource_name': '',
-            'physical_resource_id': '{resource id or ' '}',
+            'physical_resource_id': '{resource id or }',
             'resource_action': 'CREATE',
             'resource_status': 'COMPLETE',
             'resource_status_reason': '',
@@ -83,7 +83,7 @@ FAKE_UPDATE_PREVIEW_RESPONSE = {
         {
             'updated_time': 'datetime',
             'resource_name': '',
-            'physical_resource_id': '{resource id or ' '}',
+            'physical_resource_id': '{resource id or }',
             'resource_action': 'CREATE',
             'resource_status': 'COMPLETE',
             'resource_status_reason': '',
@@ -96,7 +96,7 @@ FAKE_UPDATE_PREVIEW_RESPONSE = {
         {
             'updated_time': 'datetime',
             'resource_name': '',
-            'physical_resource_id': '{resource id or ' '}',
+            'physical_resource_id': '{resource id or }',
             'resource_action': 'CREATE',
             'resource_status': 'COMPLETE',
             'resource_status_reason': '',
@@ -109,7 +109,7 @@ FAKE_UPDATE_PREVIEW_RESPONSE = {
         {
             'updated_time': 'datetime',
             'resource_name': '',
-            'physical_resource_id': '{resource id or ' '}',
+            'physical_resource_id': '{resource id or }',
             'resource_action': 'CREATE',
             'resource_status': 'COMPLETE',
             'resource_status_reason': '',
@@ -639,7 +639,7 @@ class TestProxyCache(base.TestCase):
         self.sot.service_type = 'srv'
 
     def _get_key(self, id):
-        return f"srv.fake.fake/{id}." "{'microversion': None, 'params': {}}"
+        return "srv.fake.fake/%s.{'microversion': None, 'params': {}}" % id
 
     def test_get_not_in_cache(self):
         self.cloud._cache_expirations['srv.fake'] = 5
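In the hunk above the merged string stays %-formatted instead of becoming a single f-string, presumably because the cache key contains literal braces, which would have to be doubled inside an f-string. A quick equivalence check with a placeholder id (not from the commit):

    rid = 'fake-id'  # placeholder value, for illustration
    key_f = f"srv.fake.fake/{rid}." + "{'microversion': None, 'params': {}}"
    key_pct = "srv.fake.fake/%s.{'microversion': None, 'params': {}}" % rid
    assert key_f == key_pct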
@@ -941,7 +941,7 @@ class TestResource(base.TestCase):
         res._translate_response(response)
 
         expected = (
-            '{"foo": "new_bar", "id": null, ' '"location": null, "name": null}'
+            '{"foo": "new_bar", "id": null, "location": null, "name": null}'
        )
         actual = json.dumps(res, sort_keys=True)
         self.assertEqual(expected, actual)
@@ -402,7 +402,7 @@ class TinyDAG:
 
             except queue.Empty:
                 raise exceptions.SDKException(
-                    'Timeout waiting for ' 'cleanup task to complete'
+                    'Timeout waiting for cleanup task to complete'
                 )
         else:
             raise StopIteration