Fix pep8 issues

Fixed pep8 issues for E126, H238, E241, H405, H403, E121, H201.
H306 and H301, which relate to the way imports are done, are still
unfixed.

Change-Id: Ib2668c0af16a75e33e93ec3d699ce2d3fffade38
This commit is contained in:
Kristi Nikolla 2016-11-07 15:50:28 -05:00
parent 91bcaee5d1
commit a859704206
8 changed files with 124 additions and 120 deletions

View File

@ -33,27 +33,29 @@ class VolumeCreateEndpoint(object):
def info(self, ctxt, publisher_id, event_type, payload, metadata):
LOG.info('Creating volume mapping %s -> %s at %s' % (
payload['volume_id'],
payload['tenant_id'],
self.sp_name))
payload['volume_id'],
payload['tenant_id'],
self.sp_name)
)
insert(ResourceMapping("volumes",
payload['volume_id'],
payload['tenant_id'],
self.sp_name))
payload['volume_id'],
payload['tenant_id'],
self.sp_name))
class VolumeDeleteEndpoint(object):
def __init__(self, sp_name):
self.sp_name = sp_name
filter_rule = oslo_messaging.NotificationFilter(
publisher_id='^volume.*',
event_type='^volume.delete.end$')
publisher_id='^volume.*',
event_type='^volume.delete.end$')
def info(self, ctxt, publisher_id, event_type, payload, metadata):
LOG.info('Deleting volume mapping %s -> %s at %s' % (
payload['volume_id'],
payload['tenant_id'],
self.sp_name))
payload['volume_id'],
payload['tenant_id'],
self.sp_name)
)
delete(ResourceMapping.find("volumes", payload['volume_id']))
@ -67,9 +69,10 @@ class VolumeTransferEndpoint(object):
def info(self, ctxt, publisher_id, event_type, payload, metadata):
LOG.info('Moving volume mapping %s -> %s at %s' % (
payload['volume_id'],
payload['tenant_id'],
self.sp_name))
payload['volume_id'],
payload['tenant_id'],
self.sp_name)
)
mapping = ResourceMapping.find("volumes", payload['volume_id'])
# Since we're manually updating a field, we have to sanitize the UUID
# ourselves.
@ -78,21 +81,22 @@ class VolumeTransferEndpoint(object):
class SnapshotCreateEndpoint(object):
filter_rule = oslo_messaging.NotificationFilter(
publisher_id='^snapshot.*',
event_type='^snapshot.create.start$')
publisher_id='^snapshot.*',
event_type='^snapshot.create.start$')
def __init__(self, sp_name):
self.sp_name = sp_name
def info(self, ctxt, publisher_id, event_type, payload, metadata):
LOG.info('Creating snapshot mapping %s -> %s at %s' % (
payload['snapshot_id'],
payload['tenant_id'],
self.sp_name))
payload['snapshot_id'],
payload['tenant_id'],
self.sp_name)
)
insert(ResourceMapping("snapshots",
payload['snapshot_id'],
payload['tenant_id'],
self.sp_name))
payload['snapshot_id'],
payload['tenant_id'],
self.sp_name))
class SnapshotDeleteEndpoint(object):
@ -105,9 +109,10 @@ class SnapshotDeleteEndpoint(object):
def info(self, ctxt, publisher_id, event_type, payload, metadata):
LOG.info('Deleting snapshot mapping %s -> %s at %s' % (
payload['snapshot_id'],
payload['tenant_id'],
self.sp_name))
payload['snapshot_id'],
payload['tenant_id'],
self.sp_name)
)
delete(ResourceMapping.find("snapshots", payload['snapshot_id']))
@ -121,13 +126,14 @@ class ImageCreateEndpoint(object):
def info(self, ctxt, publisher_id, event_type, payload, metadata):
LOG.info('Creating image mapping %s -> %s at %s' % (
payload['id'],
payload['owner'],
self.sp_name))
payload['id'],
payload['owner'],
self.sp_name)
)
insert(ResourceMapping("images",
payload['id'],
payload['owner'],
self.sp_name))
payload['id'],
payload['owner'],
self.sp_name))
class ImageDeleteEndpoint(object):
@ -140,21 +146,22 @@ class ImageDeleteEndpoint(object):
def info(self, ctxt, publisher_id, event_type, payload, metadata):
LOG.info('Deleting image mapping %s -> %s at %s' % (
payload['id'],
payload['owner'],
self.sp_name))
payload['id'],
payload['owner'],
self.sp_name)
)
delete(ResourceMapping.find("images", payload['id']))
def get_endpoints_for_sp(sp_name):
return [
VolumeCreateEndpoint(sp_name),
VolumeDeleteEndpoint(sp_name),
VolumeTransferEndpoint(sp_name),
SnapshotCreateEndpoint(sp_name),
SnapshotDeleteEndpoint(sp_name),
ImageCreateEndpoint(sp_name),
ImageDeleteEndpoint(sp_name)
VolumeCreateEndpoint(sp_name),
VolumeDeleteEndpoint(sp_name),
VolumeTransferEndpoint(sp_name),
SnapshotCreateEndpoint(sp_name),
SnapshotDeleteEndpoint(sp_name),
ImageCreateEndpoint(sp_name),
ImageDeleteEndpoint(sp_name)
]
@ -167,10 +174,11 @@ def get_server_for_sp(sp):
transport = oslo_messaging.get_notification_transport(CONF, cfg.messagebus)
targets = [oslo_messaging.Target(topic='notifications')]
return oslo_messaging.get_notification_listener(
transport,
targets,
get_endpoints_for_sp(cfg.sp_name),
executor='eventlet')
transport,
targets,
get_endpoints_for_sp(cfg.sp_name),
executor='eventlet'
)
if __name__ == "__main__":

View File

@ -41,7 +41,7 @@ def is_valid_uuid(value):
return False
class RequestHandler:
class RequestHandler(object):
def __init__(self, method, path, headers):
self.method = method
self.path = path

View File

@ -93,8 +93,7 @@ def aggregate(responses, key, params=None, path=None, detailed=True):
# because we need sorting information. Here we
# remove the extra values /volumes/detail provides
if key == 'volumes' and not detailed:
resource_list[start:end] = \
_remove_details(resource_list[start:end])
resource_list[start:end] = _remove_details(resource_list[start:end])
response = {key: resource_list[start:end]}
@ -122,14 +121,9 @@ def list_api_versions(service_type, url):
else:
info.update({'status': 'SUPPORTED'})
info.update({
'id': version,
'links': [
{'href': '%s/%s/' % (url,
version[:-2]),
'rel': 'self'}
]
})
info.update({'id': version,
'links': [{'href': '%s/%s/' % (url, version[:-2]),
'rel': 'self'}]})
api_versions.append(info)
return json.dumps({'versions': api_versions})

View File

@ -36,5 +36,5 @@ def chunked_reader():
try:
while True:
yield stream.next()
except:
except Exception:
return

View File

@ -27,15 +27,15 @@ class TestListener(testcase.TestCase):
dispatcher = notify_dispatcher.NotificationDispatcher(
endpoints, serializer=None)
MESSAGE = {
'payload': {
'volume_id': "1232123212321",
'tenant_id': "abdbabdbabdba"
},
'priority': 'info',
'publisher_id': 'volume.node4',
'event_type': 'volume.create.start',
'timestamp': '2014-03-03 18:21:04.369234',
'message_id': '99863dda-97f0-443a-a0c1-6ed317b7fd45'
'payload': {
'volume_id': "1232123212321",
'tenant_id': "abdbabdbabdba"
},
'priority': 'info',
'publisher_id': 'volume.node4',
'event_type': 'volume.create.start',
'timestamp': '2014-03-03 18:21:04.369234',
'message_id': '99863dda-97f0-443a-a0c1-6ed317b7fd45'
}
incoming = mock.Mock(ctxt={}, message=MESSAGE)
dispatcher.dispatch(incoming)
@ -52,15 +52,15 @@ class TestListener(testcase.TestCase):
dispatcher = notify_dispatcher.NotificationDispatcher(
endpoints, serializer=None)
MESSAGE = {
'payload': {
'volume_id': "1232123212321",
'tenant_id': "abdbabdbabdba"
},
'priority': 'info',
'publisher_id': 'volume.node4',
'event_type': 'volume.delete.end',
'timestamp': '2014-03-03 18:21:04.369234',
'message_id': '99863dda-97f0-443a-a0c1-6ed317b7fd45'
'payload': {
'volume_id': "1232123212321",
'tenant_id': "abdbabdbabdba"
},
'priority': 'info',
'publisher_id': 'volume.node4',
'event_type': 'volume.delete.end',
'timestamp': '2014-03-03 18:21:04.369234',
'message_id': '99863dda-97f0-443a-a0c1-6ed317b7fd45'
}
incoming = mock.Mock(ctxt={}, message=MESSAGE)
dispatcher.dispatch(incoming)
@ -73,15 +73,15 @@ class TestListener(testcase.TestCase):
dispatcher = notify_dispatcher.NotificationDispatcher(
endpoints, serializer=None)
MESSAGE = {
'payload': {
'snapshot_id': "1232123212321",
'tenant_id': "abdbabdbabdba"
},
'priority': 'info',
'publisher_id': 'snapshot.node4',
'event_type': 'snapshot.create.start',
'timestamp': '2014-03-03 18:21:04.369234',
'message_id': '99863dda-97f0-443a-a0c1-6ed317b7fd45'
'payload': {
'snapshot_id': "1232123212321",
'tenant_id': "abdbabdbabdba"
},
'priority': 'info',
'publisher_id': 'snapshot.node4',
'event_type': 'snapshot.create.start',
'timestamp': '2014-03-03 18:21:04.369234',
'message_id': '99863dda-97f0-443a-a0c1-6ed317b7fd45'
}
incoming = mock.Mock(ctxt={}, message=MESSAGE)
dispatcher.dispatch(incoming)
@ -98,15 +98,15 @@ class TestListener(testcase.TestCase):
dispatcher = notify_dispatcher.NotificationDispatcher(
endpoints, serializer=None)
MESSAGE = {
'payload': {
'snapshot_id': "1232123212321",
'tenant_id': "abdbabdbabdba"
},
'priority': 'info',
'publisher_id': 'snapshot.node4',
'event_type': 'snapshot.delete.end',
'timestamp': '2014-03-03 18:21:04.369234',
'message_id': '99863dda-97f0-443a-a0c1-6ed317b7fd45'
'payload': {
'snapshot_id': "1232123212321",
'tenant_id': "abdbabdbabdba"
},
'priority': 'info',
'publisher_id': 'snapshot.node4',
'event_type': 'snapshot.delete.end',
'timestamp': '2014-03-03 18:21:04.369234',
'message_id': '99863dda-97f0-443a-a0c1-6ed317b7fd45'
}
incoming = mock.Mock(ctxt={}, message=MESSAGE)
dispatcher.dispatch(incoming)
@ -119,15 +119,15 @@ class TestListener(testcase.TestCase):
dispatcher = notify_dispatcher.NotificationDispatcher(
endpoints, serializer=None)
MESSAGE = {
'payload': {
'id': "1232123212321",
'owner': "abdbabdbabdba"
},
'priority': 'info',
'publisher_id': 'image.node4',
'event_type': 'image.create',
'timestamp': '2014-03-03 18:21:04.369234',
'message_id': '99863dda-97f0-443a-a0c1-6ed317b7fd45'
'payload': {
'id': "1232123212321",
'owner': "abdbabdbabdba"
},
'priority': 'info',
'publisher_id': 'image.node4',
'event_type': 'image.create',
'timestamp': '2014-03-03 18:21:04.369234',
'message_id': '99863dda-97f0-443a-a0c1-6ed317b7fd45'
}
incoming = mock.Mock(ctxt={}, message=MESSAGE)
dispatcher.dispatch(incoming)
@ -144,15 +144,15 @@ class TestListener(testcase.TestCase):
dispatcher = notify_dispatcher.NotificationDispatcher(
endpoints, serializer=None)
MESSAGE = {
'payload': {
'id': "1232123212321",
'owner': "abdbabdbabdba"
},
'priority': 'info',
'publisher_id': 'image.node4',
'event_type': 'image.delete',
'timestamp': '2014-03-03 18:21:04.369234',
'message_id': '99863dda-97f0-443a-a0c1-6ed317b7fd45'
'payload': {
'id': "1232123212321",
'owner': "abdbabdbabdba"
},
'priority': 'info',
'publisher_id': 'image.node4',
'event_type': 'image.delete',
'timestamp': '2014-03-03 18:21:04.369234',
'message_id': '99863dda-97f0-443a-a0c1-6ed317b7fd45'
}
incoming = mock.Mock(ctxt={}, message=MESSAGE)
dispatcher.dispatch(incoming)

View File

@ -25,7 +25,7 @@ from mixmatch.proxy import app
from mixmatch.model import BASE, enginefacade, insert, ResourceMapping
class FakeSession():
class FakeSession(object):
"""A replacement for keystoneauth1.session.Session."""
def __init__(self, token, project):
self.token = token

View File

@ -23,16 +23,18 @@ from mixmatch import services
from mixmatch.tests.unit import samples
class Response:
class Response(object):
def __init__(self, text):
self.text = text
# Source: http://stackoverflow.com/a/9468284
class Url(object):
"""A url object that can be compared with other url orbjects
without regard to the vagaries of encoding, escaping, and ordering
of parameters in query strings."""
"""Url object that can be compared with other url objects
This comparison is done without regard to the vagaries of encoding,
escaping, and ordering of parameters in query strings.
"""
def __init__(self, url):
parts = parse.urlparse(url)

View File

@ -33,10 +33,10 @@ commands = oslo_debug_helper {posargs}
[flake8]
# E123, E125 skipped as they are invalid PEP-8.
# FIXME(knikolla): The tests here below need additional work to pass
# They used to not be checked by our previous CI
# H306, H301, E126, H238, E241, H405, H403, E121, H201
# TODO(knikolla): These are the tests we should make passing
# H306 Imports in alphabetical order
# H301 One import per line
show-source = True
ignore = E123,E125,H306,H301,E126,H238,E241,H405,H403,E121,H201
ignore = E123,E125,H306,H301
builtins = _
exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build