Fix pep8 issues

Fixed pep8 issues for E126, H238, E241, H405, H403, E121, H201.
H306 and H301, which relate to the way imports are done, are still
unfixed.

Change-Id: Ib2668c0af16a75e33e93ec3d699ce2d3fffade38
This commit is contained in:
Kristi Nikolla 2016-11-07 15:50:28 -05:00
parent 91bcaee5d1
commit a859704206
8 changed files with 124 additions and 120 deletions

View File

@ -33,27 +33,29 @@ class VolumeCreateEndpoint(object):
def info(self, ctxt, publisher_id, event_type, payload, metadata): def info(self, ctxt, publisher_id, event_type, payload, metadata):
LOG.info('Creating volume mapping %s -> %s at %s' % ( LOG.info('Creating volume mapping %s -> %s at %s' % (
payload['volume_id'], payload['volume_id'],
payload['tenant_id'], payload['tenant_id'],
self.sp_name)) self.sp_name)
)
insert(ResourceMapping("volumes", insert(ResourceMapping("volumes",
payload['volume_id'], payload['volume_id'],
payload['tenant_id'], payload['tenant_id'],
self.sp_name)) self.sp_name))
class VolumeDeleteEndpoint(object): class VolumeDeleteEndpoint(object):
def __init__(self, sp_name): def __init__(self, sp_name):
self.sp_name = sp_name self.sp_name = sp_name
filter_rule = oslo_messaging.NotificationFilter( filter_rule = oslo_messaging.NotificationFilter(
publisher_id='^volume.*', publisher_id='^volume.*',
event_type='^volume.delete.end$') event_type='^volume.delete.end$')
def info(self, ctxt, publisher_id, event_type, payload, metadata): def info(self, ctxt, publisher_id, event_type, payload, metadata):
LOG.info('Deleting volume mapping %s -> %s at %s' % ( LOG.info('Deleting volume mapping %s -> %s at %s' % (
payload['volume_id'], payload['volume_id'],
payload['tenant_id'], payload['tenant_id'],
self.sp_name)) self.sp_name)
)
delete(ResourceMapping.find("volumes", payload['volume_id'])) delete(ResourceMapping.find("volumes", payload['volume_id']))
@ -67,9 +69,10 @@ class VolumeTransferEndpoint(object):
def info(self, ctxt, publisher_id, event_type, payload, metadata): def info(self, ctxt, publisher_id, event_type, payload, metadata):
LOG.info('Moving volume mapping %s -> %s at %s' % ( LOG.info('Moving volume mapping %s -> %s at %s' % (
payload['volume_id'], payload['volume_id'],
payload['tenant_id'], payload['tenant_id'],
self.sp_name)) self.sp_name)
)
mapping = ResourceMapping.find("volumes", payload['volume_id']) mapping = ResourceMapping.find("volumes", payload['volume_id'])
# Since we're manually updating a field, we have to sanitize the UUID # Since we're manually updating a field, we have to sanitize the UUID
# ourselves. # ourselves.
@ -78,21 +81,22 @@ class VolumeTransferEndpoint(object):
class SnapshotCreateEndpoint(object): class SnapshotCreateEndpoint(object):
filter_rule = oslo_messaging.NotificationFilter( filter_rule = oslo_messaging.NotificationFilter(
publisher_id='^snapshot.*', publisher_id='^snapshot.*',
event_type='^snapshot.create.start$') event_type='^snapshot.create.start$')
def __init__(self, sp_name): def __init__(self, sp_name):
self.sp_name = sp_name self.sp_name = sp_name
def info(self, ctxt, publisher_id, event_type, payload, metadata): def info(self, ctxt, publisher_id, event_type, payload, metadata):
LOG.info('Creating snapshot mapping %s -> %s at %s' % ( LOG.info('Creating snapshot mapping %s -> %s at %s' % (
payload['snapshot_id'], payload['snapshot_id'],
payload['tenant_id'], payload['tenant_id'],
self.sp_name)) self.sp_name)
)
insert(ResourceMapping("snapshots", insert(ResourceMapping("snapshots",
payload['snapshot_id'], payload['snapshot_id'],
payload['tenant_id'], payload['tenant_id'],
self.sp_name)) self.sp_name))
class SnapshotDeleteEndpoint(object): class SnapshotDeleteEndpoint(object):
@ -105,9 +109,10 @@ class SnapshotDeleteEndpoint(object):
def info(self, ctxt, publisher_id, event_type, payload, metadata): def info(self, ctxt, publisher_id, event_type, payload, metadata):
LOG.info('Deleting snapshot mapping %s -> %s at %s' % ( LOG.info('Deleting snapshot mapping %s -> %s at %s' % (
payload['snapshot_id'], payload['snapshot_id'],
payload['tenant_id'], payload['tenant_id'],
self.sp_name)) self.sp_name)
)
delete(ResourceMapping.find("snapshots", payload['snapshot_id'])) delete(ResourceMapping.find("snapshots", payload['snapshot_id']))
@ -121,13 +126,14 @@ class ImageCreateEndpoint(object):
def info(self, ctxt, publisher_id, event_type, payload, metadata): def info(self, ctxt, publisher_id, event_type, payload, metadata):
LOG.info('Creating image mapping %s -> %s at %s' % ( LOG.info('Creating image mapping %s -> %s at %s' % (
payload['id'], payload['id'],
payload['owner'], payload['owner'],
self.sp_name)) self.sp_name)
)
insert(ResourceMapping("images", insert(ResourceMapping("images",
payload['id'], payload['id'],
payload['owner'], payload['owner'],
self.sp_name)) self.sp_name))
class ImageDeleteEndpoint(object): class ImageDeleteEndpoint(object):
@ -140,21 +146,22 @@ class ImageDeleteEndpoint(object):
def info(self, ctxt, publisher_id, event_type, payload, metadata): def info(self, ctxt, publisher_id, event_type, payload, metadata):
LOG.info('Deleting image mapping %s -> %s at %s' % ( LOG.info('Deleting image mapping %s -> %s at %s' % (
payload['id'], payload['id'],
payload['owner'], payload['owner'],
self.sp_name)) self.sp_name)
)
delete(ResourceMapping.find("images", payload['id'])) delete(ResourceMapping.find("images", payload['id']))
def get_endpoints_for_sp(sp_name): def get_endpoints_for_sp(sp_name):
return [ return [
VolumeCreateEndpoint(sp_name), VolumeCreateEndpoint(sp_name),
VolumeDeleteEndpoint(sp_name), VolumeDeleteEndpoint(sp_name),
VolumeTransferEndpoint(sp_name), VolumeTransferEndpoint(sp_name),
SnapshotCreateEndpoint(sp_name), SnapshotCreateEndpoint(sp_name),
SnapshotDeleteEndpoint(sp_name), SnapshotDeleteEndpoint(sp_name),
ImageCreateEndpoint(sp_name), ImageCreateEndpoint(sp_name),
ImageDeleteEndpoint(sp_name) ImageDeleteEndpoint(sp_name)
] ]
@ -167,10 +174,11 @@ def get_server_for_sp(sp):
transport = oslo_messaging.get_notification_transport(CONF, cfg.messagebus) transport = oslo_messaging.get_notification_transport(CONF, cfg.messagebus)
targets = [oslo_messaging.Target(topic='notifications')] targets = [oslo_messaging.Target(topic='notifications')]
return oslo_messaging.get_notification_listener( return oslo_messaging.get_notification_listener(
transport, transport,
targets, targets,
get_endpoints_for_sp(cfg.sp_name), get_endpoints_for_sp(cfg.sp_name),
executor='eventlet') executor='eventlet'
)
if __name__ == "__main__": if __name__ == "__main__":

View File

@ -41,7 +41,7 @@ def is_valid_uuid(value):
return False return False
class RequestHandler: class RequestHandler(object):
def __init__(self, method, path, headers): def __init__(self, method, path, headers):
self.method = method self.method = method
self.path = path self.path = path

View File

@ -93,8 +93,7 @@ def aggregate(responses, key, params=None, path=None, detailed=True):
# because we need sorting information. Here we # because we need sorting information. Here we
# remove the extra values /volumes/detail provides # remove the extra values /volumes/detail provides
if key == 'volumes' and not detailed: if key == 'volumes' and not detailed:
resource_list[start:end] = \ resource_list[start:end] = _remove_details(resource_list[start:end])
_remove_details(resource_list[start:end])
response = {key: resource_list[start:end]} response = {key: resource_list[start:end]}
@ -122,14 +121,9 @@ def list_api_versions(service_type, url):
else: else:
info.update({'status': 'SUPPORTED'}) info.update({'status': 'SUPPORTED'})
info.update({ info.update({'id': version,
'id': version, 'links': [{'href': '%s/%s/' % (url, version[:-2]),
'links': [ 'rel': 'self'}]})
{'href': '%s/%s/' % (url,
version[:-2]),
'rel': 'self'}
]
})
api_versions.append(info) api_versions.append(info)
return json.dumps({'versions': api_versions}) return json.dumps({'versions': api_versions})

View File

@ -36,5 +36,5 @@ def chunked_reader():
try: try:
while True: while True:
yield stream.next() yield stream.next()
except: except Exception:
return return

View File

@ -27,15 +27,15 @@ class TestListener(testcase.TestCase):
dispatcher = notify_dispatcher.NotificationDispatcher( dispatcher = notify_dispatcher.NotificationDispatcher(
endpoints, serializer=None) endpoints, serializer=None)
MESSAGE = { MESSAGE = {
'payload': { 'payload': {
'volume_id': "1232123212321", 'volume_id': "1232123212321",
'tenant_id': "abdbabdbabdba" 'tenant_id': "abdbabdbabdba"
}, },
'priority': 'info', 'priority': 'info',
'publisher_id': 'volume.node4', 'publisher_id': 'volume.node4',
'event_type': 'volume.create.start', 'event_type': 'volume.create.start',
'timestamp': '2014-03-03 18:21:04.369234', 'timestamp': '2014-03-03 18:21:04.369234',
'message_id': '99863dda-97f0-443a-a0c1-6ed317b7fd45' 'message_id': '99863dda-97f0-443a-a0c1-6ed317b7fd45'
} }
incoming = mock.Mock(ctxt={}, message=MESSAGE) incoming = mock.Mock(ctxt={}, message=MESSAGE)
dispatcher.dispatch(incoming) dispatcher.dispatch(incoming)
@ -52,15 +52,15 @@ class TestListener(testcase.TestCase):
dispatcher = notify_dispatcher.NotificationDispatcher( dispatcher = notify_dispatcher.NotificationDispatcher(
endpoints, serializer=None) endpoints, serializer=None)
MESSAGE = { MESSAGE = {
'payload': { 'payload': {
'volume_id': "1232123212321", 'volume_id': "1232123212321",
'tenant_id': "abdbabdbabdba" 'tenant_id': "abdbabdbabdba"
}, },
'priority': 'info', 'priority': 'info',
'publisher_id': 'volume.node4', 'publisher_id': 'volume.node4',
'event_type': 'volume.delete.end', 'event_type': 'volume.delete.end',
'timestamp': '2014-03-03 18:21:04.369234', 'timestamp': '2014-03-03 18:21:04.369234',
'message_id': '99863dda-97f0-443a-a0c1-6ed317b7fd45' 'message_id': '99863dda-97f0-443a-a0c1-6ed317b7fd45'
} }
incoming = mock.Mock(ctxt={}, message=MESSAGE) incoming = mock.Mock(ctxt={}, message=MESSAGE)
dispatcher.dispatch(incoming) dispatcher.dispatch(incoming)
@ -73,15 +73,15 @@ class TestListener(testcase.TestCase):
dispatcher = notify_dispatcher.NotificationDispatcher( dispatcher = notify_dispatcher.NotificationDispatcher(
endpoints, serializer=None) endpoints, serializer=None)
MESSAGE = { MESSAGE = {
'payload': { 'payload': {
'snapshot_id': "1232123212321", 'snapshot_id': "1232123212321",
'tenant_id': "abdbabdbabdba" 'tenant_id': "abdbabdbabdba"
}, },
'priority': 'info', 'priority': 'info',
'publisher_id': 'snapshot.node4', 'publisher_id': 'snapshot.node4',
'event_type': 'snapshot.create.start', 'event_type': 'snapshot.create.start',
'timestamp': '2014-03-03 18:21:04.369234', 'timestamp': '2014-03-03 18:21:04.369234',
'message_id': '99863dda-97f0-443a-a0c1-6ed317b7fd45' 'message_id': '99863dda-97f0-443a-a0c1-6ed317b7fd45'
} }
incoming = mock.Mock(ctxt={}, message=MESSAGE) incoming = mock.Mock(ctxt={}, message=MESSAGE)
dispatcher.dispatch(incoming) dispatcher.dispatch(incoming)
@ -98,15 +98,15 @@ class TestListener(testcase.TestCase):
dispatcher = notify_dispatcher.NotificationDispatcher( dispatcher = notify_dispatcher.NotificationDispatcher(
endpoints, serializer=None) endpoints, serializer=None)
MESSAGE = { MESSAGE = {
'payload': { 'payload': {
'snapshot_id': "1232123212321", 'snapshot_id': "1232123212321",
'tenant_id': "abdbabdbabdba" 'tenant_id': "abdbabdbabdba"
}, },
'priority': 'info', 'priority': 'info',
'publisher_id': 'snapshot.node4', 'publisher_id': 'snapshot.node4',
'event_type': 'snapshot.delete.end', 'event_type': 'snapshot.delete.end',
'timestamp': '2014-03-03 18:21:04.369234', 'timestamp': '2014-03-03 18:21:04.369234',
'message_id': '99863dda-97f0-443a-a0c1-6ed317b7fd45' 'message_id': '99863dda-97f0-443a-a0c1-6ed317b7fd45'
} }
incoming = mock.Mock(ctxt={}, message=MESSAGE) incoming = mock.Mock(ctxt={}, message=MESSAGE)
dispatcher.dispatch(incoming) dispatcher.dispatch(incoming)
@ -119,15 +119,15 @@ class TestListener(testcase.TestCase):
dispatcher = notify_dispatcher.NotificationDispatcher( dispatcher = notify_dispatcher.NotificationDispatcher(
endpoints, serializer=None) endpoints, serializer=None)
MESSAGE = { MESSAGE = {
'payload': { 'payload': {
'id': "1232123212321", 'id': "1232123212321",
'owner': "abdbabdbabdba" 'owner': "abdbabdbabdba"
}, },
'priority': 'info', 'priority': 'info',
'publisher_id': 'image.node4', 'publisher_id': 'image.node4',
'event_type': 'image.create', 'event_type': 'image.create',
'timestamp': '2014-03-03 18:21:04.369234', 'timestamp': '2014-03-03 18:21:04.369234',
'message_id': '99863dda-97f0-443a-a0c1-6ed317b7fd45' 'message_id': '99863dda-97f0-443a-a0c1-6ed317b7fd45'
} }
incoming = mock.Mock(ctxt={}, message=MESSAGE) incoming = mock.Mock(ctxt={}, message=MESSAGE)
dispatcher.dispatch(incoming) dispatcher.dispatch(incoming)
@ -144,15 +144,15 @@ class TestListener(testcase.TestCase):
dispatcher = notify_dispatcher.NotificationDispatcher( dispatcher = notify_dispatcher.NotificationDispatcher(
endpoints, serializer=None) endpoints, serializer=None)
MESSAGE = { MESSAGE = {
'payload': { 'payload': {
'id': "1232123212321", 'id': "1232123212321",
'owner': "abdbabdbabdba" 'owner': "abdbabdbabdba"
}, },
'priority': 'info', 'priority': 'info',
'publisher_id': 'image.node4', 'publisher_id': 'image.node4',
'event_type': 'image.delete', 'event_type': 'image.delete',
'timestamp': '2014-03-03 18:21:04.369234', 'timestamp': '2014-03-03 18:21:04.369234',
'message_id': '99863dda-97f0-443a-a0c1-6ed317b7fd45' 'message_id': '99863dda-97f0-443a-a0c1-6ed317b7fd45'
} }
incoming = mock.Mock(ctxt={}, message=MESSAGE) incoming = mock.Mock(ctxt={}, message=MESSAGE)
dispatcher.dispatch(incoming) dispatcher.dispatch(incoming)

View File

@ -25,7 +25,7 @@ from mixmatch.proxy import app
from mixmatch.model import BASE, enginefacade, insert, ResourceMapping from mixmatch.model import BASE, enginefacade, insert, ResourceMapping
class FakeSession(): class FakeSession(object):
"""A replacement for keystoneauth1.session.Session.""" """A replacement for keystoneauth1.session.Session."""
def __init__(self, token, project): def __init__(self, token, project):
self.token = token self.token = token

View File

@ -23,16 +23,18 @@ from mixmatch import services
from mixmatch.tests.unit import samples from mixmatch.tests.unit import samples
class Response: class Response(object):
def __init__(self, text): def __init__(self, text):
self.text = text self.text = text
# Source: http://stackoverflow.com/a/9468284 # Source: http://stackoverflow.com/a/9468284
class Url(object): class Url(object):
"""A url object that can be compared with other url orbjects """Url object that can be compared with other url objects
without regard to the vagaries of encoding, escaping, and ordering
of parameters in query strings.""" This comparison is done without regard to the vagaries of encoding,
escaping, and ordering of parameters in query strings.
"""
def __init__(self, url): def __init__(self, url):
parts = parse.urlparse(url) parts = parse.urlparse(url)

View File

@ -33,10 +33,10 @@ commands = oslo_debug_helper {posargs}
[flake8] [flake8]
# E123, E125 skipped as they are invalid PEP-8. # E123, E125 skipped as they are invalid PEP-8.
# FIXME(knikolla): The tests here below need additional work to pass # TODO(knikolla): These are the tests we should make passing
# They used to not be checked by our previous CI # H306 Imports in alphabetical order
# H306, H301, E126, H238, E241, H405, H403, E121, H201 # H301 One import per line
show-source = True show-source = True
ignore = E123,E125,H306,H301,E126,H238,E241,H405,H403,E121,H201 ignore = E123,E125,H306,H301
builtins = _ builtins = _
exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build