pep8: fixed multiple violations

Fixed violations:

* E128 continuation line under-indented for visual indent
* E251 unexpected spaces around keyword / parameter equals
* E265 block comment should start with '# '
* E713 test for membership should be 'not in'
* F402 import shadowed by loop variable
* H305 imports not grouped correctly
* H307 like imports should be grouped together
* H402 one line docstring needs punctuation
* H703 multiple positional placeholders

Also enabled the H803 check, which had no existing violations.

Change-Id: I957ab273ddc61b02763d6b60b21d11ed4e73d472
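As a rough illustration of how mechanical these fixes are, here is a before/after sketch of three of the checks (E265, E713, H402); the function and names are invented for the example and do not appear in the diff below.

    # Before: E265 (block comment should start with '# '), E713 (use 'not in')
    # and H402 (one-line docstring needs punctuation) would all fire here.
    def is_cached_before(cache, key):
        """Return True when key is already cached"""
        #check the local cache first
        if not key in cache:
            return False
        return True


    # After: the same logic with the three violations corrected.
    def is_cached_after(cache, key):
        """Return True when key is already cached."""
        # check the local cache first
        if key not in cache:
            return False
        return True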
@@ -481,7 +481,7 @@ class Resource(object):

     def __getattr__(self, k):
         if k not in self.__dict__:
-            #NOTE(bcwaldon): disallow lazy-loading if already loaded once
+            # NOTE(bcwaldon): disallow lazy-loading if already loaded once
             if not self.is_loaded():
                 self.get()
                 return self.__getattr__(k)
@@ -427,7 +427,7 @@ def from_response(response, method, url):
     """
     req_id = response.headers.get("x-openstack-request-id")
-    #NOTE(hdd) true for older versions of nova and cinder
+    # NOTE(hdd) true for older versions of nova and cinder
     if not req_id:
         req_id = response.headers.get("x-compute-request-id")
     kwargs = {
@@ -79,7 +79,7 @@ class FakeHTTPClient(client.HTTPClient):
     def __init__(self, *args, **kwargs):
         self.callstack = []
         self.fixtures = kwargs.pop("fixtures", None) or {}
-        if not args and not "auth_plugin" in kwargs:
+        if not args and "auth_plugin" not in kwargs:
            args = (None, )
        super(FakeHTTPClient, self).__init__(*args, **kwargs)
@@ -18,7 +18,7 @@ import six
 @six.add_metaclass(abc.ABCMeta)
 class MigrationExtensionBase(object):

-    #used to sort migration in logical order
+    # used to sort migration in logical order
     order = 0

     @property
@@ -49,7 +49,7 @@ class MigrateExtension(ext_base.MigrationExtensionBase):
     def downgrade(self, version):
         try:
-            #version for migrate should be valid int - else skip
+            # version for migrate should be valid int - else skip
             if version in ('base', None):
                 version = self.init_version
             version = int(version)
@@ -17,7 +17,7 @@ MIGRATION_NAMESPACE = 'openstack.common.migration'


 def check_plugin_enabled(ext):
-    """Used for EnabledExtensionManager"""
+    """Used for EnabledExtensionManager."""
     return ext.obj.enabled
@@ -46,7 +46,7 @@ class MigrationManager(object):
     def downgrade(self, revision):
         """Downgrade database with available backends."""
-        #downgrading should be performed in reversed order
+        # downgrading should be performed in reversed order
         results = []
         for plugin in reversed(self._plugins):
             results.append(plugin.downgrade(revision))
@@ -63,7 +63,7 @@ class MigrationManager(object):
     def revision(self, message, autogenerate):
         """Generate template or autogenerated revision."""
-        #revision should be done only by last plugin
+        # revision should be done only by last plugin
         return self._plugins[-1].revision(message, autogenerate)

     def stamp(self, revision):
@@ -723,9 +723,10 @@ def _patch_mysqldb_with_stacktrace_comments():

     Patches MySQLdb.cursors.BaseCursor._do_query.
     """
-    import MySQLdb.cursors
     import traceback

+    import MySQLdb.cursors
+
     old_mysql_do_query = MySQLdb.cursors.BaseCursor._do_query

     def _do_query(self, q):
@@ -264,6 +264,7 @@ class WalkVersionsMixin(object):
                 if check:
                     check(engine, data)
         except Exception:
-            LOG.error(_LE("Failed to migrate to version %s on engine %s") %
-                      (version, engine))
+            LOG.error(_LE("Failed to migrate to version %(version)s "
+                          "on engine %(engine)s") % {'version': version,
+                                                     'engine': engine})
             raise
@@ -17,12 +17,12 @@

 ##############################################################################
 ##############################################################################
-##
-## DO NOT MODIFY THIS FILE
-##
-## This file is being graduated to the oslotest library. Please make all
-## changes there, and only backport critical fixes here. - dhellmann
-##
+#
+# DO NOT MODIFY THIS FILE
+#
+# This file is being graduated to the oslotest library. Please make all
+# changes there, and only backport critical fixes here. - dhellmann
+#
 ##############################################################################
 ##############################################################################
@@ -17,12 +17,12 @@

 ##############################################################################
 ##############################################################################
-##
-## DO NOT MODIFY THIS FILE
-##
-## This file is being graduated to the oslotest library. Please make all
-## changes there, and only backport critical fixes here. - dhellmann
-##
+#
+# DO NOT MODIFY THIS FILE
+#
+# This file is being graduated to the oslotest library. Please make all
+# changes there, and only backport critical fixes here. - dhellmann
+#
 ##############################################################################
 ##############################################################################
@@ -373,8 +373,8 @@ def get_available_languages(domain):
                'zh_Hant_HK': 'zh_HK',
                'zh_Hant': 'zh_TW',
                'fil': 'tl_PH'}
-    for (locale, alias) in six.iteritems(aliases):
-        if locale in language_list and alias not in language_list:
+    for (locale_, alias) in six.iteritems(aliases):
+        if locale_ in language_list and alias not in language_list:
             language_list.append(alias)

     _AVAILABLE_LANGUAGES[domain] = language_list
@@ -196,6 +196,7 @@ if os.name == 'nt':
 else:
     import base64
     import hashlib
+
     import posix_ipc
     InterProcessLock = _PosixLock
     FileLock = _FcntlLock
@@ -462,9 +462,8 @@ def setup(product_name, version='unknown'):


 def set_defaults(logging_context_format_string):
-    cfg.set_defaults(log_opts,
-                     logging_context_format_string=
-                     logging_context_format_string)
+    cfg.set_defaults(
+        log_opts, logging_context_format_string=logging_context_format_string)


 def _find_facility_from_conf():
@@ -25,7 +25,7 @@ from openstack.common.gettextutils import _
 from openstack.common.middleware import base


-#default request size is 112k
+# default request size is 112k
 max_req_body_size = cfg.IntOpt('max_request_body_size',
                                deprecated_name='osapi_max_request_body_size',
                                default=114688,
@@ -74,7 +74,8 @@ class QuotaException(Exception):
                 # log the issue and the kwargs
                 LOG.exception(_LE('Exception in string format operation'))
                 for name, value in six.iteritems(kwargs):
-                    LOG.error(_LE("%s: %s") % (name, value))
+                    LOG.error(_LE("%(name)s: %(value)s") % {'name': name,
+                                                            'value': value})
                 # at least get the core message out if something happened
                 message = self.msg_fmt
         super(QuotaException, self).__init__(message)
@@ -320,7 +320,7 @@ class ConsumerBase(object):

     @classmethod
     def normalize_reply(self, result, replies):
-        #TODO(ewindisch): re-evaluate and document this method.
+        # TODO(ewindisch): re-evaluate and document this method.
         if isinstance(result, types.GeneratorType):
             return list(result)
         elif replies:
@@ -539,7 +539,7 @@ class ZmqReactor(ZmqBaseReactor):
         super(ZmqReactor, self).__init__(conf)

     def consume(self, sock):
-        #TODO(ewindisch): use zero-copy (i.e. references, not copying)
+        # TODO(ewindisch): use zero-copy (i.e. references, not copying)
         data = sock.recv()
         LOG.debug("CONSUMER RECEIVED DATA: %s", data)
@@ -127,10 +127,10 @@ class MatchMakerBase(object):
     def add_binding(self, binding, rule, last=True):
         self.bindings.append((binding, rule, False, last))

-        #NOTE(ewindisch): kept the following method in case we implement the
-        # underlying support.
-        #def add_negate_binding(self, binding, rule, last=True):
-        #    self.bindings.append((binding, rule, True, last))
+        # NOTE(ewindisch): kept the following method in case we implement the
+        # underlying support.
+        # def add_negate_binding(self, binding, rule, last=True):
+        #     self.bindings.append((binding, rule, True, last))

     def queues(self, key):
         workers = []
@@ -15,12 +15,12 @@

 ##############################################################################
 ##############################################################################
-##
-## DO NOT MODIFY THIS FILE
-##
-## This file is being graduated to the oslotest library. Please make all
-## changes there, and only backport critical fixes here. - dhellmann
-##
+#
+# DO NOT MODIFY THIS FILE
+#
+# This file is being graduated to the oslotest library. Please make all
+# changes there, and only backport critical fixes here. - dhellmann
+#
 ##############################################################################
 ##############################################################################
@@ -17,7 +17,7 @@
 Unit constants
 """

-#Binary unit constants.
+# Binary unit constants.
 Ki = 1024
 Mi = 1024 ** 2
 Gi = 1024 ** 3
@@ -27,7 +27,7 @@ Ei = 1024 ** 6
 Zi = 1024 ** 7
 Yi = 1024 ** 8

-#Decimal unit constants.
+# Decimal unit constants.
 k = 1000
 M = 1000 ** 2
 G = 1000 ** 3
@@ -18,6 +18,7 @@ Helpers for comparing version strings.
 """

 import functools
+
 import pkg_resources

 from openstack.common.gettextutils import _
@@ -12,9 +12,10 @@
 # License for the specific language governing permissions and limitations
 # under the License.

-from openstack.common import gettextutils
 from oslo.config import cfg

+from openstack.common import gettextutils
+
 CONF = cfg.CONF

 # Note that this is using the Message class directly instead of using
@@ -93,7 +93,7 @@ class GlobalFunctionsTest(test_base.BaseTestCase):
         self.useFixture(fixtures.MonkeyPatch(
             "os.environ",
             {"OS_TENANT_NAME": "fake-project",
-            "OS_USERNAME": "fake-username"}))
+             "OS_USERNAME": "fake-username"}))
         parser = argparse.ArgumentParser()
         auth.load_auth_system_opts(parser)
         options = parser.parse_args(
@@ -75,14 +75,14 @@ pool_timeout=7

     def test_dbapi_database_deprecated_parameters(self):
         path = self.create_tempfiles([['tmp', b'[DATABASE]\n'
-                                      b'sql_connection=fake_connection\n'
-                                      b'sql_idle_timeout=100\n'
-                                      b'sql_min_pool_size=99\n'
-                                      b'sql_max_pool_size=199\n'
-                                      b'sql_max_retries=22\n'
-                                      b'reconnect_interval=17\n'
-                                      b'sqlalchemy_max_overflow=101\n'
-                                      b'sqlalchemy_pool_timeout=5\n'
+                                       b'sql_connection=fake_connection\n'
+                                       b'sql_idle_timeout=100\n'
+                                       b'sql_min_pool_size=99\n'
+                                       b'sql_max_pool_size=199\n'
+                                       b'sql_max_retries=22\n'
+                                       b'reconnect_interval=17\n'
+                                       b'sqlalchemy_max_overflow=101\n'
+                                       b'sqlalchemy_pool_timeout=5\n'
                                        ]])[0]
         self.conf(['--config-file', path])
         self.assertEqual(self.conf.database.connection, 'fake_connection')
@@ -96,8 +96,8 @@ pool_timeout=7

     def test_dbapi_database_deprecated_parameters_sql(self):
         path = self.create_tempfiles([['tmp', b'[sql]\n'
-                                      b'connection=test_sql_connection\n'
-                                      b'idle_timeout=99\n'
+                                       b'connection=test_sql_connection\n'
+                                       b'idle_timeout=99\n'
                                        ]])[0]
         self.conf(['--config-file', path])
         self.assertEqual(self.conf.database.connection, 'test_sql_connection')
@@ -15,12 +15,12 @@

 ##############################################################################
 ##############################################################################
-##
-## DO NOT MODIFY THIS FILE
-##
-## This file is being graduated to the oslotest library. Please make all
-## changes there, and only backport critical fixes here. - dhellmann
-##
+#
+# DO NOT MODIFY THIS FILE
+#
+# This file is being graduated to the oslotest library. Please make all
+# changes there, and only backport critical fixes here. - dhellmann
+#
 ##############################################################################
 ##############################################################################
@@ -13,8 +13,9 @@
 # License for the specific language governing permissions and limitations
 # under the License.

-import mock
 import uuid
+
+import mock
 import webob

 from openstack.common.middleware import notifier
@@ -118,8 +118,8 @@ class BaseRpcAMQPTestCase(common.BaseRpcTestCase):

         class MyReplyProxy(rpc_amqp.ReplyProxy):
             def _process_data(myself, message_data):
-                #with open('mylog', 'a') as f:
-                #    f.write('my_process_data: ' + str(message_data) + '\n')
+                # with open('mylog', 'a') as f:
+                #     f.write('my_process_data: ' + str(message_data) + '\n')
                 self.assertTrue('_msg_id' in message_data)
                 self.ReplyProxy_was_called = True
                 super(MyReplyProxy, myself)._process_data(message_data)
@@ -281,7 +281,7 @@ class BaseRpcTestCase(test_base.BaseTestCase):
         self.assertEqual(synced_echo_call.wait_states(),
                          synced_echo_call.expected_wait_states())

-        #synced_echo_call.print_times() #for DEBUG
+        # synced_echo_call.print_times() #for DEBUG
         self.assertEqual((r1, r2, r3), (1, 2, 3))
         self.assertTrue(synced_echo_call.verify_time_order(callid3, callid1,
                                                            callid2))
@@ -347,7 +347,7 @@ class SyncedEchoCall():
         self.list[idx].event.send()
         retval = self.list[idx].gthread.wait()
         self.list[idx].expected_wait_state = False
-        #self.print_wait_states() #for DEBUG
+        # self.print_wait_states() #for DEBUG
         return retval

     def wait(self, idx):
@@ -361,20 +361,20 @@ class SyncedEchoCall():
                self.list[idx2].time < self.list[idx3].time

     # for DEBUG
-    #def print_times(self):
-    #    # change /dev/null to name to get output to a log file
-    #    with open('mylog', 'a') as f:
-    #        f.write('SyncedEchoCall times: ' + '\n')
-    #        f.write('  ' + str(self.list[0].time) + '\n')
-    #        f.write('  ' + str(self.list[1].time) + '\n')
-    #        f.write('  ' + str(self.list[2].time) + '\n')
+    # def print_times(self):
+    #     # change /dev/null to name to get output to a log file
+    #     with open('mylog', 'a') as f:
+    #         f.write('SyncedEchoCall times: ' + '\n')
+    #         f.write('  ' + str(self.list[0].time) + '\n')
+    #         f.write('  ' + str(self.list[1].time) + '\n')
+    #         f.write('  ' + str(self.list[2].time) + '\n')

     # for DEBUG
-    #def print_wait_states(self):
-    #    # change /dev/null to name to get output to a log file
-    #    with open('mylog', 'a') as f:
-    #        f.write('SyncedEchoCall times: ' +
-    #                str(self.wait_states()) + '\n')
+    # def print_wait_states(self):
+    #     # change /dev/null to name to get output to a log file
+    #     with open('mylog', 'a') as f:
+    #         f.write('SyncedEchoCall times: ' +
+    #                 str(self.wait_states()) + '\n')


 class TestReceiver(object):
@@ -126,7 +126,7 @@ class RpcCommonTestCase(test_base.BaseTestCase):
         after_exc = rpc_common.deserialize_remote_exception(self.FLAGS,
                                                             serialized)
         self.assertTrue(isinstance(after_exc, NotImplementedError))
-        #assure the traceback was added
+        # assure the traceback was added
         self.assertTrue('raise NotImplementedError' in
                         six.text_type(after_exc))
@@ -158,7 +158,7 @@ class RpcCommonTestCase(test_base.BaseTestCase):
                                                             serialized)
         self.assertTrue(isinstance(after_exc, FakeUserDefinedException))
         self.assertTrue('foobar' in six.text_type(after_exc))
-        #assure the traceback was added
+        # assure the traceback was added
         self.assertTrue('raise FakeUserDefinedException' in
                         six.text_type(after_exc))
@@ -204,7 +204,7 @@ class RpcCommonTestCase(test_base.BaseTestCase):
         self.assertTrue(isinstance(after_exc, rpc_common.RemoteError))
         self.assertTrue(six.text_type(after_exc).startswith(
             "Remote error: FakeIDontExistException"))
-        #assure the traceback was added
+        # assure the traceback was added
         self.assertTrue('raise FakeIDontExistException' in
                         six.text_type(after_exc))
@@ -20,7 +20,6 @@ Unit Tests for remote procedure calls using fake_impl

 import eventlet
 eventlet.monkey_patch()

 from oslo.config import cfg

 from openstack.common.rpc import impl_fake
@@ -43,6 +43,7 @@ try:
     import kombu
     import kombu.connection
     import kombu.entity
+
     from openstack.common.rpc import impl_kombu
 except ImportError:
     kombu = None
@@ -585,7 +586,7 @@ class RpcKombuTestCase(amqp.BaseRpcAMQPTestCase):
             self.fail("should have thrown Exception")
         except NotImplementedError as exc:
             self.assertTrue(value in six.text_type(exc))
-            #Traceback should be included in exception message
+            # Traceback should be included in exception message
             self.assertTrue('raise NotImplementedError(value)' in
                             six.text_type(exc))
@@ -614,7 +615,7 @@ class RpcKombuTestCase(amqp.BaseRpcAMQPTestCase):
             self.fail("should have thrown Exception")
         except common.ApiError as exc:
             self.assertTrue(value in six.text_type(exc))
-            #Traceback should be included in exception message
+            # Traceback should be included in exception message
             self.assertTrue('ApiError' in six.text_type(exc))

     def test_create_worker(self):
@@ -29,6 +29,7 @@ from openstack.common.fixture import config

 try:
     import kombu
+
     from openstack.common.rpc import impl_kombu
 except ImportError:
     kombu = None
@@ -62,16 +63,16 @@ class RpcKombuSslTestCase(test_base.BaseTestCase):
         rpc = impl_kombu
         conn = rpc.create_connection(self.FLAGS, True)
         c = conn.connection
-        #This might be kombu version dependent...
-        #Since we are now peaking into the internals of kombu...
+        # This might be kombu version dependent...
+        # Since we are now peaking into the internals of kombu...
         self.assertTrue(isinstance(c.connection.ssl, dict))
         self.assertEqual(ssl.PROTOCOL_SSLv3,
                          c.connection.ssl.get("ssl_version"))
         self.assertEqual(SSL_CERT, c.connection.ssl.get("certfile"))
         self.assertEqual(SSL_CA_CERT, c.connection.ssl.get("ca_certs"))
         self.assertEqual(SSL_KEYFILE, c.connection.ssl.get("keyfile"))
-        #That hash then goes into amqplib which then goes
-        #Into python ssl creation...
+        # That hash then goes into amqplib which then goes
+        # Into python ssl creation...


 class RpcKombuSslBadVersionTestCase(test_base.BaseTestCase):
@@ -30,7 +30,7 @@ LOG = logging.getLogger(__name__)

 class MatchMakerRedisLookupTestCase(test_base.BaseTestCase,
                                     common._MatchMakerTestCase):
-    """Test lookups against the Redis matchmaker"""
+    """Test lookups against the Redis matchmaker."""
     def setUp(self):
         super(MatchMakerRedisLookupTestCase, self).setUp()
         self.config = self.useFixture(config.Config()).config
@@ -410,7 +410,7 @@ class RpcQpidTestCase(tests.utils.BaseTestCase):
                                  server_params['password'])
                 self.assertEqual(myself.brokers,
                                  [server_params['hostname'] + ':' +
-                                 str(server_params['port'])])
+                                  str(server_params['port'])])

         MyConnection.pool = rpc_amqp.Pool(self.FLAGS, MyConnection)
         self.stubs.Set(impl_qpid, 'Connection', MyConnection)
@@ -447,7 +447,7 @@ class RpcQpidTestCase(tests.utils.BaseTestCase):
             r'^.*/.* ; {"node": {"x-declare": {"auto-delete":'
             ' true, "durable": true, "type": "direct"}, "type": '
             '"topic"}, "create": "always", "link": {"x-declare": '
-            #'{"auto-delete": true, "exclusive": true, "durable": '
+            # '{"auto-delete": true, "exclusive": true, "durable": '
             '{"exclusive": true, "auto-delete": false, "durable": '
             'false}, "durable": true, "name": ".*"}}')
         self.mock_session.receiver(rcv_addr).AndReturn(self.mock_receiver)
@@ -801,7 +801,7 @@ class RpcQpidTestCase(tests.utils.BaseTestCase):


 #
-#from nova.tests.rpc import common
+# from nova.tests.rpc import common
 #
 # Qpid does not have a handy in-memory transport like kombu, so it's not
 # terribly straight forward to take advantage of the common unit tests.
@@ -31,7 +31,7 @@ class TestFilter(filters.BaseHostFilter):


 class TestBogusFilter(object):
-    """Class that doesn't inherit from BaseHostFilter"""
+    """Class that doesn't inherit from BaseHostFilter."""
     pass
@@ -32,7 +32,7 @@ class FuncutilsTestCase(test_base.BaseTestCase):
         kwargs = {'instance': {'uuid': 1}, 'red': 3, 'blue': 4}
         callargs = funcutils.getcallargs(self._test_func, *args, **kwargs)

-        #implicit self counts as an arg
+        # implicit self counts as an arg
         self.assertEqual(4, len(callargs))
         self.assertTrue('instance' in callargs)
         self.assertEqual({'uuid': 1}, callargs['instance'])
@@ -46,7 +46,7 @@ class FuncutilsTestCase(test_base.BaseTestCase):
         kwargs = {}
         callargs = funcutils.getcallargs(self._test_func, *args, **kwargs)

-        #implicit self counts as an arg
+        # implicit self counts as an arg
         self.assertEqual(4, len(callargs))
         self.assertTrue('instance' in callargs)
         self.assertEqual({'uuid': 1}, callargs['instance'])
@@ -60,7 +60,7 @@ class FuncutilsTestCase(test_base.BaseTestCase):
         kwargs = {'blue': 4}
         callargs = funcutils.getcallargs(self._test_func, *args, **kwargs)

-        #implicit self counts as an arg
+        # implicit self counts as an arg
         self.assertEqual(4, len(callargs))
         self.assertTrue('instance' in callargs)
         self.assertEqual({'uuid': 1}, callargs['instance'])
@@ -74,7 +74,7 @@ class FuncutilsTestCase(test_base.BaseTestCase):
         kwargs = {'instance': {'uuid': 1}, 'red': 3}
         callargs = funcutils.getcallargs(self._test_func, *args, **kwargs)

-        #implicit self counts as an arg
+        # implicit self counts as an arg
         self.assertEqual(4, len(callargs))
         self.assertTrue('instance' in callargs)
         self.assertEqual({'uuid': 1}, callargs['instance'])
@@ -88,7 +88,7 @@ class FuncutilsTestCase(test_base.BaseTestCase):
         kwargs = {}
         callargs = funcutils.getcallargs(self._test_func, *args, **kwargs)

-        #implicit self counts as an arg
+        # implicit self counts as an arg
         self.assertEqual(4, len(callargs))
         self.assertTrue('instance' in callargs)
         self.assertEqual({'uuid': 1}, callargs['instance'])
@@ -195,9 +195,9 @@ class ToPrimitiveTestCase(test_base.BaseTestCase):
         self.assertEqual(len(ret), 3)
         self.assertTrue(ret[0].startswith(u"<module 'datetime' from "))
         if six.PY3:
-            self.assertTrue(ret[1].startswith('<function '
-                            'ToPrimitiveTestCase.test_nasties.<locals>.foo '
-                            'at 0x'))
+            self.assertTrue(ret[1].startswith(
+                '<function ToPrimitiveTestCase.test_nasties.<locals>.foo at 0x'
+            ))
         else:
             self.assertTrue(ret[1].startswith('<function foo at 0x'))
         self.assertEqual(ret[2], '<built-in function dir>')
@@ -79,11 +79,11 @@ class LockTestCase(test_base.BaseTestCase):
     def test_synchronized_wrapped_function_metadata(self):
         @lockutils.synchronized('whatever', 'test-')
         def foo():
-            """Bar"""
+            """Bar."""
             pass

-        self.assertEqual(foo.__doc__, 'Bar', "Wrapped function's docstring "
-                                             "got lost")
+        self.assertEqual(foo.__doc__, 'Bar.', "Wrapped function's docstring "
+                                              "got lost")
         self.assertEqual(foo.__name__, 'foo', "Wrapped function's name "
                                               "got mangled")
@@ -192,7 +192,7 @@ class SysLogHandlersTestCase(test_base.BaseTestCase):
         self.logger.binary_name = 'Foo_application'

     def test_rfc_format(self):
-        """Ensure syslog msg contains APP-NAME for RFC wrapped handler"""
+        """Ensure syslog msg contains APP-NAME for RFC wrapped handler."""
         logrecord = logging.LogRecord('name', 'WARN', '/tmp', 1,
                                       'Message', None, None)
         expected = logging.LogRecord('name', 'WARN', '/tmp', 1,
@@ -201,7 +201,7 @@ class SysLogHandlersTestCase(test_base.BaseTestCase):
                          expected.getMessage())

     def test_standard_format(self):
-        """Ensure syslog msg isn't modified for standard handler"""
+        """Ensure syslog msg isn't modified for standard handler."""
         logrecord = logging.LogRecord('name', 'WARN', '/tmp', 1,
                                       'Message', None, None)
         expected = logrecord
@@ -210,7 +210,7 @@ class SysLogHandlersTestCase(test_base.BaseTestCase):


 class PublishErrorsHandlerTestCase(test_base.BaseTestCase):
-    """Tests for log.PublishErrorsHandler"""
+    """Tests for log.PublishErrorsHandler."""
     def setUp(self):
         super(PublishErrorsHandlerTestCase, self).setUp()
         self.stubs = self.useFixture(moxstubout.MoxStubout()).stubs
@@ -20,10 +20,10 @@ Unit Tests for periodic_task decorator and PeriodicTasks class.

 import mock
 from oslotest import base as test_base
+from testtools import matchers

 from openstack.common.fixture import config
 from openstack.common import periodic_task
-from testtools import matchers


 class AnException(Exception):
@@ -12,9 +12,10 @@
 # License for the specific language governing permissions and limitations
 # under the License.

-from oslotest import base as test_base
 from xml.dom import minidom

+from oslotest import base as test_base
+
 from openstack.common import xmlutils
@@ -42,11 +42,11 @@
 """Display a subunit stream through a colorized unittest test runner."""

 import heapq
-import subunit
 import sys
 import unittest

 import six
+import subunit
 import testtools
tox.ini
@@ -32,7 +32,7 @@ commands = python -m openstack.common.lockutils python setup.py test --slowest -

 [flake8]
 show-source = True
-ignore = E123,E126,E128,E251,E265,E713,F402,H305,H307,H402,H405,H703,H803,H904
+ignore = E123,E126,H405,H904
 exclude = .venv,.tox,dist,doc,*.egg,.update-venv

 [testenv:pep8]