Merge "Fix hacking warnings"

Zuul 2020-04-18 23:00:18 +00:00 committed by Gerrit Code Review
commit d4c7e27a7e
39 changed files with 128 additions and 112 deletions

View File

@@ -18,6 +18,9 @@
 An OpenStack REST API to Heat.
 """
+import os
+import sys
+
 from oslo_log import log as logging
@@ -27,8 +30,6 @@ LOG.warning('DEPRECATED: `heat-api` script is deprecated. Please use the '
             'system level heat binaries installed to start '
             'any of the heat services.')
-import os
-import sys

 # If ../heat/__init__.py exists, add ../ to Python search path, so that
 # it will override what happens to be installed in /usr/(local/)lib/python...
@@ -39,6 +40,6 @@ POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
 if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'heat', '__init__.py')):
     sys.path.insert(0, POSSIBLE_TOPDIR)
-from heat.cmd import api
+from heat.cmd import api  # noqa: E402
 api.main()

View File

@@ -20,6 +20,9 @@ translates it into a native representation. It then calls the heat-engine via
 AMQP RPC to implement them.
 """
+import os
+import sys
+
 from oslo_log import log as logging
@@ -29,9 +32,6 @@ LOG.warning('DEPRECATED: `heat-api-cfn` script is deprecated. Please use '
             'the system level heat binaries installed to start '
             'any of the heat services.')
-import os
-import sys

 # If ../heat/__init__.py exists, add ../ to Python search path, so that
 # it will override what happens to be installed in /usr/(local/)lib/python...
 POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
@@ -41,6 +41,6 @@ POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
 if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'heat', '__init__.py')):
     sys.path.insert(0, POSSIBLE_TOPDIR)
-from heat.cmd import api_cfn
+from heat.cmd import api_cfn  # noqa: E402
 api_cfn.main()

View File

@@ -20,6 +20,10 @@ Normal communications is done via the heat API which then calls into this
 engine.
 """
+import os
+import sys
+
 from oslo_log import log as logging
@@ -29,8 +33,6 @@ LOG.warning('DEPRECATED: `heat-engine` script is deprecated. '
             'Please use the system level heat binaries installed to '
             'start any of the heat services.')
-import os
-import sys

 # If ../heat/__init__.py exists, add ../ to Python search path, so that
 # it will override what happens to be installed in /usr/(local/)lib/python...
@@ -41,6 +43,6 @@ POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
 if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'heat', '__init__.py')):
     sys.path.insert(0, POSSIBLE_TOPDIR)
-from heat.cmd import engine
+from heat.cmd import engine  # noqa: E402
 engine.main()

View File

@@ -13,6 +13,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import os
+import sys
+
 from oslo_log import log as logging
@@ -21,8 +25,6 @@ LOG = logging.getLogger(__name__)
 LOG.warning('DEPRECATED: `heat-manage` script is deprecated. Please use '
             'the system level heat-manage binary.')
-import os
-import sys

 # If ../heat/__init__.py exists, add ../ to Python search path, so that
 # it will override what happens to be installed in /usr/(local/)lib/python...
@@ -32,6 +34,6 @@ POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
 if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'heat', '__init__.py')):
     sys.path.insert(0, POSSIBLE_TOPDIR)
-from heat.cmd import manage
+from heat.cmd import manage  # noqa: E402
 manage.main()
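
The four bin/ launcher hunks above all make the same change: `import os` and `import sys` move to the top of the script, while the final `from heat.cmd import ...` must stay below the `sys.path` manipulation and therefore carries a per-line `# noqa: E402` marker instead. A minimal standalone sketch of that trade-off (illustrative file, not part of the change; the real scripts import `heat.cmd` where this uses `json`):

    # sketch.py -- why one import still needs a per-line "# noqa: E402"
    import os
    import sys

    # Module-level code that has to run before the last import: put a source
    # checkout of the project ahead of any system-wide installation.
    POSSIBLE_TOPDIR = os.path.normpath(
        os.path.join(os.path.abspath(sys.argv[0]), os.pardir, os.pardir))
    if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'heat', '__init__.py')):
        sys.path.insert(0, POSSIBLE_TOPDIR)

    # E402 ("module level import not at top of file") flags any import that
    # follows executable statements, so this one is waived on its own line.
    import json  # noqa: E402

    print(json.dumps({'topdir': POSSIBLE_TOPDIR}))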

View File

@@ -15,6 +15,9 @@
 An OpenStack Heat server that can run all services.
 """
+# flake8: noqa: E402
+
 import eventlet
 eventlet.monkey_patch(os=False)
@@ -65,15 +68,15 @@ def _start_service_threads(services):
 def launch_all(setup_logging=True):
     if setup_logging:
         logging.register_options(cfg.CONF)
     cfg.CONF(project='heat', prog='heat-all',
              version=version.version_info.version_string())
     if setup_logging:
         logging.setup(cfg.CONF, 'heat-all')
     config.set_config_defaults()
     messaging.setup()
     return _start_service_threads(set(cfg.CONF.heat_all.enabled_services))


 def main():
def main(): def main():

View File

@@ -16,6 +16,8 @@
 An OpenStack ReST API to Heat.
 """
+# flake8: noqa: E402
+
 import eventlet
 eventlet.monkey_patch(os=False)

View File

@@ -18,6 +18,8 @@ translates it into a native representation. It then calls the heat-engine via
 AMQP RPC to implement them.
 """
+# flake8: noqa: E402
+
 import eventlet
 eventlet.monkey_patch(os=False)

View File

@@ -18,6 +18,8 @@ Normal communications is done via the heat API which then calls into this
 engine.
 """
+# flake8: noqa: E402
+
 import eventlet
 eventlet.monkey_patch()
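
The heat/cmd entry points above take the other route: `eventlet.monkey_patch()` has to run before the remaining imports so that the standard library is already green-thread aware when later modules load, so E402 cannot be avoided there. The new file-level `# flake8: noqa: E402` comment is meant to cover the whole module instead of marking every late import (how flake8 treats code-qualified file-level directives has varied between releases). A rough sketch of why the ordering matters (illustrative, not from the tree):

    # Sketch of the monkey-patch-first ordering used by the heat/cmd entry points.
    import eventlet

    # Swap blocking stdlib primitives (sockets, threading, time, ...) for
    # cooperative green versions before anything else can import them.
    eventlet.monkey_patch(os=False)

    import socket  # deliberately after the patch -- exactly what E402 objects to


    def main():
        # socket.socket now refers to eventlet's green socket class.
        print(socket.socket)


    if __name__ == '__main__':
        main()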

View File

@@ -224,6 +224,7 @@ def add_command_parsers(subparsers):
     ServiceManageCommand.add_service_parsers(subparsers)

+
 command_opt = cfg.SubCommandOpt('command',
                                 title='Commands',
                                 help=_('Show available commands.'),

View File

@@ -50,5 +50,6 @@ def main():
     return upgradecheck.main(
         cfg.CONF, project='heat', upgrade_command=Checks())

+
 if __name__ == '__main__':
     sys.exit(main())

View File

@@ -105,11 +105,11 @@ class CheckResource(object):
     def _retrigger_new_traversal(self, cnxt, current_traversal, is_update,
                                  stack_id, rsrc_id):
         latest_stack = parser.Stack.load(cnxt, stack_id=stack_id,
                                          force_reload=True)
         if current_traversal != latest_stack.current_traversal:
             self.retrigger_check_resource(cnxt, is_update, rsrc_id,
                                           latest_stack)

     def _handle_stack_timeout(self, cnxt, stack):
         failure_reason = u'Timed out'
@@ -183,7 +183,7 @@
         except scheduler.Timeout:
             self._handle_resource_failure(cnxt, is_update, rsrc.id,
                                           stack, u'Timed out')
-        except CancelOperation as ex:
+        except CancelOperation:
             # Stack is already marked FAILED, so we just need to retrigger
             # in case a new traversal has started and is waiting on us.
             self._retrigger_new_traversal(cnxt, current_traversal, is_update,

View File

@@ -321,7 +321,7 @@ class Property(object):
             value = self.has_default() and self.default() or []
             if self.schema.allow_conversion and isinstance(value,
                                                            six.string_types):
                 value = param_utils.delim_string_to_list(value)
             if (not isinstance(value, collections.Sequence) or
                     isinstance(value, six.string_types)):
                 raise TypeError(_('"%s" is not a list') % repr(value))

View File

@@ -918,7 +918,7 @@ class Resource(status.ResourceStatus):
         try:
             set_in_progress()
             yield
-        except exception.UpdateInProgress as ex:
+        except exception.UpdateInProgress:
             with excutils.save_and_reraise_exception():
                 LOG.info('Update in progress for %s', self.name)
         except expected_exceptions as ex:
@@ -1587,11 +1587,11 @@
     @classmethod
     def check_is_substituted(cls, new_res_type):
         support_status = getattr(cls, 'support_status', None)
         if support_status:
             is_substituted = support_status.is_substituted(new_res_type)
             return is_substituted
         return False

     def _persist_update_no_change(self, new_template_id):
         """Persist an update where the resource is unchanged."""

View File

@@ -17,8 +17,6 @@ from oslo_config import cfg
 from oslo_log import log as logging
 import six

-cfg.CONF.import_opt('max_server_name_length', 'heat.common.config')
-
 from heat.common import exception
 from heat.common.i18n import _
 from heat.engine import attributes
@@ -28,6 +26,9 @@ from heat.engine import properties
 from heat.engine import resource
 from heat.engine.resources import scheduler_hints as sh

+cfg.CONF.import_opt('max_server_name_length', 'heat.common.config')
+
 LOG = logging.getLogger(__name__)

View File

@@ -217,8 +217,8 @@ class GlanceWebImage(resource.Resource):
     def get_live_resource_data(self):
         image_data = super(GlanceWebImage, self).get_live_resource_data()
         if image_data.get('status') in ('deleted', 'killed'):
             raise exception.EntityNotFound(entity='Resource',
                                            name=self.name)
         return image_data

     def parse_live_resource_data(self, resource_properties, resource_data):
@@ -479,8 +479,8 @@ class GlanceImage(resource.Resource):
     def get_live_resource_data(self):
         image_data = super(GlanceImage, self).get_live_resource_data()
         if image_data.get('status') in ('deleted', 'killed'):
             raise exception.EntityNotFound(entity='Resource',
                                            name=self.name)
         return image_data

     def parse_live_resource_data(self, resource_properties, resource_data):

View File

@@ -214,7 +214,7 @@ class AutoScalingResourceGroup(aws_asg.AutoScalingGroup):
     def _attribute_output_name(self, *attr_path):
         return ', '.join(six.text_type(a) for a in attr_path)

-    def get_attribute(self, key, *path):
+    def get_attribute(self, key, *path):  # noqa: C901
         if key == self.CURRENT_SIZE:
             return grouputils.get_size(self)

View File

@@ -442,11 +442,11 @@ class Workflow(signal_responder.SignalResponder,
                     error=_('Signal data error'),
                     message=message)
             if params is not None and not isinstance(params, dict):
                 message = (_('Params must be a map, find a '
                              '%s') % type(params))
                 raise exception.StackValidationFailed(
                     error=_('Signal data error'),
                     message=message)

     def validate(self):
         super(Workflow, self).validate()

View File

@@ -197,8 +197,8 @@ class Pool(neutron.NeutronResource):
         if (self.properties[self.LISTENER] is None and
                 self.properties[self.LOADBALANCER] is None):
             raise exception.PropertyUnspecifiedError(self.LISTENER,
                                                      self.LOADBALANCER)
         if self.properties[self.SESSION_PERSISTENCE] is not None:
             session_p = self.properties[self.SESSION_PERSISTENCE]

View File

@@ -181,8 +181,8 @@ class FlowClassifier(neutron.NeutronResource):
         if self.resource_id is None:
             return
         with self.client_plugin().ignore_not_found:
             self.client_plugin().delete_ext_resource('flow_classifier',
                                                      self.resource_id)

 def resource_mapping():

View File

@@ -118,8 +118,8 @@ class PortPair(neutron.NeutronResource):
         if self.resource_id is None:
             return
         with self.client_plugin().ignore_not_found:
             self.client_plugin().delete_ext_resource('port_pair',
                                                      self.resource_id)

 def resource_mapping():

View File

@@ -135,8 +135,8 @@ class TapFlow(neutron.NeutronResource):
         if self.resource_id is None:
             return
         with self.client_plugin().ignore_not_found:
             self.client_plugin().delete_ext_resource('tap_flow',
                                                      self.resource_id)

     def check_create_complete(self, data):
         return self.client_plugin().check_ext_resource_status(
View File

@@ -99,8 +99,8 @@ class TapService(neutron.NeutronResource):
         if self.resource_id is None:
             return
         with self.client_plugin().ignore_not_found:
             self.client_plugin().delete_ext_resource('tap_service',
                                                      self.resource_id)

     def check_create_complete(self, data):
         return self.client_plugin().check_ext_resource_status(

View File

@@ -188,8 +188,8 @@ class Pool(octavia_base.OctaviaBase):
         super(Pool, self).validate()
         if (self.properties[self.LISTENER] is None and
                 self.properties[self.LOADBALANCER] is None):
             raise exception.PropertyUnspecifiedError(self.LISTENER,
                                                      self.LOADBALANCER)
         if self.properties[self.SESSION_PERSISTENCE] is not None:
             session_p = self.properties[self.SESSION_PERSISTENCE]

View File

@@ -296,7 +296,7 @@ class TaskRunner(object):
         return self.__nonzero__()


-def wrappertask(task):
+def wrappertask(task):  # noqa: C901
     """Decorator for a task that needs to drive a subtask.

     This is essentially a replacement for the Python 3-only "yield from"

View File

@@ -716,9 +716,9 @@ class Stack(collections.Mapping):
         self.user_creds_id = new_creds.id

         if self.convergence:
             # create a traversal ID
             self.current_traversal = uuidutils.generate_uuid()
             s['current_traversal'] = self.current_traversal

         new_s = stack_object.Stack.create(self.context, s)
         self.id = new_s.id
@@ -1872,7 +1872,7 @@
                     else:
                         self.clients.client('keystone').delete_trust(
                             trust_id)
-                except Exception as ex:
+                except Exception:
                     # We want the admin to be able to delete the stack
                     # Do not FAIL a delete when we cannot delete a trust.
                     # We already carry through and delete the credentials

View File

@@ -54,11 +54,11 @@ class SupportStatus(object):
             self.previous_status = None

     def to_dict(self):
         return {'status': self.status,
                 'message': self.message,
                 'version': self.version,
                 'previous_status': self.previous_status.to_dict()
                 if self.previous_status is not None else None}

     def is_substituted(self, substitute_class):
         if self.substitute_class is None:

View File

@@ -18,6 +18,7 @@ import oslo_i18n as i18n
 def fake_translate_msgid(msgid, domain, desired_locale=None):
     return msgid

+
 i18n.enable_lazy()

 # To ensure messages don't really get translated while running tests.

View File

@@ -112,4 +112,5 @@ class MessageProcessor(object):
         """Delete all the messages from the queue."""
         self.queue.clear()

+
 __all__ = ['MessageProcessor', 'asynchronous']

View File

@@ -41,4 +41,5 @@ class Processes(object):
         self.engine.clear()
         self.worker.clear()

+
 Processes()

View File

@@ -48,4 +48,5 @@ class RealityStore(object):
                               prop_name)
         return res_data.value

+
 reality = RealityStore()

View File

@@ -26,7 +26,7 @@ def _has_constraint(cset, ctype, cname):
                 and c.name == cname):
             return True
     else:
         return False


 class DBMigrationUtilsTest(common.HeatTestCase):

View File

@@ -57,16 +57,16 @@ class dependenciesTest(common.HeatTestCase):
     def test_single_node(self):
         d = dependencies.Dependencies([('only', None)])
-        l = list(iter(d))
-        self.assertEqual(1, len(l))
-        self.assertEqual('only', l[0])
+        li = list(iter(d))
+        self.assertEqual(1, len(li))
+        self.assertEqual('only', li[0])

     def test_disjoint(self):
         d = dependencies.Dependencies([('1', None), ('2', None)])
-        l = list(iter(d))
-        self.assertEqual(2, len(l))
-        self.assertIn('1', l)
-        self.assertIn('2', l)
+        li = list(iter(d))
+        self.assertEqual(2, len(li))
+        self.assertIn('1', li)
+        self.assertIn('2', li)

     def test_single_fwd(self):
         self._dep_test_fwd(('second', 'first'))
@@ -170,9 +170,9 @@ class dependenciesTest(common.HeatTestCase):
     def test_single_partial(self):
         d = dependencies.Dependencies([('last', 'first')])
         p = d['last']
-        l = list(iter(p))
-        self.assertEqual(1, len(l))
-        self.assertEqual('last', l[0])
+        li = list(iter(p))
+        self.assertEqual(1, len(li))
+        self.assertEqual('last', li[0])

     def test_simple_partial(self):
         d = dependencies.Dependencies([('last', 'middle'),

View File

@@ -1242,9 +1242,9 @@ class DescriptionTest(common.HeatTestCase):
         self.assertEqual('f', scheduler.task_description(f))

     def test_lambda(self):
-        l = lambda: None
-        self.assertEqual('<lambda>', scheduler.task_description(l))
+        lam = lambda: None  # noqa: E731
+        self.assertEqual('<lambda>', scheduler.task_description(lam))

     def test_method(self):
         class C(object):

View File

@@ -81,16 +81,16 @@ class SchemaTest(common.HeatTestCase):
         self.assertRaises(ValueError, r.validate, 6)

     def test_length_validate(self):
-        l = constraints.Length(min=5, max=5, description='a range')
-        l.validate('abcde')
+        cl = constraints.Length(min=5, max=5, description='a range')
+        cl.validate('abcde')

     def test_length_min_fail(self):
-        l = constraints.Length(min=5, description='a range')
-        self.assertRaises(ValueError, l.validate, 'abcd')
+        cl = constraints.Length(min=5, description='a range')
+        self.assertRaises(ValueError, cl.validate, 'abcd')

     def test_length_max_fail(self):
-        l = constraints.Length(max=5, description='a range')
-        self.assertRaises(ValueError, l.validate, 'abcdef')
+        cl = constraints.Length(max=5, description='a range')
+        self.assertRaises(ValueError, cl.validate, 'abcdef')

     def test_modulo_validate(self):
         r = constraints.Modulo(step=2, offset=1, description='a modulo')
@@ -196,8 +196,8 @@ class SchemaTest(common.HeatTestCase):
         s = constraints.Schema(constraints.Schema.STRING, 'A string',
                                default='wibble',
                                constraints=[constraints.Length(4, 8)])
-        l = constraints.Schema(constraints.Schema.LIST, 'A list', schema=s)
-        self.assertEqual(d, dict(l))
+        ls = constraints.Schema(constraints.Schema.LIST, 'A list', schema=s)
+        self.assertEqual(d, dict(ls))

     def test_schema_map_schema(self):
         d = {
@@ -252,8 +252,8 @@ class SchemaTest(common.HeatTestCase):
                                constraints=[constraints.Length(4, 8)])
         m = constraints.Schema(constraints.Schema.MAP, 'A map',
                                schema={'Foo': s})
-        l = constraints.Schema(constraints.Schema.LIST, 'A list', schema=m)
-        self.assertEqual(d, dict(l))
+        ls = constraints.Schema(constraints.Schema.LIST, 'A list', schema=m)
+        self.assertEqual(d, dict(ls))

     def test_invalid_type(self):
         self.assertRaises(exception.InvalidSchemaError, constraints.Schema,

View File

@@ -194,7 +194,7 @@ class StackConvergenceCreateUpdateDeleteTest(common.HeatTestCase):
                           [[4, False], [3, False]],
                           [[4, False], [4, True]]]),
                          sorted(stack_db.current_deps['edges']))
-        '''
+        r'''
        To visualize:

           G(7, True)       H(6, True)
View File

@@ -71,8 +71,8 @@ class PropertySchemaTest(common.HeatTestCase):
         s = properties.Schema(properties.Schema.STRING, 'A string',
                               default='wibble',
                               constraints=[constraints.Length(4, 8)])
-        l = properties.Schema(properties.Schema.LIST, 'A list', schema=s)
-        self.assertEqual(d, dict(l))
+        ls = properties.Schema(properties.Schema.LIST, 'A list', schema=s)
+        self.assertEqual(d, dict(ls))

     def test_schema_map_schema(self):
         d = {
@@ -137,8 +137,8 @@ class PropertySchemaTest(common.HeatTestCase):
                               constraints=[constraints.Length(4, 8)])
         m = properties.Schema(properties.Schema.MAP, 'A map',
                               schema={'Foo': s})
-        l = properties.Schema(properties.Schema.LIST, 'A list', schema=m)
-        self.assertEqual(d, dict(l))
+        ls = properties.Schema(properties.Schema.LIST, 'A list', schema=m)
+        self.assertEqual(d, dict(ls))

     def test_all_resource_schemata(self):
         for resource_type in resources.global_env().get_types():
@@ -292,7 +292,7 @@ class PropertySchemaTest(common.HeatTestCase):
         self.assertEqual('[a-z]*', c.pattern)

     def test_from_legacy_list(self):
-        l = properties.Schema.from_legacy({
+        ls = properties.Schema.from_legacy({
             'Type': 'List',
             'Default': ['wibble'],
             'Schema': {
@@ -301,15 +301,15 @@ class PropertySchemaTest(common.HeatTestCase):
                 'MaxLength': 8,
             }
         })
-        self.assertEqual(properties.Schema.LIST, l.type)
-        self.assertEqual(['wibble'], l.default)
-        ss = l.schema[0]
+        self.assertEqual(properties.Schema.LIST, ls.type)
+        self.assertEqual(['wibble'], ls.default)
+        ss = ls.schema[0]
         self.assertEqual(properties.Schema.STRING, ss.type)
         self.assertEqual('wibble', ss.default)

     def test_from_legacy_map(self):
-        l = properties.Schema.from_legacy({
+        ls = properties.Schema.from_legacy({
             'Type': 'Map',
             'Schema': {
                 'foo': {
@@ -318,9 +318,9 @@ class PropertySchemaTest(common.HeatTestCase):
                 }
             }
         })
-        self.assertEqual(properties.Schema.MAP, l.type)
-        ss = l.schema['foo']
+        self.assertEqual(properties.Schema.MAP, ls.type)
+        ss = ls.schema['foo']
         self.assertEqual(properties.Schema.STRING, ss.type)
         self.assertEqual('wibble', ss.default)

View File

@@ -36,7 +36,7 @@ class StackLockTest(common.HeatTestCase):
             stack_object.Stack, 'get_by_id', return_value=stack)

     class TestThreadLockException(Exception):
         pass

     def test_successful_acquire_new_lock(self):
         mock_create = self.patchobject(stack_lock_object.StackLock,

View File

@@ -532,7 +532,7 @@ Resources:
                      'name': self.nic_name,
                      'admin_state_up': True}
         if security_groups:
             self._port['security_groups'] = security_groups

         self.mockclient.create_port.return_value = {
             'port': {

View File

@@ -107,19 +107,14 @@ commands = bandit -r heat -x tests --skip B101,B104,B107,B110,B310,B311,B404,B41
 [flake8]
 show-source = true
-# E117 over-indented
 # E123 closing bracket does not match indentation of opening bracket's line
 # E226 missing whitespace around arithmetic operator
 # E241 multiple spaces after ','
-# E305 expected 2 blank lines after class or function definition, found 1
 # E402 module level import not at top of file
-# E731 do not assign a lambda expression, use a def
-# E741 ambiguous variable name 'l'
-# F841 local variable 'ex' is assigned to but never used
 # W503 line break before binary operator
 # W504 line break after binary operator
-# W605 invalid escape sequence '\ '
-ignore = E117,E123,E226,E241,E305,E402,E731,E741,F841,W503,W504,W605
+ignore = E123,E226,E241,E402,W503,W504
 exclude=.*,dist,*lib/python*,*egg,build,*convergence/scenarios/*
 max-complexity=23
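
The shortened `ignore` list in tox.ini is what drives most of the hunks above: E117 and E305 account for the indentation and blank-line-only changes, E741 forces the single-letter `l` renames in the tests, E731 is behind the `# noqa: E731` on the deliberate lambda, F841 removes the unused `except ... as ex` bindings, and W605 motivates the `r'''` raw docstring. A small illustrative file showing the fixed shape of each newly enforced code (the names here are made up; only the codes come from the config above):

    # Illustrative fixes for the codes removed from the flake8 ignore list.

    # E741 -- ambiguous variable name: "l = list(iter(d))" becomes
    li = list(range(3))


    # E731 -- lambda assignment: "f = lambda: None" becomes a def; a test that
    # deliberately exercises a lambda keeps it and adds "# noqa: E731".
    def f():
        return None


    # E305 -- two blank lines are required between a def/class and the
    # module-level code that follows it (hence the blank-line-only hunks above).

    # F841 -- unused binding: "except ValueError as ex:" becomes
    try:
        int('x')
    except ValueError:
        pass

    # W605 -- "\d" in a normal string is an invalid escape sequence; a raw
    # string keeps the backslash literal.
    PATTERN = r'\d+'

    # E117 -- over-indented blocks are simply dedented to the standard 4 spaces.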