Update hacking for Python3

The repo is Python 3 only now, so update hacking to version 3.0,
which supports Python 3.

Fix the problems reported by the updated hacking version.

Update local hacking checks for new flake8.

Remove hacking and friends from lower-constraints; they are not
needed in installations, only for testing.

Change-Id: If787d610315be6c973b4335e047242c3aa3caf2a
This commit is contained in:
Andreas Jaeger 2020-04-01 20:15:00 +02:00
parent d98c484e70
commit 4a2fd6c292
74 changed files with 227 additions and 175 deletions

View File

@ -30,13 +30,11 @@ eventlet==0.22.0
extras==1.0.0 extras==1.0.0
fasteners==0.14.1 fasteners==0.14.1
fixtures==3.0.0 fixtures==3.0.0
flake8==2.5.5
future==0.16.0 future==0.16.0
futurist==1.6.0 futurist==1.6.0
glance-store==0.26.1 glance-store==0.26.1
google-auth==1.4.1 google-auth==1.4.1
greenlet==0.4.13 greenlet==0.4.13
hacking==0.12.0
heat-translator==2.0.0 heat-translator==2.0.0
idna==2.6 idna==2.6
imagesize==1.0.0 imagesize==1.0.0
@ -92,7 +90,6 @@ paramiko==2.0.0
Paste==2.0.2 Paste==2.0.2
PasteDeploy==1.5.0 PasteDeploy==1.5.0
pbr==2.0.0 pbr==2.0.0
pep8==1.5.7
pkg-resources==0.0.0 pkg-resources==0.0.0
prettytable==0.7.2 prettytable==0.7.2
psutil==5.4.3 psutil==5.4.3
@ -100,7 +97,6 @@ pyasn1-modules==0.2.1
pyasn1==0.4.2 pyasn1==0.4.2
pycadf==2.7.0 pycadf==2.7.0
pycparser==2.18 pycparser==2.18
pyflakes==0.8.1
Pygments==2.2.0 Pygments==2.2.0
pyinotify==0.9.6 pyinotify==0.9.6
PyNaCl==1.2.1 PyNaCl==1.2.1

View File

@ -545,8 +545,8 @@ def convert_to_list(data):
return [data] return [data]
HOSTNAME_PATTERN = ("(?=^.{1,254}$)(^(?:(?!\d+\.|-)[a-zA-Z0-9_\-]" HOSTNAME_PATTERN = (r"(?=^.{1,254}$)(^(?:(?!\d+\.|-)[a-zA-Z0-9_\-]"
"{1,63}(?<!-)\.?)+(?:[a-zA-Z]{2,})$)") r"{1,63}(?<!-)\.?)+(?:[a-zA-Z]{2,})$)")
HEX_ELEM = '[0-9A-Fa-f]' HEX_ELEM = '[0-9A-Fa-f]'
UUID_PATTERN = '-'.join([HEX_ELEM + '{8}', HEX_ELEM + '{4}', UUID_PATTERN = '-'.join([HEX_ELEM + '{8}', HEX_ELEM + '{4}',

View File

@ -128,8 +128,8 @@ class Controller(object):
'%s:%s' % (self._plugin_handlers[self.SHOW], attr_name), '%s:%s' % (self._plugin_handlers[self.SHOW], attr_name),
data, data,
might_not_exist=True): might_not_exist=True):
# this attribute is visible, check next one # this attribute is visible, check next one
continue continue
# if the code reaches this point then either the policy check # if the code reaches this point then either the policy check
# failed or the attribute was not visible in the first place # failed or the attribute was not visible in the first place
attributes_to_exclude.append(attr_name) attributes_to_exclude.append(attr_name)
@ -511,7 +511,7 @@ class Controller(object):
LOG.debug("Request body: %(body)s", LOG.debug("Request body: %(body)s",
{'body': strutils.mask_password(body)}) {'body': strutils.mask_password(body)})
prep_req_body = lambda x: Controller.prepare_request_body( prep_req_body = lambda x: Controller.prepare_request_body( # noqa
context, context,
x if resource in x else {resource: x}, x if resource in x else {resource: x},
is_create, is_create,
@ -540,9 +540,9 @@ class Controller(object):
if attr_vals['allow_post']: if attr_vals['allow_post']:
if ('default' not in attr_vals and if ('default' not in attr_vals and
attr not in res_dict): attr not in res_dict):
msg = _("Failed to parse request. Required " msg = _("Failed to parse request. Required "
"attribute '%s' not specified") % attr "attribute '%s' not specified") % attr
raise webob.exc.HTTPBadRequest(msg) raise webob.exc.HTTPBadRequest(msg)
res_dict[attr] = res_dict.get(attr, res_dict[attr] = res_dict.get(attr,
attr_vals.get('default')) attr_vals.get('default'))
else: else:
@ -572,8 +572,9 @@ class Controller(object):
and (attr == "vnfd_id") and is_create: and (attr == "vnfd_id") and is_create:
continue continue
# skip validating vnffgd_id when vnffgd_template is provided # skip validating vnffgd_id when vnffgd_template is provided
if (resource == 'vnffg') and ('vnffgd_template' in body['vnffg'])\ if ((resource == 'vnffg')
and (attr == 'vnffgd_id') and is_create: and ('vnffgd_template' in body['vnffg'])
and (attr == 'vnffgd_id') and is_create):
continue continue
# skip validating nsd_id when nsd_template is provided # skip validating nsd_id when nsd_template is provided
if (resource == 'ns') and ('nsd_template' in body['ns'])\ if (resource == 'ns') and ('nsd_template' in body['ns'])\

View File

@ -76,6 +76,7 @@ cfg.CONF.register_cli_opts(core_cli_opts)
def config_opts(): def config_opts():
return [(None, core_opts), (None, core_cli_opts)] return [(None, core_opts), (None, core_cli_opts)]
# Ensure that the control exchange is set correctly # Ensure that the control exchange is set correctly
oslo_messaging.set_transport_defaults(control_exchange='tacker') oslo_messaging.set_transport_defaults(control_exchange='tacker')
@ -89,6 +90,7 @@ def set_db_defaults():
max_pool_size=10, max_pool_size=10,
max_overflow=20, pool_timeout=10) max_overflow=20, pool_timeout=10)
set_db_defaults() set_db_defaults()

View File

@ -94,7 +94,7 @@ def find_config_file(options, config_file):
* Search for the configuration files via common cfg directories * Search for the configuration files via common cfg directories
:retval Full path to config file, or None if no config file found :retval Full path to config file, or None if no config file found
""" """
fix_path = lambda p: os.path.abspath(os.path.expanduser(p)) fix_path = lambda p: os.path.abspath(os.path.expanduser(p)) # noqa: E731
if options.get('config_file'): if options.get('config_file'):
if os.path.exists(options['config_file']): if os.path.exists(options['config_file']):
return fix_path(options['config_file']) return fix_path(options['config_file'])
@ -568,7 +568,7 @@ class MemoryUnit(object):
unit = MemoryUnit.UNIT_SIZE_DEFAULT unit = MemoryUnit.UNIT_SIZE_DEFAULT
LOG.info(_('A memory unit is not provided for size; using the ' LOG.info(_('A memory unit is not provided for size; using the '
'default unit %(default)s.') % {'default': 'B'}) 'default unit %(default)s.') % {'default': 'B'})
regex = re.compile('(\d*)\s*(\w*)') regex = re.compile(r'(\d*)\s*(\w*)')
result = regex.match(str(size)).groups() result = regex.match(str(size)).groups()
if result[1]: if result[1]:
unit_size = MemoryUnit.validate_unit(result[1]) unit_size = MemoryUnit.validate_unit(result[1])

View File

@ -21,6 +21,8 @@ Create Date: 2016-12-22 20:30:03.931290
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '000632983ada' revision = '000632983ada'
down_revision = '0ad3bbce1c19' down_revision = '0ad3bbce1c19'

View File

@ -21,6 +21,8 @@ Create Date: 2016-12-17 19:41:01.906138
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '0ad3bbce1c18' revision = '0ad3bbce1c18'
down_revision = '8f7145914cb0' down_revision = '8f7145914cb0'

View File

@ -21,6 +21,8 @@ Create Date: 2016-09-15 16:27:08.736673
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '0ae5b1ce3024' revision = '0ae5b1ce3024'
down_revision = '507122918800' down_revision = '507122918800'

View File

@ -21,6 +21,8 @@ Create Date: 2015-11-26 15:18:19.623170
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '12a57080b277' revision = '12a57080b277'
down_revision = '5958429bcb3c' down_revision = '5958429bcb3c'

View File

@ -21,6 +21,8 @@ Create Date: 2015-11-26 15:18:19.623170
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '12a57080b278' revision = '12a57080b278'
down_revision = '12a57080b277' down_revision = '12a57080b277'

View File

@ -21,6 +21,8 @@ Create Date: 2015-05-18 18:47:22.180962
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '13c0e0661015' revision = '13c0e0661015'
down_revision = '4c31092895b8' down_revision = '4c31092895b8'

View File

@ -21,6 +21,8 @@ Create Date: 2018-07-24 16:47:01.378226
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '13ecc2dd6f7f' revision = '13ecc2dd6f7f'
down_revision = '4747cc26b9c6' down_revision = '4747cc26b9c6'

View File

@ -21,6 +21,8 @@ Create Date: 2013-11-25 18:06:13.980301
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '1c6b0d82afcd' revision = '1c6b0d82afcd'
down_revision = None down_revision = None

View File

@ -21,6 +21,8 @@ Create Date: 2016-05-12 13:29:30.615609
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '22f5385a3d3f' revision = '22f5385a3d3f'
down_revision = '5f88e86b35c7' down_revision = '5f88e86b35c7'

View File

@ -21,6 +21,8 @@ Create Date: 2016-08-01 15:47:51.161749
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '22f5385a3d4f' revision = '22f5385a3d4f'
down_revision = 'd4f265e8eb9d' down_revision = 'd4f265e8eb9d'

View File

@ -21,6 +21,8 @@ Create Date: 2016-08-01 15:47:51.161749
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '22f5385a3d50' revision = '22f5385a3d50'
down_revision = '22f5385a3d4f' down_revision = '22f5385a3d4f'

View File

@ -21,6 +21,8 @@ Create Date: 2016-01-24 19:21:03.410029
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '24bec5f211c7' revision = '24bec5f211c7'
down_revision = '2774a42c7163' down_revision = '2774a42c7163'

View File

@ -21,6 +21,8 @@ Create Date: 2015-11-26 15:47:51.161749
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '2774a42c7163' revision = '2774a42c7163'
down_revision = '12a57080b278' down_revision = '12a57080b278'

View File

@ -21,6 +21,8 @@ Create Date: 2016-06-02 15:14:31.888078
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '2ff0a0e360f1' revision = '2ff0a0e360f1'
down_revision = '22f5385a3d50' down_revision = '22f5385a3d50'

View File

@ -21,6 +21,8 @@ Create Date: 2017-05-30 23:46:20.034085
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '31acbaeb8299' revision = '31acbaeb8299'
down_revision = 'e7993093baf1' down_revision = 'e7993093baf1'

View File

@ -21,6 +21,8 @@ Create Date: 2016-06-02 10:05:22.299780
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '354de64ba129' revision = '354de64ba129'
down_revision = 'b07673bb8654' down_revision = 'b07673bb8654'

View File

@ -21,6 +21,8 @@ Create Date: 2018-06-27 03:18:12.227673
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '4747cc26b9c6' revision = '4747cc26b9c6'
down_revision = '5d490546290c' down_revision = '5d490546290c'

View File

@ -21,6 +21,8 @@ Create Date: 2016-06-07 03:16:53.513392
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '4ee19c8a6d0a' revision = '4ee19c8a6d0a'
down_revision = '941b5a6fff9e' down_revision = '941b5a6fff9e'

View File

@ -21,6 +21,8 @@ Create Date: 2016-07-29 21:48:18.816277
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '507122918800' revision = '507122918800'
down_revision = '4ee19c8a6d0a' down_revision = '4ee19c8a6d0a'

View File

@ -21,6 +21,8 @@ Create Date: 2016-03-22 14:05:15.129330
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '5246a6bd410f' revision = '5246a6bd410f'
down_revision = '24bec5f211c7' down_revision = '24bec5f211c7'

View File

@ -21,6 +21,8 @@ Create Date: 2015-10-05 17:09:24.710961
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '5958429bcb3c' revision = '5958429bcb3c'
down_revision = '13c0e0661015' down_revision = '13c0e0661015'

View File

@ -21,6 +21,8 @@ Create Date: 2018-01-11 14:27:16.334946
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '5d490546290c' revision = '5d490546290c'
down_revision = 'e9a1e47fb0b5' down_revision = 'e9a1e47fb0b5'

View File

@ -21,6 +21,8 @@ Create Date: 2016-06-14 11:16:16.303343
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '5f88e86b35c7' revision = '5f88e86b35c7'
down_revision = '354de64ba129' down_revision = '354de64ba129'

View File

@ -21,6 +21,8 @@ Create Date: 2014-03-19 15:50:11.712686
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '81ffa86020d' revision = '81ffa86020d'
down_revision = '1c6b0d82afcd' down_revision = '1c6b0d82afcd'

View File

@ -21,6 +21,8 @@ Create Date: 2016-12-08 17:28:26.609343
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '8f7145914cb0' revision = '8f7145914cb0'
down_revision = '0ae5b1ce3024' down_revision = '0ae5b1ce3024'

View File

@ -21,6 +21,8 @@ Create Date: 2016-06-06 10:12:49.787430
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '941b5a6fff9e' revision = '941b5a6fff9e'
down_revision = '2ff0a0e360f1' down_revision = '2ff0a0e360f1'

View File

@ -26,8 +26,8 @@ Create Date: 2019-12-10 02:40:12.966027
revision = '975e28392888' revision = '975e28392888'
down_revision = 'abbef484b34c' down_revision = 'abbef484b34c'
from alembic import op from alembic import op # noqa: E402
import sqlalchemy as sa import sqlalchemy as sa # noqa: E402
def _migrate_duplicate_vnf_package_vnfd_id(table): def _migrate_duplicate_vnf_package_vnfd_id(table):

View File

@ -14,6 +14,8 @@
# under the License. # under the License.
# #
# flake8: noqa: E402
"""VNF instance management changes """VNF instance management changes
Revision ID: 985e28392890 Revision ID: 985e28392890

View File

@ -21,6 +21,8 @@ Create Date: 2019-06-03 08:37:05.095587
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '9d425296f2c3' revision = '9d425296f2c3'
down_revision = 'cd04a8335c18' down_revision = 'cd04a8335c18'

View File

@ -25,7 +25,7 @@ Create Date: 2019-11-18 19:34:26.853715
revision = 'abbef484b34c' revision = 'abbef484b34c'
down_revision = '9d425296f2c3' down_revision = '9d425296f2c3'
from alembic import op from alembic import op # noqa: E402
def upgrade(active_plugins=None, options=None): def upgrade(active_plugins=None, options=None):

View File

@ -21,6 +21,8 @@ Create Date: 2016-04-07 23:53:56.623647
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = 'acf941e54075' revision = 'acf941e54075'
down_revision = '5246a6bd410f' down_revision = '5246a6bd410f'

View File

@ -21,6 +21,8 @@ Create Date: 2016-06-01 12:46:07.499279
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = 'b07673bb8654' revision = 'b07673bb8654'
down_revision = 'c7cde2f45f82' down_revision = 'c7cde2f45f82'

View File

@ -21,6 +21,8 @@ Create Date: 2017-03-01 12:28:58.467900
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = 'c256228ed37c' revision = 'c256228ed37c'
down_revision = 'ef14f8026327' down_revision = 'ef14f8026327'

View File

@ -21,6 +21,8 @@ Create Date: 2016-06-01 10:58:43.022668
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = 'c7cde2f45f82' revision = 'c7cde2f45f82'
down_revision = '6e56d4474b2a' down_revision = '6e56d4474b2a'

View File

@ -21,6 +21,8 @@ Create Date: 2019-01-25 13:43:10.499421
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = 'cd04a8335c18' revision = 'cd04a8335c18'
down_revision = '13ecc2dd6f7f' down_revision = '13ecc2dd6f7f'

View File

@ -21,6 +21,8 @@ Create Date: 2016-07-14 11:07:28.115225
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = 'd4f265e8eb9d' revision = 'd4f265e8eb9d'
down_revision = '22f5385a3d3f' down_revision = '22f5385a3d3f'

View File

@ -21,6 +21,8 @@ Create Date: 2017-04-19 10:57:22.157326
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = 'e7993093baf1' revision = 'e7993093baf1'
down_revision = 'c256228ed37c' down_revision = 'c256228ed37c'

View File

@ -21,6 +21,8 @@ Create Date: 2017-02-09 00:11:08.081746
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = 'e8918cda6433' revision = 'e8918cda6433'
down_revision = '000632983ada' down_revision = '000632983ada'

View File

@ -21,6 +21,8 @@ Create Date: 2017-07-17 10:02:37.572587
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = 'e9a1e47fb0b5' revision = 'e9a1e47fb0b5'
down_revision = 'f5c1c3b0f6b4' down_revision = 'f5c1c3b0f6b4'

View File

@ -21,6 +21,8 @@ Create Date: 2017-02-10 12:10:09.606460
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = 'ef14f8026327' revision = 'ef14f8026327'
down_revision = 'e8918cda6433' down_revision = 'e8918cda6433'

View File

@ -21,6 +21,8 @@ Create Date: 2017-06-23 03:03:12.200270
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = 'f5c1c3b0f6b4' revision = 'f5c1c3b0f6b4'
down_revision = '31acbaeb8299' down_revision = '31acbaeb8299'

View File

@ -21,6 +21,8 @@ Create Date: 2016-05-28 07:13:07.125562
""" """
# flake8: noqa: E402
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = 'f958f58e5daa' revision = 'f958f58e5daa'
down_revision = 'acf941e54075' down_revision = 'acf941e54075'
@ -43,8 +45,8 @@ def upgrade(active_plugins=None, options=None):
'devicetemplateattributes') 'devicetemplateattributes')
for table in pk_id_tables: for table in pk_id_tables:
with migration.modify_foreign_keys_constraint(FK_MAP.get(table, [])): with migration.modify_foreign_keys_constraint(FK_MAP.get(table, [])):
op.alter_column(table, 'id', type_=types.Uuid, op.alter_column(table, 'id', type_=types.Uuid,
nullable=False) nullable=False)
fk_template_id_tables = ('devices', 'servicetypes', fk_template_id_tables = ('devices', 'servicetypes',
'devicetemplateattributes') 'devicetemplateattributes')

View File

@ -348,9 +348,9 @@ class VNFMPluginDb(vnfm.VNFMPluginBase, db_base.CommonDbMixin):
return self._make_vnfd_dict(vnfd_db) return self._make_vnfd_dict(vnfd_db)
def get_vnfds(self, context, filters, fields=None): def get_vnfds(self, context, filters, fields=None):
if 'template_source' in filters and \ if ('template_source' in filters and
filters['template_source'][0] == 'all': filters['template_source'][0] == 'all'):
filters.pop('template_source') filters.pop('template_source')
return self._get_collection(context, VNFD, return self._get_collection(context, VNFD,
self._make_vnfd_dict, self._make_vnfd_dict,
filters=filters, fields=fields) filters=filters, fields=fields)
@ -419,10 +419,10 @@ class VNFMPluginDb(vnfm.VNFMPluginBase, db_base.CommonDbMixin):
deleted_at=datetime.min) deleted_at=datetime.min)
context.session.add(vnf_db) context.session.add(vnf_db)
for key, value in attributes.items(): for key, value in attributes.items():
arg = VNFAttribute( arg = VNFAttribute(
id=uuidutils.generate_uuid(), vnf_id=vnf_id, id=uuidutils.generate_uuid(), vnf_id=vnf_id,
key=key, value=value) key=key, value=value)
context.session.add(arg) context.session.add(arg)
except DBDuplicateEntry as e: except DBDuplicateEntry as e:
raise exceptions.DuplicateEntity( raise exceptions.DuplicateEntity(
_type="vnf", _type="vnf",

View File

@ -97,7 +97,7 @@ def load_csar(package_uuid, location):
try: try:
resp, size = glance_store.backend.get_from_backend(location) resp, size = glance_store.backend.get_from_backend(location)
except Exception as exp: except Exception:
LOG.info("Failed to get csar data from glance store %(location)s for " LOG.info("Failed to get csar data from glance store %(location)s for "
"package %(uuid)s", "package %(uuid)s",
{"location": location, "uuid": package_uuid}) {"location": location, "uuid": package_uuid})

View File

@ -12,9 +12,10 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
import pycodestyle
import re import re
import pep8 from hacking import core
""" """
Guidelines for writing new hacking checks Guidelines for writing new hacking checks
@ -35,16 +36,13 @@ log_translation = re.compile(
r"(.)*LOG\.(audit|error|info|warn|warning|critical|exception)\(\s*('|\")") r"(.)*LOG\.(audit|error|info|warn|warning|critical|exception)\(\s*('|\")")
def validate_log_translations(logical_line, physical_line, filename): @core.flake8ext
def validate_log_translations(physical_line, logical_line, filename):
# Translations are not required in the test directory # Translations are not required in the test directory
if "tacker/tests" in filename: if "tacker/tests" in filename:
return return
if pep8.noqa(physical_line): if pycodestyle.noqa(physical_line):
return return
msg = "N320: Log messages require translations!" msg = "N320: Log messages require translations!"
if log_translation.match(logical_line): if log_translation.match(logical_line):
yield (0, msg) yield (0, msg)
def factory(register):
register(validate_log_translations)

View File

@ -81,7 +81,7 @@ class BarbicanKeyManager(key_manager.KeyManager):
def _get_keystone_auth(self, context): def _get_keystone_auth(self, context):
if context.__class__.__name__ is 'KeystonePassword': if context.__class__.__name__ == 'KeystonePassword':
return identity.Password( return identity.Password(
auth_url=self._auth_url, auth_url=self._auth_url,
username=context.username, username=context.username,
@ -97,7 +97,7 @@ class BarbicanKeyManager(key_manager.KeyManager):
project_domain_id=context.project_domain_id, project_domain_id=context.project_domain_id,
project_domain_name=context.project_domain_name, project_domain_name=context.project_domain_name,
reauthenticate=context.reauthenticate) reauthenticate=context.reauthenticate)
elif context.__class__.__name__ is 'KeystoneToken': elif context.__class__.__name__ == 'KeystoneToken':
return identity.Token( return identity.Token(
auth_url=self._auth_url, auth_url=self._auth_url,
token=context.token, token=context.token,
@ -111,8 +111,8 @@ class BarbicanKeyManager(key_manager.KeyManager):
reauthenticate=context.reauthenticate) reauthenticate=context.reauthenticate)
# this will be kept for oslo.context compatibility until # this will be kept for oslo.context compatibility until
# projects begin to use utils.credential_factory # projects begin to use utils.credential_factory
elif (context.__class__.__name__ is 'RequestContext' or elif (context.__class__.__name__ == 'RequestContext' or
context.__class__.__name__ is 'Context'): context.__class__.__name__ == 'Context'):
return identity.Token( return identity.Token(
auth_url=self._auth_url, auth_url=self._auth_url,
token=context.auth_token, token=context.auth_token,

View File

@ -320,14 +320,14 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,
return client_type(session=sess) return client_type(session=sess)
def _translate_ip_protocol(self, ip_proto): def _translate_ip_protocol(self, ip_proto):
if ip_proto == '1': if ip_proto == '1':
return 'icmp' return 'icmp'
elif ip_proto == '6': elif ip_proto == '6':
return 'tcp' return 'tcp'
elif ip_proto == '17': elif ip_proto == '17':
return 'udp' return 'udp'
else: else:
return None return None
def _create_classifier_params(self, fc): def _create_classifier_params(self, fc):
classifier_params = {} classifier_params = {}
@ -678,20 +678,20 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,
def _dissociate_classifier_from_chain(self, chain_id, fc_ids, def _dissociate_classifier_from_chain(self, chain_id, fc_ids,
neutronclient): neutronclient):
pc_info = neutronclient.port_chain_show(chain_id) pc_info = neutronclient.port_chain_show(chain_id)
current_fc_list = pc_info['port_chain']['flow_classifiers'] current_fc_list = pc_info['port_chain']['flow_classifiers']
for fc_id in fc_ids: for fc_id in fc_ids:
current_fc_list.remove(fc_id) current_fc_list.remove(fc_id)
pc_id = neutronclient.port_chain_update(chain_id, pc_id = neutronclient.port_chain_update(chain_id,
{'flow_classifiers': current_fc_list}) {'flow_classifiers': current_fc_list})
if pc_id is None: if pc_id is None:
raise nfvo.UpdateClassifierException( raise nfvo.UpdateClassifierException(
message="Failed to update classifiers") message="Failed to update classifiers")
for fc_id in fc_ids: for fc_id in fc_ids:
try: try:
neutronclient.flow_classifier_delete(fc_id) neutronclient.flow_classifier_delete(fc_id)
except ValueError as e: except ValueError as e:
raise e raise e
def remove_and_delete_flow_classifiers(self, chain_id, fc_ids, def remove_and_delete_flow_classifiers(self, chain_id, fc_ids,
auth_attr=None): auth_attr=None):

View File

@ -62,8 +62,6 @@ class WorkflowGenerator(workflow_generator.WorkflowGeneratorBase):
'retry': { 'retry': {
'count': 10, 'count': 10,
'delay': 10, 'delay': 10,
'break-on': '<% $.status_{0} = "ACTIVE" '
'%>'.format(node),
'break-on': '<% $.status_{0} = "ERROR"' 'break-on': '<% $.status_{0} = "ERROR"'
' %>'.format(node), ' %>'.format(node),
'continue-on': '<% $.status_{0} = "PENDING_CREATE" ' 'continue-on': '<% $.status_{0} = "PENDING_CREATE" '

View File

@ -580,7 +580,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
LOG.debug('Attempting to open key file for vim id %s', vim_id) LOG.debug('Attempting to open key file for vim id %s', vim_id)
try: try:
with open(key_file, 'r') as f: with open(key_file, 'r') as f:
return f.read() return f.read()
except Exception: except Exception:
LOG.warning('VIM id invalid or key not found for %s', vim_id) LOG.warning('VIM id invalid or key not found for %s', vim_id)
raise nfvo.VimKeyNotFoundException(vim_id=vim_id) raise nfvo.VimKeyNotFoundException(vim_id=vim_id)

View File

@ -69,7 +69,8 @@ def _add_user_defined_data(context, package_uuid, user_data,
def _vnf_package_user_data_get_query(context, package_uuid, model): def _vnf_package_user_data_get_query(context, package_uuid, model):
return api.model_query(context, model, read_deleted="no", project_only=True).\ return api.model_query(context, model, read_deleted="no",
project_only=True).\
filter_by(package_uuid=package_uuid) filter_by(package_uuid=package_uuid)

View File

@ -207,7 +207,7 @@ class VnfSoftwareImage(base.TackerObject, base.TackerPersistentObject):
for field in self.fields: for field in self.fields:
if field in software_image and field not in self: if field in software_image and field not in self:
setattr(self, field, getattr(software_image, field)) setattr(self, field, getattr(software_image, field))
@base.remotable @base.remotable
def create(self): def create(self):

View File

@ -316,7 +316,7 @@ class FieldCheck(policy.Check):
attr = attributes.RESOURCE_ATTRIBUTE_MAP[resource][field] attr = attributes.RESOURCE_ATTRIBUTE_MAP[resource][field]
conv_func = attr['convert_to'] conv_func = attr['convert_to']
except KeyError: except KeyError:
conv_func = lambda x: x conv_func = lambda x: x # noqa: E731
self.field = field self.field = field
self.value = conv_func(value) self.value = conv_func(value)

View File

@ -26,6 +26,7 @@ def requires_py2(testcase):
def requires_py3(testcase): def requires_py3(testcase):
return testtools.skipUnless(six.PY3, "requires python 3.x")(testcase) return testtools.skipUnless(six.PY3, "requires python 3.x")(testcase)
if sys.version_info < (3,): if sys.version_info < (3,):
def compact_byte(x): def compact_byte(x):
return x return x

View File

@ -196,12 +196,12 @@ class BaseTackerTest(base.BaseTestCase):
sleep_interval): sleep_interval):
start_time = int(time.time()) start_time = int(time.time())
while True: while True:
vnf_result = self.client.show_vnf(vnf_id) vnf_result = self.client.show_vnf(vnf_id)
status = vnf_result['vnf']['status'] status = vnf_result['vnf']['status']
if (status == target_status) or ( if (status == target_status) or (
(int(time.time()) - start_time) > timeout): (int(time.time()) - start_time) > timeout):
break break
time.sleep(sleep_interval) time.sleep(sleep_interval)
self.assertEqual(status, target_status, self.assertEqual(status, target_status,
"vnf %(vnf_id)s with status %(status)s is" "vnf %(vnf_id)s with status %(status)s is"

View File

@ -54,7 +54,7 @@ class NsdTestCreate(base.BaseTackerTest):
try: try:
self.client.delete_nsd(nsd_id) self.client.delete_nsd(nsd_id)
except Exception: except Exception:
assert False, "nsd Delete failed" assert False, "nsd Delete failed"
def _test_delete_vnfd(self, vnfd_id, timeout=constants.NS_DELETE_TIMEOUT): def _test_delete_vnfd(self, vnfd_id, timeout=constants.NS_DELETE_TIMEOUT):
start_time = int(time.time()) start_time = int(time.time())
@ -76,12 +76,12 @@ class NsdTestCreate(base.BaseTackerTest):
sleep_interval): sleep_interval):
start_time = int(time.time()) start_time = int(time.time())
while True: while True:
ns_result = self.client.show_ns(ns_id) ns_result = self.client.show_ns(ns_id)
status = ns_result['ns']['status'] status = ns_result['ns']['status']
if (status == target_status) or ( if (status == target_status) or (
(int(time.time()) - start_time) > timeout): (int(time.time()) - start_time) > timeout):
break break
time.sleep(sleep_interval) time.sleep(sleep_interval)
self.assertEqual(status, target_status, self.assertEqual(status, target_status,
"ns %(ns_id)s with status %(status)s is" "ns %(ns_id)s with status %(status)s is"
@ -222,11 +222,11 @@ class NsdTestCreate(base.BaseTackerTest):
timeout=60, sleep_interval=2): timeout=60, sleep_interval=2):
start_time = int(time.time()) start_time = int(time.time())
while True: while True:
server_info = self.novaclient().servers.get(server_id) server_info = self.novaclient().servers.get(server_id)
if (server_info.status == target_status) or ( if (server_info.status == target_status) or (
(int(time.time()) - start_time) > timeout): (int(time.time()) - start_time) > timeout):
break break
time.sleep(sleep_interval) time.sleep(sleep_interval)
def test_create_delete_ns_vnffg(self): def test_create_delete_ns_vnffg(self):
net = self.neutronclient().list_networks() net = self.neutronclient().list_networks()

View File

@ -106,20 +106,20 @@ class ResourceExtensionTest(base.BaseTestCase):
return {'collection': 'value'} return {'collection': 'value'}
class DummySvcPlugin(wsgi.Controller): class DummySvcPlugin(wsgi.Controller):
def get_plugin_type(self): def get_plugin_type(self):
return constants.DUMMY return constants.DUMMY
def index(self, request, **kwargs): def index(self, request, **kwargs):
return "resource index" return "resource index"
def custom_member_action(self, request, **kwargs): def custom_member_action(self, request, **kwargs):
return {'member_action': 'value'} return {'member_action': 'value'}
def collection_action(self, request, **kwargs): def collection_action(self, request, **kwargs):
return {'collection': 'value'} return {'collection': 'value'}
def show(self, request, id): def show(self, request, id):
return {'data': {'id': id}} return {'data': {'id': id}}
def test_exceptions_notimplemented(self): def test_exceptions_notimplemented(self):
controller = self.ResourceExtensionController() controller = self.ResourceExtensionController()

View File

@ -1359,22 +1359,22 @@ class ExtensionTestCase(base.BaseTestCase):
class TestSubresourcePlugin(object): class TestSubresourcePlugin(object):
def get_network_dummies(self, context, network_id, def get_network_dummies(self, context, network_id,
filters=None, fields=None): filters=None, fields=None):
return [] return []
def get_network_dummy(self, context, id, network_id, def get_network_dummy(self, context, id, network_id,
fields=None): fields=None):
return {} return {}
def create_network_dummy(self, context, network_id, dummy): def create_network_dummy(self, context, network_id, dummy):
return {} return {}
def update_network_dummy(self, context, id, network_id, dummy): def update_network_dummy(self, context, id, network_id, dummy):
return {} return {}
def delete_network_dummy(self, context, id, network_id): def delete_network_dummy(self, context, id, network_id):
return return
class ListArgsTestCase(base.BaseTestCase): class ListArgsTestCase(base.BaseTestCase):

View File

@ -29,6 +29,7 @@ def _get_template(name):
f = codecs.open(filename, encoding='utf-8', errors='strict') f = codecs.open(filename, encoding='utf-8', errors='strict')
return f.read() return f.read()
tosca_cvnf_vnfd = _get_template('test_tosca_cvnf.yaml') tosca_cvnf_vnfd = _get_template('test_tosca_cvnf.yaml')
tosca_vnfd_openwrt = _get_template('test_tosca_openwrt.yaml') tosca_vnfd_openwrt = _get_template('test_tosca_openwrt.yaml')
tosca_vnfd_openwrt_param = _get_template('test_tosca_openwrt_param.yaml') tosca_vnfd_openwrt_param = _get_template('test_tosca_openwrt_param.yaml')
@ -164,24 +165,6 @@ def get_dummy_vnf_invalid_param_type_obj():
return {'vnf': {u'attributes': {u'param_values': 'dummy_param'}}} return {'vnf': {u'attributes': {u'param_values': 'dummy_param'}}}
def get_dummy_vnf_invalid_config_type_obj():
return {'vnf': {u'attributes': {u'config': 'dummy_config'}}}
def get_dummy_vnf_invalid_param_content():
return {'vnf': {u'attributes': {u'param_values': {}}}}
def get_dummy_vnf_param_obj():
return {'vnf': {u'attributes': {u'param_values':
{'flavor': 'm1.tiny',
'reservation_id': '99999999-3925-4c9e-9074-239a902b68d7'}}}}
def get_dummy_vnf_invalid_param_type_obj():
return {'vnf': {u'attributes': {u'param_values': 'dummy_param'}}}
def get_dummy_vnf(status='PENDING_CREATE', scaling_group=False, def get_dummy_vnf(status='PENDING_CREATE', scaling_group=False,
instance_id=None): instance_id=None):
dummy_vnf = {'status': status, 'instance_id': instance_id, 'name': dummy_vnf = {'status': status, 'instance_id': instance_id, 'name':
@ -264,22 +247,6 @@ def get_dummy_vnf_update_empty_param():
return {'vnf': {'attributes': {'param_values': {}}}} return {'vnf': {'attributes': {'param_values': {}}}}
def get_dummy_vnf_update_param():
return {'vnf': {'attributes': {'param_values': update_param_data}}}
def get_dummy_vnf_update_new_param():
return {'vnf': {'attributes': {'param_values': update_new_param_data}}}
def get_dummy_vnf_update_invalid_param():
return {'vnf': {'attributes': {'param_values': update_invalid_param_data}}}
def get_dummy_vnf_update_empty_param():
return {'vnf': {'attributes': {'param_values': {}}}}
def get_vim_obj(): def get_vim_obj():
return {'vim': {'type': 'openstack', return {'vim': {'type': 'openstack',
'auth_url': 'http://localhost/identity', 'auth_url': 'http://localhost/identity',

View File

@ -284,6 +284,7 @@ def get_instantiated_vnf_info():
} }
return instantiated_vnf_info return instantiated_vnf_info
instantiated_vnf_info = { instantiated_vnf_info = {
'ext_cp_info': [vnf_ext_cp_info], 'ext_cp_info': [vnf_ext_cp_info],
'flavour_id': uuidsentinel.flavour_id, 'flavour_id': uuidsentinel.flavour_id,

View File

@ -567,7 +567,7 @@ class ResourceTest(base.BaseTestCase):
@staticmethod @staticmethod
def my_fault_body_function(): def my_fault_body_function():
return 'off' return 'off'
class Controller(object): class Controller(object):
def index(self, request, index=None): def index(self, request, index=None):

View File

@ -349,7 +349,7 @@ def get_volumes(template):
for prop_name, prop_value in block_properties.items(): for prop_name, prop_value in block_properties.items():
if prop_name == 'size': if prop_name == 'size':
prop_value = \ prop_value = \
re.compile('(\d+)\s*(\w+)').match(prop_value).groups()[0] re.compile(r'(\d+)\s*(\w+)').match(prop_value).groups()[0]
volume_dict[node_name][prop_name] = prop_value volume_dict[node_name][prop_name] = prop_value
del node_tpl[node_name] del node_tpl[node_name]
return volume_dict return volume_dict
@ -447,14 +447,14 @@ def convert_unsupported_res_prop(heat_dict, unsupported_res_prop):
unsupported_prop = set(prop_dict.keys()) & set( unsupported_prop = set(prop_dict.keys()) & set(
unsupported_prop_dict.keys()) unsupported_prop_dict.keys())
for prop in unsupported_prop: for prop in unsupported_prop:
# some properties are just punted to 'value_specs' # some properties are just punted to 'value_specs'
# property if they are incompatible # property if they are incompatible
new_prop = unsupported_prop_dict[prop] new_prop = unsupported_prop_dict[prop]
if new_prop == 'value_specs': if new_prop == 'value_specs':
prop_dict.setdefault(new_prop, {})[ prop_dict.setdefault(new_prop, {})[
prop] = prop_dict.pop(prop) prop] = prop_dict.pop(prop)
else: else:
prop_dict[new_prop] = prop_dict.pop(prop) prop_dict[new_prop] = prop_dict.pop(prop)
@log.log @log.log
@ -1145,7 +1145,7 @@ def get_resources_dict(template, flavor_extra_input=None):
res_dict = dict() res_dict = dict()
for res, method in (OS_RESOURCES).items(): for res, method in (OS_RESOURCES).items():
res_method = getattr(sys.modules[__name__], method) res_method = getattr(sys.modules[__name__], method)
if res is 'flavor': if res == 'flavor':
res_dict[res] = res_method(template, flavor_extra_input) res_dict[res] = res_method(template, flavor_extra_input)
else: else:
res_dict[res] = res_method(template) res_dict[res] = res_method(template)

View File

@ -799,7 +799,7 @@ class OpenStack(abstract_driver.VnfAbstractDriver,
for vnf_vl_resource_info in vnf_virtual_link_resource_info: for vnf_vl_resource_info in vnf_virtual_link_resource_info:
if (vnf_vl_resource_info.vnf_virtual_link_desc_id != if (vnf_vl_resource_info.vnf_virtual_link_desc_id !=
vnf_virtual_link_desc_id): vnf_virtual_link_desc_id):
continue continue
vl_resource_data = pop_resources.pop( vl_resource_data = pop_resources.pop(
vnf_virtual_link_desc_id, None) vnf_virtual_link_desc_id, None)
@ -886,7 +886,7 @@ class OpenStack(abstract_driver.VnfAbstractDriver,
for vnf_vl_resource_info in vnf_virtual_link_resource_info: for vnf_vl_resource_info in vnf_virtual_link_resource_info:
if (vnf_vl_resource_info.vnf_virtual_link_desc_id != if (vnf_vl_resource_info.vnf_virtual_link_desc_id !=
ext_managed_vl_info.vnf_virtual_link_desc_id): ext_managed_vl_info.vnf_virtual_link_desc_id):
continue continue
for vl_port in vnf_vl_resource_info.vnf_link_ports: for vl_port in vnf_vl_resource_info.vnf_link_ports:
_update_link_port(vl_port) _update_link_port(vl_port)
@ -982,9 +982,9 @@ class OpenStack(abstract_driver.VnfAbstractDriver,
for resource in vdu_resources: for resource in vdu_resources:
for stack_uuid, resources in stack_resources.items(): for stack_uuid, resources in stack_resources.items():
res_details = resources.get(resource['resource_name']) res_details = resources.get(resource['resource_name'])
if res_details and res_details['physical_resource_id'] == \ if (res_details and res_details['physical_resource_id'] ==
resource['physical_resource_id']: resource['physical_resource_id']):
yield stack_uuid, resource['resource_name'] yield stack_uuid, resource['resource_name']
def _resource_mark_unhealthy(): def _resource_mark_unhealthy():
vnfc_resources = self._get_vnfc_resources_from_heal_request( vnfc_resources = self._get_vnfc_resources_from_heal_request(

View File

@ -174,10 +174,9 @@ class VNFMonitor(object):
vdu_delay = params.get('monitoring_delay', vnf_delay) vdu_delay = params.get('monitoring_delay', vnf_delay)
if not timeutils.is_older_than( if not timeutils.is_older_than(hosting_vnf['boot_at'],
hosting_vnf['boot_at'], vdu_delay):
vdu_delay): continue
continue
actions = policy[driver].get('actions', {}) actions = policy[driver].get('actions', {})
params['mgmt_ip'] = mgmt_ips[vdu] params['mgmt_ip'] = mgmt_ips[vdu]

View File

@ -37,6 +37,7 @@ cfg.CONF.register_opts(OPTS, group='ceilometer')
def config_opts(): def config_opts():
return [('ceilometer', OPTS)] return [('ceilometer', OPTS)]
ALARM_INFO = ( ALARM_INFO = (
ALARM_ACTIONS, OK_ACTIONS, REPEAT_ACTIONS, ALARM, ALARM_ACTIONS, OK_ACTIONS, REPEAT_ACTIONS, ALARM,
INSUFFICIENT_DATA_ACTIONS, DESCRIPTION, ENABLED, TIME_CONSTRAINTS, INSUFFICIENT_DATA_ACTIONS, DESCRIPTION, ENABLED, TIME_CONSTRAINTS,

View File

@ -90,14 +90,14 @@ class VNFMonitorZabbix(abstract_driver.VNFMonitorAbstractDriver):
LOG.error('Cannot request error : %s', response['error']['data']) LOG.error('Cannot request error : %s', response['error']['data'])
def create_graph(self, itemid, name, nodename): def create_graph(self, itemid, name, nodename):
temp_graph_api = copy.deepcopy(zapi.dGRAPH_CREATE_API) temp_graph_api = copy.deepcopy(zapi.dGRAPH_CREATE_API)
gitems = [{'itemid': itemid, 'color': '00AA00'}] gitems = [{'itemid': itemid, 'color': '00AA00'}]
temp_graph_api['auth'] = \ temp_graph_api['auth'] = \
self.hostinfo[nodename]['zbx_info']['zabbix_token'] self.hostinfo[nodename]['zbx_info']['zabbix_token']
temp_graph_api['params']['gitems'] = gitems temp_graph_api['params']['gitems'] = gitems
temp_graph_api['params']['name'] = name temp_graph_api['params']['name'] = name
response = self.send_post(temp_graph_api) response = self.send_post(temp_graph_api)
VNFMonitorZabbix.check_error(response) VNFMonitorZabbix.check_error(response)
def create_action(self): def create_action(self):
for vdu in self.vduname: for vdu in self.vduname:

View File

@ -131,7 +131,7 @@ class VimClient(object):
LOG.debug('Attempting to open key file for vim id %s', vim_id) LOG.debug('Attempting to open key file for vim id %s', vim_id)
try: try:
with open(key_file, 'r') as f: with open(key_file, 'r') as f:
return f.read() return f.read()
except Exception: except Exception:
LOG.warning('VIM id invalid or key not found for %s', vim_id) LOG.warning('VIM id invalid or key not found for %s', vim_id)
raise nfvo.VimKeyNotFoundException(vim_id=vim_id) raise nfvo.VimKeyNotFoundException(vim_id=vim_id)

View File

@ -83,6 +83,7 @@ CONF.register_opts(socket_opts)
def config_opts(): def config_opts():
return [(None, socket_opts)] return [(None, socket_opts)]
LOG = logging.getLogger(__name__) LOG = logging.getLogger(__name__)
@ -1038,7 +1039,7 @@ class Resource(Application):
action_result = self.dispatch(request, action, args) action_result = self.dispatch(request, action, args)
except Fault as ex: except Fault as ex:
response = ex response = ex
except Exception as ex: except Exception:
raise Fault(webob.exc.HTTPInternalServerError()) raise Fault(webob.exc.HTTPInternalServerError())
if not response: if not response:

View File

@ -8,7 +8,7 @@ coverage!=4.4,>=4.0 # Apache-2.0
ddt>=1.0.1 # MIT ddt>=1.0.1 # MIT
doc8>=0.6.0 # Apache-2.0 doc8>=0.6.0 # Apache-2.0
fixtures>=3.0.0 # Apache-2.0/BSD fixtures>=3.0.0 # Apache-2.0/BSD
hacking!=0.13.0,<0.14,>=0.12.0 # Apache-2.0 hacking>=3.0,<3.1.0 # Apache-2.0
mock>=3.0.0 # BSD mock>=3.0.0 # BSD
python-subunit>=1.0.0 # Apache-2.0/BSD python-subunit>=1.0.0 # Apache-2.0/BSD
python-tackerclient>=0.8.0 # Apache-2.0 python-tackerclient>=0.8.0 # Apache-2.0

10
tox.ini
View File

@ -85,14 +85,20 @@ commands = {posargs}
[flake8] [flake8]
# E128 continuation line under-indented for visual indent # E128 continuation line under-indented for visual indent
# N320 log messages does not translate # N320 log messages does not translate
ignore = E128,N320 # W503 line break before binary operator
# W504 line break after binary operator
ignore = E128,N320,W503,W504
show-source = true show-source = true
builtins = _ builtins = _
exclude = .venv,.git,.tox,dist,doc,*lib/python*,*egg,build,tools,.ropeproject exclude = .venv,.git,.tox,dist,doc,*lib/python*,*egg,build,tools,.ropeproject
[hacking] [hacking]
import_exceptions = tacker._i18n import_exceptions = tacker._i18n
local-check-factory = tacker.hacking.checks.factory
[flake8:local-plugins]
extension =
N320 = checks:validate_log_translations
paths = ./tacker/hacking
[testenv:config-gen] [testenv:config-gen]
commands = commands =