Update hacking for Python3

The repo is Python 3 only now, so update hacking to version 3.0, which
supports Python 3.

Fix problems found by updated hacking version.

Update local hacking checks for new flake8.

Remove hacking and friends from lower-constraints, since they are not
needed in installations.

Change-Id: If787d610315be6c973b4335e047242c3aa3caf2a
This commit is contained in:
Andreas Jaeger 2020-04-01 20:15:00 +02:00
parent d98c484e70
commit 4a2fd6c292
74 changed files with 227 additions and 175 deletions

View File

@ -30,13 +30,11 @@ eventlet==0.22.0
extras==1.0.0
fasteners==0.14.1
fixtures==3.0.0
flake8==2.5.5
future==0.16.0
futurist==1.6.0
glance-store==0.26.1
google-auth==1.4.1
greenlet==0.4.13
hacking==0.12.0
heat-translator==2.0.0
idna==2.6
imagesize==1.0.0
@ -92,7 +90,6 @@ paramiko==2.0.0
Paste==2.0.2
PasteDeploy==1.5.0
pbr==2.0.0
pep8==1.5.7
pkg-resources==0.0.0
prettytable==0.7.2
psutil==5.4.3
@ -100,7 +97,6 @@ pyasn1-modules==0.2.1
pyasn1==0.4.2
pycadf==2.7.0
pycparser==2.18
pyflakes==0.8.1
Pygments==2.2.0
pyinotify==0.9.6
PyNaCl==1.2.1

View File

@ -545,8 +545,8 @@ def convert_to_list(data):
return [data]
HOSTNAME_PATTERN = ("(?=^.{1,254}$)(^(?:(?!\d+\.|-)[a-zA-Z0-9_\-]"
"{1,63}(?<!-)\.?)+(?:[a-zA-Z]{2,})$)")
HOSTNAME_PATTERN = (r"(?=^.{1,254}$)(^(?:(?!\d+\.|-)[a-zA-Z0-9_\-]"
r"{1,63}(?<!-)\.?)+(?:[a-zA-Z]{2,})$)")
HEX_ELEM = '[0-9A-Fa-f]'
UUID_PATTERN = '-'.join([HEX_ELEM + '{8}', HEX_ELEM + '{4}',

View File

@ -128,8 +128,8 @@ class Controller(object):
'%s:%s' % (self._plugin_handlers[self.SHOW], attr_name),
data,
might_not_exist=True):
# this attribute is visible, check next one
continue
# this attribute is visible, check next one
continue
# if the code reaches this point then either the policy check
# failed or the attribute was not visible in the first place
attributes_to_exclude.append(attr_name)
@ -511,7 +511,7 @@ class Controller(object):
LOG.debug("Request body: %(body)s",
{'body': strutils.mask_password(body)})
prep_req_body = lambda x: Controller.prepare_request_body(
prep_req_body = lambda x: Controller.prepare_request_body( # noqa
context,
x if resource in x else {resource: x},
is_create,
@ -540,9 +540,9 @@ class Controller(object):
if attr_vals['allow_post']:
if ('default' not in attr_vals and
attr not in res_dict):
msg = _("Failed to parse request. Required "
"attribute '%s' not specified") % attr
raise webob.exc.HTTPBadRequest(msg)
msg = _("Failed to parse request. Required "
"attribute '%s' not specified") % attr
raise webob.exc.HTTPBadRequest(msg)
res_dict[attr] = res_dict.get(attr,
attr_vals.get('default'))
else:
@ -572,8 +572,9 @@ class Controller(object):
and (attr == "vnfd_id") and is_create:
continue
# skip validating vnffgd_id when vnffgd_template is provided
if (resource == 'vnffg') and ('vnffgd_template' in body['vnffg'])\
and (attr == 'vnffgd_id') and is_create:
if ((resource == 'vnffg')
and ('vnffgd_template' in body['vnffg'])
and (attr == 'vnffgd_id') and is_create):
continue
# skip validating nsd_id when nsd_template is provided
if (resource == 'ns') and ('nsd_template' in body['ns'])\

View File

@ -76,6 +76,7 @@ cfg.CONF.register_cli_opts(core_cli_opts)
def config_opts():
return [(None, core_opts), (None, core_cli_opts)]
# Ensure that the control exchange is set correctly
oslo_messaging.set_transport_defaults(control_exchange='tacker')
@ -89,6 +90,7 @@ def set_db_defaults():
max_pool_size=10,
max_overflow=20, pool_timeout=10)
set_db_defaults()

View File

@ -94,7 +94,7 @@ def find_config_file(options, config_file):
* Search for the configuration files via common cfg directories
:retval Full path to config file, or None if no config file found
"""
fix_path = lambda p: os.path.abspath(os.path.expanduser(p))
fix_path = lambda p: os.path.abspath(os.path.expanduser(p)) # noqa: E731
if options.get('config_file'):
if os.path.exists(options['config_file']):
return fix_path(options['config_file'])
@ -568,7 +568,7 @@ class MemoryUnit(object):
unit = MemoryUnit.UNIT_SIZE_DEFAULT
LOG.info(_('A memory unit is not provided for size; using the '
'default unit %(default)s.') % {'default': 'B'})
regex = re.compile('(\d*)\s*(\w*)')
regex = re.compile(r'(\d*)\s*(\w*)')
result = regex.match(str(size)).groups()
if result[1]:
unit_size = MemoryUnit.validate_unit(result[1])

View File

@ -21,6 +21,8 @@ Create Date: 2016-12-22 20:30:03.931290
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = '000632983ada'
down_revision = '0ad3bbce1c19'

View File

@ -21,6 +21,8 @@ Create Date: 2016-12-17 19:41:01.906138
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = '0ad3bbce1c18'
down_revision = '8f7145914cb0'

View File

@ -21,6 +21,8 @@ Create Date: 2016-09-15 16:27:08.736673
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = '0ae5b1ce3024'
down_revision = '507122918800'

View File

@ -21,6 +21,8 @@ Create Date: 2015-11-26 15:18:19.623170
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = '12a57080b277'
down_revision = '5958429bcb3c'

View File

@ -21,6 +21,8 @@ Create Date: 2015-11-26 15:18:19.623170
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = '12a57080b278'
down_revision = '12a57080b277'

View File

@ -21,6 +21,8 @@ Create Date: 2015-05-18 18:47:22.180962
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = '13c0e0661015'
down_revision = '4c31092895b8'

View File

@ -21,6 +21,8 @@ Create Date: 2018-07-24 16:47:01.378226
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = '13ecc2dd6f7f'
down_revision = '4747cc26b9c6'

View File

@ -21,6 +21,8 @@ Create Date: 2013-11-25 18:06:13.980301
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = '1c6b0d82afcd'
down_revision = None

View File

@ -21,6 +21,8 @@ Create Date: 2016-05-12 13:29:30.615609
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = '22f5385a3d3f'
down_revision = '5f88e86b35c7'

View File

@ -21,6 +21,8 @@ Create Date: 2016-08-01 15:47:51.161749
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = '22f5385a3d4f'
down_revision = 'd4f265e8eb9d'

View File

@ -21,6 +21,8 @@ Create Date: 2016-08-01 15:47:51.161749
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = '22f5385a3d50'
down_revision = '22f5385a3d4f'

View File

@ -21,6 +21,8 @@ Create Date: 2016-01-24 19:21:03.410029
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = '24bec5f211c7'
down_revision = '2774a42c7163'

View File

@ -21,6 +21,8 @@ Create Date: 2015-11-26 15:47:51.161749
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = '2774a42c7163'
down_revision = '12a57080b278'

View File

@ -21,6 +21,8 @@ Create Date: 2016-06-02 15:14:31.888078
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = '2ff0a0e360f1'
down_revision = '22f5385a3d50'

View File

@ -21,6 +21,8 @@ Create Date: 2017-05-30 23:46:20.034085
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = '31acbaeb8299'
down_revision = 'e7993093baf1'

View File

@ -21,6 +21,8 @@ Create Date: 2016-06-02 10:05:22.299780
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = '354de64ba129'
down_revision = 'b07673bb8654'

View File

@ -21,6 +21,8 @@ Create Date: 2018-06-27 03:18:12.227673
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = '4747cc26b9c6'
down_revision = '5d490546290c'

View File

@ -21,6 +21,8 @@ Create Date: 2016-06-07 03:16:53.513392
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = '4ee19c8a6d0a'
down_revision = '941b5a6fff9e'

View File

@ -21,6 +21,8 @@ Create Date: 2016-07-29 21:48:18.816277
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = '507122918800'
down_revision = '4ee19c8a6d0a'

View File

@ -21,6 +21,8 @@ Create Date: 2016-03-22 14:05:15.129330
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = '5246a6bd410f'
down_revision = '24bec5f211c7'

View File

@ -21,6 +21,8 @@ Create Date: 2015-10-05 17:09:24.710961
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = '5958429bcb3c'
down_revision = '13c0e0661015'

View File

@ -21,6 +21,8 @@ Create Date: 2018-01-11 14:27:16.334946
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = '5d490546290c'
down_revision = 'e9a1e47fb0b5'

View File

@ -21,6 +21,8 @@ Create Date: 2016-06-14 11:16:16.303343
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = '5f88e86b35c7'
down_revision = '354de64ba129'

View File

@ -21,6 +21,8 @@ Create Date: 2014-03-19 15:50:11.712686
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = '81ffa86020d'
down_revision = '1c6b0d82afcd'

View File

@ -21,6 +21,8 @@ Create Date: 2016-12-08 17:28:26.609343
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = '8f7145914cb0'
down_revision = '0ae5b1ce3024'

View File

@ -21,6 +21,8 @@ Create Date: 2016-06-06 10:12:49.787430
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = '941b5a6fff9e'
down_revision = '2ff0a0e360f1'

View File

@ -26,8 +26,8 @@ Create Date: 2019-12-10 02:40:12.966027
revision = '975e28392888'
down_revision = 'abbef484b34c'
from alembic import op
import sqlalchemy as sa
from alembic import op # noqa: E402
import sqlalchemy as sa # noqa: E402
def _migrate_duplicate_vnf_package_vnfd_id(table):

View File

@ -14,6 +14,8 @@
# under the License.
#
# flake8: noqa: E402
"""VNF instance management changes
Revision ID: 985e28392890

View File

@ -21,6 +21,8 @@ Create Date: 2019-06-03 08:37:05.095587
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = '9d425296f2c3'
down_revision = 'cd04a8335c18'

View File

@ -25,7 +25,7 @@ Create Date: 2019-11-18 19:34:26.853715
revision = 'abbef484b34c'
down_revision = '9d425296f2c3'
from alembic import op
from alembic import op # noqa: E402
def upgrade(active_plugins=None, options=None):

View File

@ -21,6 +21,8 @@ Create Date: 2016-04-07 23:53:56.623647
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = 'acf941e54075'
down_revision = '5246a6bd410f'

View File

@ -21,6 +21,8 @@ Create Date: 2016-06-01 12:46:07.499279
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = 'b07673bb8654'
down_revision = 'c7cde2f45f82'

View File

@ -21,6 +21,8 @@ Create Date: 2017-03-01 12:28:58.467900
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = 'c256228ed37c'
down_revision = 'ef14f8026327'

View File

@ -21,6 +21,8 @@ Create Date: 2016-06-01 10:58:43.022668
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = 'c7cde2f45f82'
down_revision = '6e56d4474b2a'

View File

@ -21,6 +21,8 @@ Create Date: 2019-01-25 13:43:10.499421
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = 'cd04a8335c18'
down_revision = '13ecc2dd6f7f'

View File

@ -21,6 +21,8 @@ Create Date: 2016-07-14 11:07:28.115225
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = 'd4f265e8eb9d'
down_revision = '22f5385a3d3f'

View File

@ -21,6 +21,8 @@ Create Date: 2017-04-19 10:57:22.157326
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = 'e7993093baf1'
down_revision = 'c256228ed37c'

View File

@ -21,6 +21,8 @@ Create Date: 2017-02-09 00:11:08.081746
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = 'e8918cda6433'
down_revision = '000632983ada'

View File

@ -21,6 +21,8 @@ Create Date: 2017-07-17 10:02:37.572587
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = 'e9a1e47fb0b5'
down_revision = 'f5c1c3b0f6b4'

View File

@ -21,6 +21,8 @@ Create Date: 2017-02-10 12:10:09.606460
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = 'ef14f8026327'
down_revision = 'e8918cda6433'

View File

@ -21,6 +21,8 @@ Create Date: 2017-06-23 03:03:12.200270
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = 'f5c1c3b0f6b4'
down_revision = '31acbaeb8299'

View File

@ -21,6 +21,8 @@ Create Date: 2016-05-28 07:13:07.125562
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = 'f958f58e5daa'
down_revision = 'acf941e54075'
@ -43,8 +45,8 @@ def upgrade(active_plugins=None, options=None):
'devicetemplateattributes')
for table in pk_id_tables:
with migration.modify_foreign_keys_constraint(FK_MAP.get(table, [])):
op.alter_column(table, 'id', type_=types.Uuid,
nullable=False)
op.alter_column(table, 'id', type_=types.Uuid,
nullable=False)
fk_template_id_tables = ('devices', 'servicetypes',
'devicetemplateattributes')

View File

@ -348,9 +348,9 @@ class VNFMPluginDb(vnfm.VNFMPluginBase, db_base.CommonDbMixin):
return self._make_vnfd_dict(vnfd_db)
def get_vnfds(self, context, filters, fields=None):
if 'template_source' in filters and \
filters['template_source'][0] == 'all':
filters.pop('template_source')
if ('template_source' in filters and
filters['template_source'][0] == 'all'):
filters.pop('template_source')
return self._get_collection(context, VNFD,
self._make_vnfd_dict,
filters=filters, fields=fields)
@ -419,10 +419,10 @@ class VNFMPluginDb(vnfm.VNFMPluginBase, db_base.CommonDbMixin):
deleted_at=datetime.min)
context.session.add(vnf_db)
for key, value in attributes.items():
arg = VNFAttribute(
id=uuidutils.generate_uuid(), vnf_id=vnf_id,
key=key, value=value)
context.session.add(arg)
arg = VNFAttribute(
id=uuidutils.generate_uuid(), vnf_id=vnf_id,
key=key, value=value)
context.session.add(arg)
except DBDuplicateEntry as e:
raise exceptions.DuplicateEntity(
_type="vnf",

View File

@ -97,7 +97,7 @@ def load_csar(package_uuid, location):
try:
resp, size = glance_store.backend.get_from_backend(location)
except Exception as exp:
except Exception:
LOG.info("Failed to get csar data from glance store %(location)s for "
"package %(uuid)s",
{"location": location, "uuid": package_uuid})

View File

@ -12,9 +12,10 @@
# License for the specific language governing permissions and limitations
# under the License.
import pycodestyle
import re
import pep8
from hacking import core
"""
Guidelines for writing new hacking checks
@ -35,16 +36,13 @@ log_translation = re.compile(
r"(.)*LOG\.(audit|error|info|warn|warning|critical|exception)\(\s*('|\")")
def validate_log_translations(logical_line, physical_line, filename):
@core.flake8ext
def validate_log_translations(physical_line, logical_line, filename):
# Translations are not required in the test directory
if "tacker/tests" in filename:
return
if pep8.noqa(physical_line):
if pycodestyle.noqa(physical_line):
return
msg = "N320: Log messages require translations!"
if log_translation.match(logical_line):
yield (0, msg)
def factory(register):
register(validate_log_translations)

View File

@ -81,7 +81,7 @@ class BarbicanKeyManager(key_manager.KeyManager):
def _get_keystone_auth(self, context):
if context.__class__.__name__ is 'KeystonePassword':
if context.__class__.__name__ == 'KeystonePassword':
return identity.Password(
auth_url=self._auth_url,
username=context.username,
@ -97,7 +97,7 @@ class BarbicanKeyManager(key_manager.KeyManager):
project_domain_id=context.project_domain_id,
project_domain_name=context.project_domain_name,
reauthenticate=context.reauthenticate)
elif context.__class__.__name__ is 'KeystoneToken':
elif context.__class__.__name__ == 'KeystoneToken':
return identity.Token(
auth_url=self._auth_url,
token=context.token,
@ -111,8 +111,8 @@ class BarbicanKeyManager(key_manager.KeyManager):
reauthenticate=context.reauthenticate)
# this will be kept for oslo.context compatibility until
# projects begin to use utils.credential_factory
elif (context.__class__.__name__ is 'RequestContext' or
context.__class__.__name__ is 'Context'):
elif (context.__class__.__name__ == 'RequestContext' or
context.__class__.__name__ == 'Context'):
return identity.Token(
auth_url=self._auth_url,
token=context.auth_token,

View File

@ -320,14 +320,14 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,
return client_type(session=sess)
def _translate_ip_protocol(self, ip_proto):
if ip_proto == '1':
return 'icmp'
elif ip_proto == '6':
return 'tcp'
elif ip_proto == '17':
return 'udp'
else:
return None
if ip_proto == '1':
return 'icmp'
elif ip_proto == '6':
return 'tcp'
elif ip_proto == '17':
return 'udp'
else:
return None
def _create_classifier_params(self, fc):
classifier_params = {}
@ -678,20 +678,20 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,
def _dissociate_classifier_from_chain(self, chain_id, fc_ids,
neutronclient):
pc_info = neutronclient.port_chain_show(chain_id)
current_fc_list = pc_info['port_chain']['flow_classifiers']
for fc_id in fc_ids:
current_fc_list.remove(fc_id)
pc_id = neutronclient.port_chain_update(chain_id,
{'flow_classifiers': current_fc_list})
if pc_id is None:
raise nfvo.UpdateClassifierException(
message="Failed to update classifiers")
for fc_id in fc_ids:
try:
neutronclient.flow_classifier_delete(fc_id)
except ValueError as e:
raise e
pc_info = neutronclient.port_chain_show(chain_id)
current_fc_list = pc_info['port_chain']['flow_classifiers']
for fc_id in fc_ids:
current_fc_list.remove(fc_id)
pc_id = neutronclient.port_chain_update(chain_id,
{'flow_classifiers': current_fc_list})
if pc_id is None:
raise nfvo.UpdateClassifierException(
message="Failed to update classifiers")
for fc_id in fc_ids:
try:
neutronclient.flow_classifier_delete(fc_id)
except ValueError as e:
raise e
def remove_and_delete_flow_classifiers(self, chain_id, fc_ids,
auth_attr=None):

View File

@ -62,8 +62,6 @@ class WorkflowGenerator(workflow_generator.WorkflowGeneratorBase):
'retry': {
'count': 10,
'delay': 10,
'break-on': '<% $.status_{0} = "ACTIVE" '
'%>'.format(node),
'break-on': '<% $.status_{0} = "ERROR"'
' %>'.format(node),
'continue-on': '<% $.status_{0} = "PENDING_CREATE" '

View File

@ -580,7 +580,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
LOG.debug('Attempting to open key file for vim id %s', vim_id)
try:
with open(key_file, 'r') as f:
return f.read()
return f.read()
except Exception:
LOG.warning('VIM id invalid or key not found for %s', vim_id)
raise nfvo.VimKeyNotFoundException(vim_id=vim_id)

View File

@ -69,7 +69,8 @@ def _add_user_defined_data(context, package_uuid, user_data,
def _vnf_package_user_data_get_query(context, package_uuid, model):
return api.model_query(context, model, read_deleted="no", project_only=True).\
return api.model_query(context, model, read_deleted="no",
project_only=True).\
filter_by(package_uuid=package_uuid)

View File

@ -207,7 +207,7 @@ class VnfSoftwareImage(base.TackerObject, base.TackerPersistentObject):
for field in self.fields:
if field in software_image and field not in self:
setattr(self, field, getattr(software_image, field))
setattr(self, field, getattr(software_image, field))
@base.remotable
def create(self):

View File

@ -316,7 +316,7 @@ class FieldCheck(policy.Check):
attr = attributes.RESOURCE_ATTRIBUTE_MAP[resource][field]
conv_func = attr['convert_to']
except KeyError:
conv_func = lambda x: x
conv_func = lambda x: x # noqa: E731
self.field = field
self.value = conv_func(value)

View File

@ -26,6 +26,7 @@ def requires_py2(testcase):
def requires_py3(testcase):
return testtools.skipUnless(six.PY3, "requires python 3.x")(testcase)
if sys.version_info < (3,):
def compact_byte(x):
return x

View File

@ -196,12 +196,12 @@ class BaseTackerTest(base.BaseTestCase):
sleep_interval):
start_time = int(time.time())
while True:
vnf_result = self.client.show_vnf(vnf_id)
status = vnf_result['vnf']['status']
if (status == target_status) or (
(int(time.time()) - start_time) > timeout):
break
time.sleep(sleep_interval)
vnf_result = self.client.show_vnf(vnf_id)
status = vnf_result['vnf']['status']
if (status == target_status) or (
(int(time.time()) - start_time) > timeout):
break
time.sleep(sleep_interval)
self.assertEqual(status, target_status,
"vnf %(vnf_id)s with status %(status)s is"

View File

@ -54,7 +54,7 @@ class NsdTestCreate(base.BaseTackerTest):
try:
self.client.delete_nsd(nsd_id)
except Exception:
assert False, "nsd Delete failed"
assert False, "nsd Delete failed"
def _test_delete_vnfd(self, vnfd_id, timeout=constants.NS_DELETE_TIMEOUT):
start_time = int(time.time())
@ -76,12 +76,12 @@ class NsdTestCreate(base.BaseTackerTest):
sleep_interval):
start_time = int(time.time())
while True:
ns_result = self.client.show_ns(ns_id)
status = ns_result['ns']['status']
if (status == target_status) or (
(int(time.time()) - start_time) > timeout):
break
time.sleep(sleep_interval)
ns_result = self.client.show_ns(ns_id)
status = ns_result['ns']['status']
if (status == target_status) or (
(int(time.time()) - start_time) > timeout):
break
time.sleep(sleep_interval)
self.assertEqual(status, target_status,
"ns %(ns_id)s with status %(status)s is"
@ -222,11 +222,11 @@ class NsdTestCreate(base.BaseTackerTest):
timeout=60, sleep_interval=2):
start_time = int(time.time())
while True:
server_info = self.novaclient().servers.get(server_id)
if (server_info.status == target_status) or (
(int(time.time()) - start_time) > timeout):
break
time.sleep(sleep_interval)
server_info = self.novaclient().servers.get(server_id)
if (server_info.status == target_status) or (
(int(time.time()) - start_time) > timeout):
break
time.sleep(sleep_interval)
def test_create_delete_ns_vnffg(self):
net = self.neutronclient().list_networks()

View File

@ -106,20 +106,20 @@ class ResourceExtensionTest(base.BaseTestCase):
return {'collection': 'value'}
class DummySvcPlugin(wsgi.Controller):
def get_plugin_type(self):
return constants.DUMMY
def get_plugin_type(self):
return constants.DUMMY
def index(self, request, **kwargs):
return "resource index"
def index(self, request, **kwargs):
return "resource index"
def custom_member_action(self, request, **kwargs):
return {'member_action': 'value'}
def custom_member_action(self, request, **kwargs):
return {'member_action': 'value'}
def collection_action(self, request, **kwargs):
return {'collection': 'value'}
def collection_action(self, request, **kwargs):
return {'collection': 'value'}
def show(self, request, id):
return {'data': {'id': id}}
def show(self, request, id):
return {'data': {'id': id}}
def test_exceptions_notimplemented(self):
controller = self.ResourceExtensionController()

View File

@ -1359,22 +1359,22 @@ class ExtensionTestCase(base.BaseTestCase):
class TestSubresourcePlugin(object):
def get_network_dummies(self, context, network_id,
filters=None, fields=None):
return []
def get_network_dummies(self, context, network_id,
filters=None, fields=None):
return []
def get_network_dummy(self, context, id, network_id,
fields=None):
return {}
def get_network_dummy(self, context, id, network_id,
fields=None):
return {}
def create_network_dummy(self, context, network_id, dummy):
return {}
def create_network_dummy(self, context, network_id, dummy):
return {}
def update_network_dummy(self, context, id, network_id, dummy):
return {}
def update_network_dummy(self, context, id, network_id, dummy):
return {}
def delete_network_dummy(self, context, id, network_id):
return
def delete_network_dummy(self, context, id, network_id):
return
class ListArgsTestCase(base.BaseTestCase):

View File

@ -29,6 +29,7 @@ def _get_template(name):
f = codecs.open(filename, encoding='utf-8', errors='strict')
return f.read()
tosca_cvnf_vnfd = _get_template('test_tosca_cvnf.yaml')
tosca_vnfd_openwrt = _get_template('test_tosca_openwrt.yaml')
tosca_vnfd_openwrt_param = _get_template('test_tosca_openwrt_param.yaml')
@ -164,24 +165,6 @@ def get_dummy_vnf_invalid_param_type_obj():
return {'vnf': {u'attributes': {u'param_values': 'dummy_param'}}}
def get_dummy_vnf_invalid_config_type_obj():
return {'vnf': {u'attributes': {u'config': 'dummy_config'}}}
def get_dummy_vnf_invalid_param_content():
return {'vnf': {u'attributes': {u'param_values': {}}}}
def get_dummy_vnf_param_obj():