Port from Python 2 to Python 3

Fix node_check failing to evaluate down hypervisors
Switch the test runner from testrepository to stestr

Change-Id: I99bdc541f28781665b751b05de5605a54f766497
Sawan Choudhary 2020-05-26 20:36:55 -07:00
parent 0a2caaa3fe
commit 2c8a5af1b5
27 changed files with 100 additions and 105 deletions

.gitignore

@@ -25,7 +25,7 @@ pip-log.txt
.coverage
.tox
nosetests.xml
.testrepository
.stestr/
.venv
# Translations

.stestr.conf (new file)

@@ -0,0 +1,3 @@
[DEFAULT]
test_path=${OS_TEST_PATH:-./cloudpulse/tests/unit}
top_dir=./

@@ -2,6 +2,8 @@
check:
jobs:
- openstack-tox-pep8
- openstack-tox-py36
gate:
jobs:
- openstack-tox-pep8
- openstack-tox-py36

@@ -100,7 +100,7 @@ class Periodic_TestManager(os_service.Service):
def start(self):
tasks = CONF.periodic_tests
for key in tasks.keys():
for key in list(tasks.keys()):
interval, task_name = tasks[key], key
if int(interval) > 0:
period_task = Periodic_Task(task_name)
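
In Python 3, dict.keys() returns a live view rather than a list, so the list(...) wrapper (2to3's mechanical rewrite, used throughout this change) only matters when the dict can be mutated while iterating. A minimal sketch with an illustrative task table:

tasks = {'ping_test': '240', 'nova_endpoint': '0'}   # illustrative values

# Iterating a snapshot of the keys lets entries be deleted safely as we go;
# iterating tasks.keys() directly would raise RuntimeError on mutation.
for key in list(tasks.keys()):
    if int(tasks[key]) <= 0:
        del tasks[key]

print(tasks)   # {'ping_test': '240'}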

@@ -37,7 +37,10 @@ def validate_limit(limit):
if limit is not None and limit <= 0:
raise wsme.exc.ClientSideError(_("Limit must be positive"))
return min(CONF.api.max_limit, limit) or CONF.api.max_limit
if limit is not None:
return min(CONF.api.max_limit, limit)
else:
return CONF.api.max_limit
def validate_sort_dir(sort_dir):
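
The old one-liner relied on Python 2 ordering None below integers: min(max_limit, None) returned None, so the or-fallback kicked in. Python 3 raises TypeError when comparing None with an int, hence the explicit branch. A self-contained sketch, with a stand-in value for CONF.api.max_limit:

MAX_LIMIT = 1000   # stand-in for CONF.api.max_limit

def validate_limit(limit):
    # Branching explicitly avoids min(MAX_LIMIT, None), which raises
    # TypeError on Python 3 because None is not orderable against int.
    if limit is not None:
        return min(MAX_LIMIT, limit)
    return MAX_LIMIT

assert validate_limit(None) == 1000
assert validate_limit(50) == 50
assert validate_limit(5000) == 1000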

@@ -49,8 +49,8 @@ class AuthTokenMiddleware(auth_token.AuthProtocol):
# The information whether the API call is being performed against the
# public API is required for some other components. Saving it to the
# WSGI environment is reasonable thereby.
env['is_public_api'] = any(map(lambda pattern: re.match(pattern, path),
self.public_api_routes))
env['is_public_api'] = any(
[re.match(pattern, path) for pattern in self.public_api_routes])
if env['is_public_api']:
return self._app(env, start_response)

@@ -18,9 +18,7 @@ response with one formatted so the client can parse it.
Based on pecan.middleware.errordocument
"""
import json
# from xml import etree as et
from oslo_serialization import jsonutils
from cloudpulse.openstack.common._i18n import _
@@ -61,7 +59,8 @@ class ParsableErrorMiddleware(object):
app_iter = self.app(environ, replacement_start_response)
if (state['status_code'] // 100) not in (2, 3):
body = [json.dumps({'error_message': '\n'.join(app_iter)})]
body = [jsonutils.dump_as_bytes(
{'error_message': b'\n'.join(app_iter)})]
state['headers'].append(('Content-Type', 'application/json'))
state['headers'].append(('Content-Length', str(len(body[0]))))
else:

@@ -200,7 +200,7 @@ class CloudpulseException(Exception):
# kwargs doesn't match a variable in the message
# log the issue and the kwargs
LOG.exception(_LE('Exception in string format operation'))
for name, value in kwargs.iteritems():
for name, value in kwargs.items():
LOG.error(_LE("%(name)s: %(value)s") %
{'name': name, 'value': value})
try:

@@ -38,13 +38,13 @@ def getcallargs(function, *args, **kwargs):
if 'self' in argnames[0] or 'cls' == argnames[0]:
# The function may not actually be a method or have im_self.
# Typically seen when it's stubbed with mox.
if inspect.ismethod(function) and hasattr(function, 'im_self'):
keyed_args[argnames[0]] = function.im_self
if inspect.ismethod(function) and hasattr(function, '__self__'):
keyed_args[argnames[0]] = function.__self__
else:
keyed_args[argnames[0]] = None
remaining_argnames = filter(lambda x: x not in keyed_args, argnames)
keyed_args.update(dict(zip(remaining_argnames, args)))
remaining_argnames = [x for x in argnames if x not in keyed_args]
keyed_args.update(dict(list(zip(remaining_argnames, args))))
if defaults:
num_defaults = len(defaults)
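
The Python 2-only im_self attribute is gone in Python 3; a bound method exposes its instance as __self__ instead, which is what the hasattr check above now probes. A small illustrative sketch:

import inspect

class Greeter(object):               # illustrative class
    def hello(self):
        return 'hi'

bound = Greeter().hello
assert inspect.ismethod(bound)
assert hasattr(bound, '__self__')    # Python 3 spelling of im_self
assert bound.__self__.hello() == 'hi'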

@@ -65,7 +65,7 @@ testthreads = []
def delete_old_entries():
tasks = CONF.periodic_tests
num_tests = CONF.database.max_db_entries
num_range = len([key for key in tasks.keys() if int(tasks[key]) > 0])
num_range = len([key for key in list(tasks.keys()) if int(tasks[key]) > 0])
conn = dbapi.get_backend()
conn.delete_old_tests(num_range, num_tests)

@@ -26,7 +26,7 @@ import pecan
import random
import re
import shutil
import sys
import six
import tempfile
import uuid
@@ -36,11 +36,6 @@ from oslo_config import cfg
from oslo_utils import excutils
import paramiko
if sys.version_info.major == 3:
from past.builtins import basestring
import six
from cloudpulse.common import exception
from cloudpulse.openstack.common._i18n import _
from cloudpulse.openstack.common._i18n import _LE
@@ -376,14 +371,14 @@ def temporary_mutation(obj, **kwargs):
NOT_PRESENT = object()
old_values = {}
for attr, new_value in kwargs.items():
for attr, new_value in list(kwargs.items()):
old_values[attr] = get(obj, attr, NOT_PRESENT)
set_value(obj, attr, new_value)
try:
yield
finally:
for attr, old_value in old_values.items():
for attr, old_value in list(old_values.items()):
if old_value is NOT_PRESENT:
delete(obj, attr)
else:
@@ -582,7 +577,7 @@ def allow_logical_names():
def raise_exception_invalid_scheme(url):
valid_schemes = ['http', 'https']
if not isinstance(url, basestring):
if not isinstance(url, str):
raise exception.Urllib2InvalidScheme(url=url)
scheme = url.split(':')[0]

@@ -16,9 +16,6 @@ from cloudpulse import objects
import contextlib
from oslo_log import log as logging
from oslo_utils import excutils
import sys
if sys.version_info.major == 3:
from past.builtins import xrange
import time
LOG = logging.getLogger(__name__)
@@ -31,7 +28,7 @@ class CpulseLock(object):
self.conductor_id = conductor_id
def acquire(self, retry=True, times=10):
for num in xrange(0, times):
for num in range(0, times):
lock_id = objects.CpulseLock.create(self.cpulse_test.name,
self.conductor_id)
if lock_id is None:

@@ -16,7 +16,7 @@
# W0621: Redefining name %s from outer scope
# pylint: disable=W0603,W0621
from __future__ import print_function
import getpass
import inspect

@@ -14,7 +14,7 @@
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
import copy
import errno
@@ -80,7 +80,7 @@ def _print_greenthreads():
def _print_nativethreads():
for threadId, stack in sys._current_frames().items():
for threadId, stack in list(sys._current_frames().items()):
print(threadId)
traceback.print_stack(stack)
print()

@@ -457,7 +457,7 @@ def _find_facility_from_conf():
facility = facility_names.get(CONF.syslog_log_facility)
if facility is None:
valid_facilities = facility_names.keys()
valid_facilities = list(facility_names.keys())
consts = ['LOG_AUTH', 'LOG_AUTHPRIV', 'LOG_CRON', 'LOG_DAEMON',
'LOG_FTP', 'LOG_KERN', 'LOG_LPR', 'LOG_MAIL', 'LOG_NEWS',
'LOG_AUTH', 'LOG_SYSLOG', 'LOG_USER', 'LOG_UUCP',
@@ -659,7 +659,7 @@ class ContextFormatter(logging.Formatter):
context = getattr(local.store, 'context', None)
if context:
d = _dictify_context(context)
for k, v in d.items():
for k, v in list(d.items()):
setattr(record, k, v)
# NOTE(sdague): default the fancier formatting params

@@ -156,7 +156,7 @@ class _PeriodicTasksMeta(type):
except AttributeError:
cls._periodic_spacing = {}
for value in cls.__dict__.values():
for value in list(cls.__dict__.values()):
if getattr(value, '_periodic_task', False):
cls._add_periodic_task(value)

@@ -391,7 +391,7 @@ class ProcessLauncher(object):
cfg.CONF.reload_config_files()
for service in set(
[wrap.service for wrap in self.children.values()]):
[wrap.service for wrap in list(self.children.values())]):
service.reset()
for pid in self.children:

@@ -12,7 +12,7 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from hostObj import HostObject
from cloudpulse.operator.ansible.hostObj import HostObject
import os
import yaml
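
Python 3 dropped implicit relative imports, so the bare "from hostObj import ..." no longer resolves against the module's own directory; the patch switches to the absolute path. A sketch of the two spellings that work on Python 3, assuming the cloudpulse package shown here is importable:

# Absolute import, as the patch uses:
from cloudpulse.operator.ansible.hostObj import HostObject

# An explicit relative import would also work from inside the same package:
# from .hostObj import HostObject
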
@@ -39,7 +39,7 @@ class os_cfg_reader(object):
def setOpenstackNodeIp(self):
# print self.hostYamlObj
for key in self.hostYamlObj.keys():
for key in list(self.hostYamlObj.keys()):
name = key
ip = self.hostYamlObj[key]["ip"]
hostname = key
@@ -81,8 +81,8 @@ class os_cfg_reader(object):
obj.getPassword())
f.write('\n')
f.close()
"""
"""
def update_ansible_playbook(self):
f = open('testcase-configs/ansible-playbook.yaml')
f1 = open('testcase-configs/ansible-playbook_update.yaml', "w")
@@ -93,7 +93,7 @@ class os_cfg_reader(object):
f1.write(line)
f.close()
f1.close()
"""
"""
if __name__ == '__main__':
yhp = os_cfg_reader()
yhp.setOpenstackNodeIp()

@@ -13,8 +13,7 @@
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
from openstack_node import openstack_node_obj
from cloudpulse.operator.ansible.openstack_node import openstack_node_obj
import yaml
@@ -35,7 +34,7 @@ class openstack_node_info_reader(object):
def get_host_list(self):
openstack_host_list = []
for key in self.hostYamlObj.keys():
for key in list(self.hostYamlObj.keys()):
name = key
ip = self.hostYamlObj[key]["ip"]
hostname = key
@@ -60,8 +59,8 @@ class openstack_node_info_reader(object):
def get_galera_details(self):
galera = {}
print(self.hostYamlObj)
for key in self.hostYamlObj.keys():
if 'galerauser' in self.hostYamlObj[key].keys():
for key in list(self.hostYamlObj.keys()):
if 'galerauser' in list(self.hostYamlObj[key].keys()):
galera['username'] = self.hostYamlObj[key]['galerauser']
galera['password'] = self.hostYamlObj[key]['galerapassword']

@@ -10,7 +10,7 @@
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
from cloudpulse.openstack.api import keystone_session
from cloudpulse.openstack.api.nova_api import NovaHealth
from cloudpulse.operator.ansible.openstack_node_info_reader import \
@@ -272,7 +272,7 @@ class operator_scenario(base.Scenario):
docker_failed = ""
res['output'] = res['output'].split('\n')
output = filter(lambda x: not re.match(r'^\s*$', x), res['output'])
output = [x for x in res['output'] if not re.match(r'^\s*$', x)]
for line in output:
line = line.split('|')
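
On Python 3, filter() yields a lazy iterator rather than a list, so code that expects list semantics (len(), indexing, repeated iteration) is rewritten as a comprehension. A small sketch with throwaway output lines:

import re

lines = ['node-1 | SUCCESS => {...}', '', '   ', 'node-2 | UNREACHABLE!']

lazy = filter(lambda x: not re.match(r'^\s*$', x), lines)   # iterator on Python 3
eager = [x for x in lines if not re.match(r'^\s*$', x)]     # plain list, as before

assert list(lazy) == eager == ['node-1 | SUCCESS => {...}', 'node-2 | UNREACHABLE!']
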
@@ -348,7 +348,7 @@ class operator_scenario(base.Scenario):
# Handle ceph status in luminous, result should be picked form
# 'status' instead of 'overall_status'
if len(ceph_json['health']['summary']) and \
'summary' in ceph_json['health']['summary'][0].keys() \
'summary' in list(ceph_json['health']['summary'][0].keys()) \
and 'mon health preluminous compat warning' in \
ceph_json['health']['summary'][0]['summary']:
overall_status = ceph_json['health']['status']
@@ -373,48 +373,44 @@ class operator_scenario(base.Scenario):
@base.scenario(admin_only=False, operator=True)
def node_check(self):
failed_hosts = None
failed_hosts = []
self.load()
nodes_from_ansible_config = [node.name.lower()
for node in
nodes_from_ansible_config = [node.name for node in
self.os_node_info_obj.get_host_list()
if "compute" in node.role.split()]
nova_hypervisor_list = self._get_nova_hypervior_list()
if nova_hypervisor_list[0] != 200:
return (404, ("Cannot get hypervisor list from "
"Nova reason-%sa") % nova_hypervisor_list[1])
nodes_from_nova = [node.lower() for node in nova_hypervisor_list[2]]
extra_nodes_nova = set(
nodes_from_nova) - set(nodes_from_ansible_config)
extra_nodes_ansible = set(
nodes_from_ansible_config) - set(nodes_from_nova)
if extra_nodes_nova:
return (404, ("Hypervisors in nova hypervisor list are more"
" than configured.nova hypervisor list = %s") %
nodes_from_nova)
if extra_nodes_ansible:
return (404, ("Hypervisors in nova hypervisor list are less"
" than configured.nova hypervisor list = %s") %
nodes_from_nova)
nodes_from_nova = [node for node in nova_hypervisor_list[2]]
anscmd = ("ansible -o all -i '%s' -m ping -u root" %
','.join(nova_hypervisor_list[2]))
','.join(nodes_from_ansible_config))
res = execute(anscmd)
res['output'] = res['output'].split('\n')
output = filter(lambda x: not re.match(r'^\s*$', x), res['output'])
for line in output:
if "SUCCESS" not in line:
failed_hosts = failed_hosts + line.split('|')[0]
failed_hosts.append(line.split('|')[0].strip())
if not res['status']:
return (200, "All nodes are up.nova hypervisor list = %s" %
nodes_from_nova)
# Check if ansible ping cmd failed with reason other than unreachable
# nodes
if res['status'] and not failed_hosts:
return (404, "Unable to perform ping test on nodes, ansible cmd: "
"'%s' failed" % anscmd)
# Check if nova also recognizes that passed nodes were up
nova_failed_hosts = [node for node in nodes_from_ansible_config if
node not in nodes_from_nova]
failed_hosts = list(set(nova_failed_hosts + failed_hosts))
if not failed_hosts:
return (200, "All nodes are up. nova hypervisor list = %s" %
', '.join(nodes_from_nova))
else:
msg = "Some nodes are not up"
if failed_hosts:
msg = ("The following nodes are not up: %s."
"nova hypervisor list = %s" %
(str(failed_hosts[0]), nodes_from_nova))
msg = ("The following nodes are down: %s. nova hypervisor list = "
"%s" % (', '.join(failed_hosts),
', '.join(nodes_from_nova)))
return (404, msg)
@base.scenario(admin_only=False, operator=True)
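
The reworked node_check aggregates failures instead of comparing raw host-count sets: any host whose ansible ping line lacks SUCCESS is recorded, and any configured compute node that nova does not report as a hypervisor is added, so down hypervisors are now reported by name. A minimal sketch of that aggregation, with made-up hosts and output lines:

ansible_nodes = ['compute-1', 'compute-2', 'compute-3']   # made-up inventory
nova_hypervisors = ['compute-1', 'compute-2']             # compute-3 unknown to nova

ping_output = [
    'compute-1 | SUCCESS => {"ping": "pong"}',
    'compute-2 | UNREACHABLE! => {"msg": "timed out"}',
]

failed_hosts = [line.split('|')[0].strip()
                for line in ping_output if 'SUCCESS' not in line]
nova_failed = [n for n in ansible_nodes if n not in nova_hypervisors]
failed_hosts = sorted(set(failed_hosts + nova_failed))

print(failed_hosts)   # ['compute-2', 'compute-3']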

@@ -88,7 +88,7 @@ class TestCase(base.BaseTestCase):
def config(self, **kw):
"""Override config options for a test."""
group = kw.pop('group', None)
for k, v in kw.iteritems():
for k, v in kw.items():
CONF.set_override(k, v, group)
def path_get(self, project_file=None):

@@ -12,14 +12,14 @@
import mock
fakeAuthTokenHeaders = {'X-User-Id': u'773a902f022949619b5c2f32cd89d419',
'X-Roles': u'admin, ResellerAdmin, _member_',
'X-Project-Id': u'5588aebbcdc24e17a061595f80574376',
fakeAuthTokenHeaders = {'X-User-Id': '773a902f022949619b5c2f32cd89d419',
'X-Roles': 'admin, ResellerAdmin, _member_',
'X-Project-Id': '5588aebbcdc24e17a061595f80574376',
'X-Project-Name': 'test',
'X-User-Name': 'test',
'X-Auth-Token': u'5588aebbcdc24e17a061595f80574376',
'X-Forwarded-For': u'10.10.10.10, 11.11.11.11',
'X-Service-Catalog': u'{test: 12345}',
'X-Auth-Token': '5588aebbcdc24e17a061595f80574376',
'X-Forwarded-For': '10.10.10.10, 11.11.11.11',
'X-Service-Catalog': '{test: 12345}',
'X-Auth-Url': 'fake_auth_url',
'X-Identity-Status': 'Confirmed',
'X-User-Domain-Name': 'domain',

@@ -66,11 +66,11 @@ class TestContextHook(base.TestCase):
class TestNoExceptionTracebackHook(api_base.FunctionalTest):
TRACE = [u'Traceback (most recent call last):',
u' File "/opt/stack/cloudpulse/cloudpulse/openstack',
u'/common/rpc/amqp.py",',
TRACE = ['Traceback (most recent call last):',
' File "/opt/stack/cloudpulse/cloudpulse/openstack',
'/common/rpc/amqp.py",',
' line 434, in _process_data\\n **args)',
u' File "/opt/stack/cloudpulse/cloudpulse/openstack/common/rpc/'
' File "/opt/stack/cloudpulse/cloudpulse/openstack/common/rpc/'
'dispatcher.py", line 172, in dispatch\\n result ='
' getattr(proxyobj, method)(context, **kwargs)']
MSG_WITHOUT_TRACE = "Test exception message."
@@ -103,7 +103,7 @@ class TestNoExceptionTracebackHook(api_base.FunctionalTest):
# rare thing (happens due to wrong deserialization settings etc.)
# we don't care about this garbage.
expected_msg = ("Remote error: %s %s"
% (test_exc_type, self.MSG_WITHOUT_TRACE) + "\n[u'")
% (test_exc_type, self.MSG_WITHOUT_TRACE) + "\n['")
actual_msg = json.loads(response.json['error_message'])['faultstring']
self.assertEqual(expected_msg, actual_msg)

@@ -37,8 +37,8 @@ source_suffix = '.rst'
master_doc = 'index'
# General information about the project.
project = u'cloudpulse'
copyright = u'2013, OpenStack Foundation'
project = 'cloudpulse'
copyright = '2013, OpenStack Foundation'
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
@@ -67,8 +67,8 @@ htmlhelp_basename = '%sdoc' % project
latex_documents = [
('index',
'%s.tex' % project,
u'%s Documentation' % project,
u'OpenStack Foundation', 'manual'),
'%s Documentation' % project,
'OpenStack Foundation', 'manual'),
]
# Example configuration for intersphinx: refer to the Python standard library.

@@ -13,10 +13,9 @@ classifier =
License :: OSI Approved :: Apache Software License
Operating System :: POSIX :: Linux
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.3
Programming Language :: Python :: 3.6
Programming Language :: Python :: 3.7
[files]
packages =

@@ -11,6 +11,6 @@ python-subunit>=0.0.18
sphinx>=1.1.2,!=1.2.0,!=1.3b1,<1.3
oslosphinx>=2.5.0 # Apache-2.0
oslotest>=1.10.0 # Apache-2.0
testrepository>=0.0.18
stestr>=2.0.0
testscenarios>=0.4
testtools>=1.4.0

@@ -1,6 +1,6 @@
[tox]
minversion = 2.0
envlist = pep8
minversion = 3.1.1
envlist = py36,pep8
skipsdist = True
[testenv]
@@ -16,11 +16,13 @@ deps =
-r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
passenv = TEMPEST_* OS_TEST_*
whitelist_externals = find, stestr
commands =
find . -type f -name "*.py[c|o]" -delete
stestr run {posargs}
[testenv:pep8]
whitelist_externals = bash
commands =
bash tools/flake8wrap.sh {posargs}