Merge "Upgrade pylint to a version that works with python3"

Zuul 2018-10-05 17:14:15 +00:00, committed by Gerrit Code Review
commit 3e579256a3
7 changed files with 36 additions and 34 deletions

View File

@@ -2,7 +2,7 @@ alabaster==0.7.10
alembic==0.8.10
amqp==2.1.1
appdirs==1.3.0
-astroid==1.3.8
+astroid==1.6.5
Babel==2.3.4
bandit==1.1.0
bashate==0.5.1
@@ -101,7 +101,7 @@ pycparser==2.18
pyflakes==0.8.1
Pygments==2.2.0
pyinotify==0.9.6
-pylint==1.4.5
+pylint==1.9.2
PyMySQL==0.7.6
pyparsing==2.1.0
pyperclip==1.5.27
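
Note: astroid is the AST library pylint is built on, so the two pins have to
move together; pylint 1.9.2 expects an astroid in the 1.6.x series. A
hypothetical sanity check (not part of the commit) that an environment
actually picked up the new pair:

    # Hypothetical check, not part of the commit: print the installed
    # pylint/astroid versions to confirm they match the new constraints.
    import pkg_resources

    for name in ("pylint", "astroid"):
        print(name, pkg_resources.get_distribution(name).version)
    # expected: pylint 1.9.2 / astroid 1.6.5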

View File

@@ -25,7 +25,7 @@ from oslo_service import loopingcall
from oslo_utils import encodeutils
import requests
import six
-import six.moves.urllib.parse as urlparse
+from six.moves import urllib
import webob

from neutron._i18n import _
@@ -179,7 +179,7 @@ class MetadataProxyHandler(object):
        nova_host_port = '%s:%s' % (self.conf.nova_metadata_host,
                                    self.conf.nova_metadata_port)
-        url = urlparse.urlunsplit((
+        url = urllib.parse.urlunsplit((
            self.conf.nova_metadata_protocol,
            nova_host_port,
            req.path_info,
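
Note: the change here (and in the files below) is purely an import-style
change; six exposes the same Python 3 urllib submodules either way. A
minimal illustrative sketch of the equivalence:

    # Illustrative only: both import styles reach the same function objects.
    import six.moves.urllib.parse as urlparse   # old style, removed
    from six.moves import urllib                # new style, added

    assert urlparse.urlunsplit is urllib.parse.urlunsplit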

View File

@@ -22,7 +22,7 @@ from oslo_config import cfg
import oslo_i18n
from oslo_log import log as logging
from oslo_serialization import jsonutils
-from six.moves.urllib import parse
+from six.moves import urllib
from webob import exc

from neutron._i18n import _
@@ -127,7 +127,7 @@ def get_previous_link(request, items, id_key):
        params['marker'] = marker
    params['page_reverse'] = True
    return "%s?%s" % (prepare_url(get_path_url(request)),
-                      parse.urlencode(params))
+                      urllib.parse.urlencode(params))


def get_next_link(request, items, id_key):
@@ -138,7 +138,7 @@ def get_next_link(request, items, id_key):
        params['marker'] = marker
    params.pop('page_reverse', None)
    return "%s?%s" % (prepare_url(get_path_url(request)),
-                      parse.urlencode(params))
+                      urllib.parse.urlencode(params))


def prepare_url(orig_url):
@@ -147,24 +147,24 @@ def prepare_url(orig_url):
    # Copied directly from nova/api/openstack/common.py
    if not prefix:
        return orig_url
-    url_parts = list(parse.urlsplit(orig_url))
-    prefix_parts = list(parse.urlsplit(prefix))
+    url_parts = list(urllib.parse.urlsplit(orig_url))
+    prefix_parts = list(urllib.parse.urlsplit(prefix))
    url_parts[0:2] = prefix_parts[0:2]
    url_parts[2] = prefix_parts[2] + url_parts[2]
-    return parse.urlunsplit(url_parts).rstrip('/')
+    return urllib.parse.urlunsplit(url_parts).rstrip('/')


def get_path_url(request):
    """Return correct link if X-Forwarded-Proto exists in headers."""
    protocol = request.headers.get('X-Forwarded-Proto')
-    parsed = parse.urlparse(request.path_url)
+    parsed = urllib.parse.urlparse(request.path_url)
    if protocol and parsed.scheme != protocol:
-        new_parsed = parse.ParseResult(
+        new_parsed = urllib.parse.ParseResult(
            protocol, parsed.netloc,
            parsed.path, parsed.params,
            parsed.query, parsed.fragment)
-        return parse.urlunparse(new_parsed)
+        return urllib.parse.urlunparse(new_parsed)
    else:
        return request.path_url
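
Note: these helpers rebuild pagination links by re-encoding the query
parameters onto the prepared URL. A toy example of the same pattern, with
made-up values:

    # Toy example; the URL and parameter values are made up.
    from six.moves import urllib

    params = {'limit': 2, 'marker': 'net-1', 'page_reverse': True}
    link = "%s?%s" % ("http://controller:9696/v2.0/networks",
                      urllib.parse.urlencode(params))
    # -> http://controller:9696/v2.0/networks?limit=2&marker=net-1&page_reverse=True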

View File

@@ -22,7 +22,7 @@ from oslo_config import cfg
from oslo_log import log
import pecan
from pecan import request
-import six.moves.urllib.parse as urlparse
+from six.moves import urllib

from neutron.api.views import versions as versions_view
from neutron import manager
@@ -99,7 +99,7 @@ class V2Controller(object):
        layout = []
        for name, collection in _CORE_RESOURCES.items():
-            href = urlparse.urljoin(pecan.request.path_url, collection)
+            href = urllib.parse.urljoin(pecan.request.path_url, collection)
            resource = {'name': name,
                        'collection': collection,
                        'links': [{'rel': 'self',
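
Note: urljoin resolves each collection name against the request URL, so the
hrefs stay correct behind any path prefix. With a hypothetical request URL:

    # Hypothetical request URL, shown only to illustrate the urljoin call.
    from six.moves import urllib

    href = urllib.parse.urljoin("http://controller:9696/v2.0/", "networks")
    # href == 'http://controller:9696/v2.0/networks'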

View File

@@ -30,7 +30,7 @@ from oslo_db import exception as db_exc
from oslo_policy import policy as oslo_policy
from oslo_utils import uuidutils
import six
-import six.moves.urllib.parse as urlparse
+from six.moves import urllib
import webob
from webob import exc
import webtest
@@ -592,16 +592,16 @@ class JSONV2TestCase(APIv2TestBase, testlib_api.WebTestCase):
        self.assertEqual(1, len(next_links))
        self.assertEqual(1, len(previous_links))

-        url = urlparse.urlparse(next_links[0]['href'])
+        url = urllib.parse.urlparse(next_links[0]['href'])
        self.assertEqual(url.path, _get_path('networks'))
        params['marker'] = [id2]
-        self.assertEqual(params, urlparse.parse_qs(url.query))
+        self.assertEqual(params, urllib.parse.parse_qs(url.query))

-        url = urlparse.urlparse(previous_links[0]['href'])
+        url = urllib.parse.urlparse(previous_links[0]['href'])
        self.assertEqual(url.path, _get_path('networks'))
        params['marker'] = [id1]
        params['page_reverse'] = ['True']
-        self.assertEqual(params, urlparse.parse_qs(url.query))
+        self.assertEqual(params, urllib.parse.parse_qs(url.query))

    def test_list_pagination_with_last_page(self):
        id = str(_uuid())
@@ -631,12 +631,12 @@ class JSONV2TestCase(APIv2TestBase, testlib_api.WebTestCase):
                previous_links.append(r)
        self.assertEqual(1, len(previous_links))

-        url = urlparse.urlparse(previous_links[0]['href'])
+        url = urllib.parse.urlparse(previous_links[0]['href'])
        self.assertEqual(url.path, _get_path('networks'))
        expect_params = params.copy()
        expect_params['marker'] = [id]
        expect_params['page_reverse'] = ['True']
-        self.assertEqual(expect_params, urlparse.parse_qs(url.query))
+        self.assertEqual(expect_params, urllib.parse.parse_qs(url.query))

    def test_list_pagination_with_empty_page(self):
        return_value = []
@@ -657,12 +657,12 @@ class JSONV2TestCase(APIv2TestBase, testlib_api.WebTestCase):
                previous_links.append(r)
        self.assertEqual(1, len(previous_links))

-        url = urlparse.urlparse(previous_links[0]['href'])
+        url = urllib.parse.urlparse(previous_links[0]['href'])
        self.assertEqual(url.path, _get_path('networks'))
        expect_params = params.copy()
        del expect_params['marker']
        expect_params['page_reverse'] = ['True']
-        self.assertEqual(expect_params, urlparse.parse_qs(url.query))
+        self.assertEqual(expect_params, urllib.parse.parse_qs(url.query))

    def test_list_pagination_reverse_with_last_page(self):
        id = str(_uuid())
@@ -693,13 +693,13 @@ class JSONV2TestCase(APIv2TestBase, testlib_api.WebTestCase):
                next_links.append(r)
        self.assertEqual(1, len(next_links))

-        url = urlparse.urlparse(next_links[0]['href'])
+        url = urllib.parse.urlparse(next_links[0]['href'])
        self.assertEqual(url.path, _get_path('networks'))
        expected_params = params.copy()
        del expected_params['page_reverse']
        expected_params['marker'] = [id]
        self.assertEqual(expected_params,
-                         urlparse.parse_qs(url.query))
+                         urllib.parse.parse_qs(url.query))

    def test_list_pagination_reverse_with_empty_page(self):
        return_value = []
@@ -720,12 +720,12 @@ class JSONV2TestCase(APIv2TestBase, testlib_api.WebTestCase):
                next_links.append(r)
        self.assertEqual(1, len(next_links))

-        url = urlparse.urlparse(next_links[0]['href'])
+        url = urllib.parse.urlparse(next_links[0]['href'])
        self.assertEqual(url.path, _get_path('networks'))
        expect_params = params.copy()
        del expect_params['marker']
        del expect_params['page_reverse']
-        self.assertEqual(expect_params, urlparse.parse_qs(url.query))
+        self.assertEqual(expect_params, urllib.parse.parse_qs(url.query))

    def test_create(self):
        net_id = _uuid()
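
Note: the assertions above depend on parse_qs returning every query value
as a list, which is why the expected markers are written as one-element
lists. With illustrative values:

    # Illustrative values: parse_qs always yields list-valued entries.
    from six.moves import urllib

    url = urllib.parse.urlparse(
        "http://host/v2.0/networks?marker=abc&page_reverse=True")
    print(urllib.parse.parse_qs(url.query))
    # -> {'marker': ['abc'], 'page_reverse': ['True']}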

View File

@@ -20,7 +20,7 @@ import ssl
import mock
from neutron_lib import exceptions as exception
from oslo_config import cfg
-import six.moves.urllib.request as urlrequest
+from six.moves import urllib
import testtools
import webob
import webob.exc
@@ -43,12 +43,13 @@ def open_no_proxy(*args, **kwargs):
    # introduced in python 2.7.9 under PEP-0476
    # https://github.com/python/peps/blob/master/pep-0476.txt
    if hasattr(ssl, "_create_unverified_context"):
-        opener = urlrequest.build_opener(
-            urlrequest.ProxyHandler({}),
-            urlrequest.HTTPSHandler(context=ssl._create_unverified_context())
+        opener = urllib.request.build_opener(
+            urllib.request.ProxyHandler({}),
+            urllib.request.HTTPSHandler(
+                context=ssl._create_unverified_context())
        )
    else:
-        opener = urlrequest.build_opener(urlrequest.ProxyHandler({}))
+        opener = urllib.request.build_opener(urllib.request.ProxyHandler({}))
    return opener.open(*args, **kwargs)
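
Note: this helper is the usual PEP 476 workaround for tests: build an opener
that ignores proxy settings and, on interpreters that verify certificates by
default, skips verification. A condensed sketch of the same idea (test use
only):

    # Condensed sketch (test use only): no proxies, and no certificate
    # verification where the interpreter would otherwise enforce it.
    import ssl
    from six.moves import urllib

    handlers = [urllib.request.ProxyHandler({})]
    if hasattr(ssl, "_create_unverified_context"):
        handlers.append(urllib.request.HTTPSHandler(
            context=ssl._create_unverified_context()))
    opener = urllib.request.build_opener(*handlers)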

View File

@@ -18,7 +18,8 @@ oslotest>=3.2.0 # Apache-2.0
stestr>=1.0.0 # Apache-2.0
reno>=2.5.0 # Apache-2.0
ddt>=1.0.1 # MIT
-pylint==1.4.5 # GPLv2
+astroid==1.6.5 # LGPLv2.1
+pylint==1.9.2 # GPLv2
# Needed to run DB commands in virtualenvs
PyMySQL>=0.7.6 # MIT License
bashate>=0.5.1 # Apache-2.0
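
Note: with both tools pinned, pylint can be exercised programmatically as a
quick smoke test; the module path below is illustrative, not part of the
commit:

    # Illustrative only: run the pinned pylint against one module.
    from pylint import epylint

    stdout, stderr = epylint.py_run("neutron/api/api_common.py",
                                    return_std=True)
    print(stdout.getvalue())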