Clean up py27 support

This repo is now testing only with Python 3, so let's make
a few cleanups:
- Remove python 2.7 stanza from setup.py
- Add requires on python >= 3.6 to setup.cfg so that pypi and pip
  know about the requirement
- Remove obsolete sections from setup.cfg
- Update classifiers
- Remove install_command from tox.ini, the default is fine
- Remove Babel from requirements, it's not needed for running.
- Remove unused babel.cfg and obsolete openstack-common.conf
- Use TOX_CONSTRAINTS_FILE instead of obsolete UPPER_CONSTRAINTS_FILE.
- Remove six usage
- Update to hacking 3.1.0 and fix the problems it found.
- Remove hacking and friends from lower-constraints, they are
  blacklisted

Change-Id: I6321b612f37bca7b441171814b059eaad03d9f24
changes/01/751301/1
Andreas Jaeger 3 years ago committed by Andreas Jaeger
parent 4f43c0a254
commit e3ffcc084d

@ -1,2 +0,0 @@
[python: **.py]

@ -2,7 +2,6 @@ alabaster==0.7.10
alembic==0.8.10
amqp==2.1.1
appdirs==1.3.0
Babel==2.3.4
beautifulsoup4==4.6.0
cachetools==2.0.0
cffi==1.7.0
@ -20,12 +19,9 @@ eventlet==0.18.2
extras==1.0.0
fasteners==0.7.0
fixtures==3.0.0
flake8==2.5.5
flake8-import-order==0.12
future==0.16.0
futurist==1.2.0
greenlet==0.4.10
hacking==0.12.0
httplib2==0.9.1
imagesize==0.7.1
iso8601==0.1.11
@ -83,7 +79,6 @@ Paste==2.0.2
PasteDeploy==1.5.0
pbr==2.0.0
pecan==1.0.0
pep8==1.5.7
pika==0.10.0
pika-pool==0.1.3
positional==1.2.1
@ -91,9 +86,7 @@ prettytable==0.7.2
psutil==3.2.2
psycopg2==2.7.5
pycadf==1.1.0
pycodestyle==2.3.1
pycparser==2.18
pyflakes==0.8.1
Pygments==2.2.0
pyinotify==0.9.6
PyMySQL==0.7.6
@ -118,7 +111,6 @@ rfc3986==0.3.1
Routes==2.3.1
ryu==4.14
simplejson==3.5.1
six==1.10.0
snowballstemmer==1.2.1
SQLAlchemy==1.2.0
sqlalchemy-migrate==0.11.0

@ -18,6 +18,6 @@ eventlet.monkey_patch()
# Monkey patch the original current_thread to use the up-to-date _active
# global variable. See https://bugs.launchpad.net/bugs/1863021 and
# https://github.com/eventlet/eventlet/issues/592
import __original_module_threading as orig_threading
import __original_module_threading as orig_threading # noqa
import threading # noqa
orig_threading.current_thread.__globals__['_active'] = threading._active

@ -144,41 +144,41 @@ class L2GatewayMixin(l2gateway.L2GatewayPluginBase,
tenant_id = self._get_tenant_id_for_create(context, gw)
devices = gw['devices']
with context.session.begin(subtransactions=True):
gw_db = models.L2Gateway(
id=gw.get('id', uuidutils.generate_uuid()),
tenant_id=tenant_id,
name=gw.get('name'))
context.session.add(gw_db)
l2gw_device_dict = {}
for device in devices:
l2gw_device_dict['l2_gateway_id'] = id
device_name = device['device_name']
l2gw_device_dict['device_name'] = device_name
l2gw_device_dict['id'] = uuidutils.generate_uuid()
uuid = self._generate_uuid()
dev_db = models.L2GatewayDevice(id=uuid,
l2_gateway_id=gw_db.id,
device_name=device_name)
context.session.add(dev_db)
for interface_list in device['interfaces']:
int_name = interface_list.get('name')
if constants.SEG_ID in interface_list:
seg_id_list = interface_list.get(constants.SEG_ID)
for seg_ids in seg_id_list:
uuid = self._generate_uuid()
interface_db = self._get_int_model(uuid,
int_name,
dev_db.id,
seg_ids)
context.session.add(interface_db)
else:
gw_db = models.L2Gateway(
id=gw.get('id', uuidutils.generate_uuid()),
tenant_id=tenant_id,
name=gw.get('name'))
context.session.add(gw_db)
l2gw_device_dict = {}
for device in devices:
l2gw_device_dict['l2_gateway_id'] = id
device_name = device['device_name']
l2gw_device_dict['device_name'] = device_name
l2gw_device_dict['id'] = uuidutils.generate_uuid()
uuid = self._generate_uuid()
dev_db = models.L2GatewayDevice(id=uuid,
l2_gateway_id=gw_db.id,
device_name=device_name)
context.session.add(dev_db)
for interface_list in device['interfaces']:
int_name = interface_list.get('name')
if constants.SEG_ID in interface_list:
seg_id_list = interface_list.get(constants.SEG_ID)
for seg_ids in seg_id_list:
uuid = self._generate_uuid()
interface_db = self._get_int_model(uuid,
int_name,
dev_db.id,
0)
seg_ids)
context.session.add(interface_db)
context.session.query(models.L2GatewayDevice).all()
else:
uuid = self._generate_uuid()
interface_db = self._get_int_model(uuid,
int_name,
dev_db.id,
0)
context.session.add(interface_db)
context.session.query(models.L2GatewayDevice).all()
return self._make_l2_gateway_dict(gw_db)
def update_l2_gateway(self, context, id, l2_gateway):

@ -72,33 +72,33 @@ def add_known_arguments(self, parser):
def args2body(self, parsed_args):
    """Build the l2-gateway request body from parsed CLI arguments.

    Each entry in ``parsed_args.devices`` may carry an
    ``interface_names`` string (several interfaces separated by
    ``INTERFACE_DELIMITER``) and an optional ``name``; each entry is
    normalized into a ``{'device_name': ..., 'interfaces': [...]}``
    dict (``device_name`` omitted when the device has no ``name``).

    :param parsed_args: argparse namespace providing ``devices`` and
        ``name`` attributes.
    :returns: request body of the form ``{'l2_gateway': {...}}``.
    """
    if parsed_args.devices:
        devices = parsed_args.devices
        interfaces = []
    else:
        devices = []
    device_dict = []
    for device in devices:
        if 'interface_names' in device.keys():
            interface = device['interface_names']
            if INTERFACE_DELIMITER in interface:
                interface_dict = interface.split(INTERFACE_DELIMITER)
                interfaces = get_interface(interface_dict)
            else:
                interfaces = get_interface([interface])
        if 'name' in device.keys():
            device = {'device_name': device['name'],
                      'interfaces': interfaces}
        else:
            device = {'interfaces': interfaces}
        device_dict.append(device)
    if parsed_args.name:
        l2gw_name = parsed_args.name
        body = {'l2_gateway': {'name': l2gw_name,
                               'devices': device_dict}, }
    else:
        body = {'l2_gateway': {'devices': device_dict}, }
    return body
class L2GatewayCreate(extension.ClientExtensionCreate, L2Gateway):

@ -83,33 +83,33 @@ def get_interface(interfaces):
def _args2body(parsed_args, update=False):
    """Build the l2-gateway request body from parsed CLI arguments.

    Mirrors ``args2body`` but keys the result with the ``L2_GATEWAY``
    constant. Devices are normalized into
    ``{'device_name': ..., 'interfaces': [...]}`` dicts.

    :param parsed_args: argparse namespace providing ``devices`` and
        ``name`` attributes.
    :param update: not used in this code path — presumably kept for
        caller compatibility between create and update flows (TODO
        confirm against callers).
    :returns: request body of the form ``{L2_GATEWAY: {...}}``.
    """
    if parsed_args.devices:
        devices = parsed_args.devices
        interfaces = []
    else:
        devices = []
    device_dict = []
    for device in devices:
        if 'interface_names' in device.keys():
            interface = device['interface_names']
            if INTERFACE_DELIMITER in interface:
                interface_dict = interface.split(INTERFACE_DELIMITER)
                interfaces = get_interface(interface_dict)
            else:
                interfaces = get_interface([interface])
        if 'name' in device.keys():
            device = {'device_name': device['name'],
                      'interfaces': interfaces}
        else:
            device = {'interfaces': interfaces}
        device_dict.append(device)
    if parsed_args.name:
        l2gw_name = parsed_args.name
        body = {L2_GATEWAY: {'name': l2gw_name,
                             'devices': device_dict}, }
    else:
        body = {L2_GATEWAY: {'devices': device_dict}, }
    return body
class CreateL2gw(command.ShowOne):

@ -14,11 +14,8 @@
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class API(object):
class API(object, metaclass=abc.ABCMeta):
def __init__(self, context):
self.context = context

@ -24,7 +24,6 @@ from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import excutils
import six
from networking_l2gw.services.l2gateway.common import constants as n_const
@ -225,7 +224,7 @@ class BaseConnection(object):
rc += 1
if rc > lc:
raise Exception("json string not valid")
elif lc == rc and lc is not 0:
elif lc == rc and lc != 0:
chunks.append(response[message_mark:i + 1])
message = "".join(chunks)
eventlet.greenthread.spawn_n(
@ -269,7 +268,7 @@ class BaseConnection(object):
while retry_count <= n_const.MAX_RETRIES:
try:
data = jsonutils.dumps(message)
if not isinstance(data, six.binary_type):
if not isinstance(data, bytes):
data = data.encode()
if self.enable_manager:
bytes_sent = self.ovsdb_dicts.get(addr).send(data)

@ -97,37 +97,37 @@ class OVSDBMonitor(base_connection.BaseConnection):
def set_monitor_response_handler(self, addr=None):
    """Monitor OVSDB tables to receive events for any changes in OVSDB.

    Registers a ``monitor`` request for the hardware_vtep tables and
    wires the ``update`` notification handler, then processes the
    initial monitor reply.

    :param addr: OVSDB endpoint address to send to (used when the
        connection manager multiplexes several OVSDB servers).
    """
    if self.connected:
        # Random 128-bit id correlates this request with its reply.
        op_id = str(random.getrandbits(128))
        props = {'select': {'initial': True,
                            'insert': True,
                            'delete': True,
                            'modify': True}}
        monitor_message = {'id': op_id,
                           'method': 'monitor',
                           'params': [n_const.OVSDB_SCHEMA_NAME,
                                      None,
                                      {'Logical_Switch': [props],
                                       'Physical_Switch': [props],
                                       'Physical_Port': [props],
                                       'Ucast_Macs_Local': [props],
                                       'Ucast_Macs_Remote': [props],
                                       'Physical_Locator': [props],
                                       'Mcast_Macs_Local': [props],
                                       'Physical_Locator_Set': [props]}
                                      ]}
        self._set_handler("update", self._update_event_handler)
        if not self.send(monitor_message, addr=addr):
            # Return so that this will be retried in the next iteration
            return
        try:
            response_result = self._process_response(op_id)
        except exceptions.OVSDBError:
            with excutils.save_and_reraise_exception():
                if self.enable_manager:
                    self.check_monitor_table_thread = False
                LOG.exception("Exception while receiving the "
                              "response for the monitor message")
        self._process_monitor_msg(response_result, addr)
def _update_event_handler(self, message, addr):
self._process_update_event(message, addr)
@ -232,7 +232,7 @@ class OVSDBMonitor(base_connection.BaseConnection):
rc += 1
if rc > lc:
raise Exception("json string not valid")
elif lc == rc and lc is not 0:
elif lc == rc and lc != 0:
chunks.append(response[message_mark:i + 1])
message = "".join(chunks)
eventlet.greenthread.spawn_n(

@ -251,7 +251,7 @@ class OVSDBWriter(base_connection.BaseConnection):
elif c == '}' and not (prev_char and
prev_char == '\\'):
rc += 1
if lc == rc and lc is not 0:
if lc == rc and lc != 0:
chunks.append(response[0:i + 1])
message = "".join(chunks)
return message

@ -515,7 +515,7 @@ class OVSDBData(object):
logical_switches = db.get_all_logical_switches_by_ovsdb_id(
context, self.ovsdb_identifier)
for logical_switch in logical_switches:
logical_switch_ids.add(logical_switch.get('uuid'))
logical_switch_ids.add(logical_switch.get('uuid'))
return list(logical_switch_ids)
def _get_agent_ips(self, context):

@ -15,11 +15,8 @@
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class L2gwDriverBase(object):
class L2gwDriverBase(object, metaclass=abc.ABCMeta):
def __init__(self, service_plugin, validator=None):
self.service_plugin = service_plugin
@ -101,8 +98,7 @@ class L2gwDriverBase(object):
pass
@six.add_metaclass(abc.ABCMeta)
class L2gwDriver(L2gwDriverBase):
class L2gwDriver(L2gwDriverBase, metaclass=abc.ABCMeta):
def __init__(self, service_plugin, validator=None):
super(L2gwDriver, self).__init__(service_plugin)

@ -39,7 +39,6 @@ from oslo_config import cfg
from oslo_log import log as logging
import oslo_messaging as messaging
from oslo_utils import importutils
import six
LOG = logging.getLogger(__name__)
@ -48,8 +47,7 @@ L2GW_CALLBACK = ("networking_l2gw.services.l2gateway.ovsdb."
"data.L2GatewayOVSDBCallbacks")
@six.add_metaclass(abc.ABCMeta)
class L2gwRpcDriver(service_drivers.L2gwDriver):
class L2gwRpcDriver(service_drivers.L2gwDriver, metaclass=abc.ABCMeta):
"""L2gw RPC Service Driver class."""
def __init__(self, service_plugin, validator=None):

@ -452,11 +452,11 @@ class TestOVSDBWriter(base.BaseTestCase):
base_test.FakeDecodeClass(
jsonutils.dumps(fake_data)))
with mock.patch.object(socket, 'socket', return_value=fake_socket):
ovsdb_conf = base_test.FakeConf()
l2gw_obj = ovsdb_writer.OVSDBWriter(
cfg.CONF.ovsdb, ovsdb_conf)
result = l2gw_obj._recv_data(mock.ANY)
self.assertEqual(jsonutils.dumps(fake_data), result)
ovsdb_conf = base_test.FakeConf()
l2gw_obj = ovsdb_writer.OVSDBWriter(
cfg.CONF.ovsdb, ovsdb_conf)
result = l2gw_obj._recv_data(mock.ANY)
self.assertEqual(jsonutils.dumps(fake_data), result)
def test_recv_data_with_empty_data(self):
"""Test case to test _recv_data with empty data."""

@ -1,6 +0,0 @@
[DEFAULT]
# The list of modules to copy from oslo-incubator.git
# The base module to hold the copy of openstack.common
base=networking_l2gw

@ -3,7 +3,6 @@
# process, which may cause wedges in the gate later.
pbr!=2.1.0,>=2.0.0 # Apache-2.0
Babel!=2.4.0,>=2.3.4 # BSD
neutron-lib>=1.29.0 # Apache-2.0
python-neutronclient>=6.7.0 # Apache-2.0
ovsdbapp>=0.10.0 # Apache-2.0

@ -1,11 +1,12 @@
[metadata]
name = networking-l2gw
summary = API's and implementations to support L2 Gateways in Neutron.
summary = APIs and implementations to support L2 Gateways in Neutron.
description-file =
README.rst
author = OpenStack
author-email = openstack-discuss@lists.openstack.org
home-page = https://opendev.org/openstack/networking-l2gw
python-requires = >=3.6
classifier =
Environment :: OpenStack
Intended Audience :: Information Technology
@ -13,10 +14,12 @@ classifier =
License :: OSI Approved :: Apache Software License
Operating System :: POSIX :: Linux
Programming Language :: Python
Programming Language :: Python :: Implementation :: CPython
Programming Language :: Python :: 3 :: Only
Programming Language :: Python :: 3
Programming Language :: Python :: 3.5
Programming Language :: Python :: 3.6
Programming Language :: Python :: 3.7
Programming Language :: Python :: 3.8
[files]
packages =
@ -46,25 +49,3 @@ openstack.neutronclient.v2 =
l2gw_connection_delete = networking_l2gw.l2gatewayclient.osc.l2gw_connection:DeleteL2gwConnection
neutron.service_plugins =
l2gw = networking_l2gw.services.l2gateway.plugin:L2GatewayPlugin
[build_sphinx]
source-dir = doc/source
build-dir = doc/build
all_files = 1
[upload_sphinx]
upload-dir = doc/build/html
[compile_catalog]
directory = networking_l2gw/locale
domain = networking_l2gw
[update_catalog]
domain = networking_l2gw
output_dir = networking_l2gw/locale
input_file = networking_l2gw/locale/networking_l2gw.pot
[extract_messages]
keywords = _ gettext ngettext l_ lazy_gettext
mapping_file = babel.cfg
output_file = networking_l2gw/locale/networking_l2gw.pot

@ -13,17 +13,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools
# In python < 2.7.4, a lazy loading of package `pbr` will break
# setuptools if some other modules registered functions in `atexit`.
# solution from: http://bugs.python.org/issue15881#msg170215
try:
import multiprocessing # noqa
except ImportError:
pass
setuptools.setup(
setup_requires=['pbr>=2.0.0'],
pbr=True)

@ -2,7 +2,7 @@
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
hacking!=0.13.0,<0.14,>=0.12.0 # Apache-2.0
hacking>=3.1.0,<=3.2.0 # Apache-2.0
coverage!=4.4,>=4.0 # Apache-2.0
flake8-import-order==0.12 # LGPLv3

@ -1,7 +1,8 @@
[tox]
envlist = py36,py37,pep8
minversion = 2.0
minversion = 3.2.0
skipsdist = True
ignore_basepython_conflict=true
[testenv]
basepython = python3
@ -9,8 +10,7 @@ setenv =
VIRTUAL_ENV={envdir}
PYTHONWARNINGS=default::DeprecationWarning
usedevelop = True
install_command = pip install {opts} {packages}
deps = -c{env:UPPER_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master}
deps = -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master}
-r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
whitelist_externals = *
@ -30,7 +30,7 @@ commands = {posargs}
commands = python setup.py testr --coverage --testr-args='{posargs}'
[testenv:docs]
deps = -c{env:UPPER_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master}
deps = -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master}
-r{toxinidir}/doc/requirements.txt
commands = sphinx-build -W -d doc/build/doctrees -b html doc/source doc/build/html
@ -48,8 +48,9 @@ commands =
# H302 import only modules
# E123 skipped as they are invalid PEP-8.
# H904 Wrap long lines in parentheses instead of a backslash
# W504 line break after binary operator
show-source = True
ignore = E123,H803,H302,H904
ignore = E123,H803,H302,H904,W504
builtins = _
exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build,.tmp
import-order-style = pep8

Loading…
Cancel
Save