Update DragonFlow tests to run with Python 3

As Python 2.7 will soon reach its end of life, we should switch to
Python 3. This change updates the tests to run under Python 3.

Change-Id: I1014769881786c62d9d6aabf21ca7858a6deb882
Shachar Snapiri, 2019-05-12 21:38:44 +03:00
parent d622410892
commit a9fccaa38c
18 changed files with 48 additions and 32 deletions

@@ -143,6 +143,8 @@ class EtcdDbDriver(db_api.DbApi):
         for entry in directory:
             value = entry[0]
             if value:
+                if not six.PY2:
+                    value = value.decode("utf-8")
                 res.append(value)
         return res

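The change above exists because the etcd client library hands back values as bytes under Python 3, while Python 2 already receives a native str. A minimal standalone sketch of the same idea (the helper name and its use are illustrative, not part of the driver):

    import six

    def decode_entry_value(value):
        # etcd returns bytes on Python 3, so decode before handing the value
        # to code that expects text; Python 2 values are already native str.
        if value and not six.PY2:
            value = value.decode("utf-8")
        return value
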
@@ -34,7 +34,7 @@ def key2slot(key):
         end = k.find('}', start + 1)
         if end > -1 and end != start + 1:
             k = k[start + 1:end]

-    return crc16.crc16xmodem(k.encode('UTF-8')) % REDIS_NSLOTS
+    return crc16.crc16xmodem(k.encode('utf-8')) % REDIS_NSLOTS

 class Node(object):
@@ -47,8 +47,10 @@ class Node(object):
     @property
     def client(self):
         if self._client is None:
+            decode = not six.PY2
             self._client = redis_client.StrictRedis(host=self.ip,
-                                                    port=self.port)
+                                                    port=self.port,
+                                                    decode_responses=decode)
         return self._client

     @property
@@ -128,7 +130,7 @@ class Cluster(object):
 class RedisDbDriver(db_api.DbApi):
     def __init__(self, *args, **kwargs):
         super(RedisDbDriver, self).__init__(*args, **kwargs)
-        self._table_strip_re = re.compile(b'^{.+}(.+)$')
+        self._table_strip_re = re.compile('^{.+}(.+)$')
         self.config = cfg.CONF.df_redis
         self.BATCH_KEY_AMOUNT = self.config.batch_amount
         self.RETRY_COUNT = self.config.retries
@@ -347,7 +349,7 @@ class RedisDbDriver(db_api.DbApi):
             # ask a specific node
             pattern = self._key_name(table, topic, key or '*')
             nodes = (self._cluster.get_node(pattern), )
-        return (pattern, nodes)
+        return pattern, nodes

     def _scan(self, table, key=None, topic=None):
         (pattern, nodes) = self._query_info(table, topic, key)

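The decode_responses flag introduced above asks redis-py to decode replies itself, so the driver keeps receiving str on Python 3 instead of bytes, while the flag stays off on Python 2 to preserve the old behaviour. A hedged sketch of the idea with placeholder connection details instead of the driver's Node wiring:

    import redis
    import six

    # With decode_responses enabled, GET/SCAN and similar calls return str
    # rather than bytes on Python 3.
    client = redis.StrictRedis(host='127.0.0.1', port=6379,
                               decode_responses=not six.PY2)
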
@@ -146,7 +146,7 @@ class RedisMgt(object):
         return new_nodes

     def _get_cluster_info(self, node):
-        raw = node.execute_command('cluster info')
+        raw = node.execute_command('cluster info').decode('utf-8', 'ignore')

         def _split(line):
             k, v = line.split(':')
@@ -157,7 +157,7 @@ class RedisMgt(object):
                 [_split(line) for line in raw.split('\r\n') if line]}

     def _get_cluster_nodes(self, node):
-        raw = node.execute_command('cluster nodes')
+        raw = node.execute_command('cluster nodes').decode('utf-8', 'ignore')
         ret = {}

         for line in raw.split('\n'):
@@ -373,7 +373,7 @@ class RedisMgt(object):
         update = db_common.DbUpdate('ha', 'nodes',
                                     'set', nodes_json,
                                     topic=topic)
-        topic = topic.encode('utf8')
+        topic = topic.encode('utf8', 'ignore')
         data = pub_sub_api.pack_message(update.to_dict())
         self.publisher._send_event(data, topic)

@@ -139,7 +139,7 @@ class PublisherAgentBase(PublisherApi):
         if topic is None:
             topic = update.topic or db_common.SEND_ALL_TOPIC

-        topic = topic.encode('utf8')
+        topic = topic.encode('utf8', 'ignore')

         LOG.debug("Sending %s to %s", update, topic)

@@ -105,13 +105,13 @@ class ZMQSubscriberAgentBase(pub_sub_api.SubscriberAgentBase):
             self.sub_socket.disconnect(uri)

     def register_topic(self, topic):
-        topic = topic.encode('ascii', 'ignore')
+        topic = topic.encode('utf-8', 'ignore')
         is_new = super(ZMQSubscriberAgentBase, self).register_topic(topic)
         if is_new and self.sub_socket:
             self.sub_socket.setsockopt(zmq.SUBSCRIBE, topic)

     def unregister_topic(self, topic):
-        topic = topic.encode('ascii', 'ignore')
+        topic = topic.encode('utf-8', 'ignore')
         super(ZMQSubscriberAgentBase, self).unregister_topic(topic)
         if self.sub_socket:
             self.sub_socket.setsockopt(zmq.UNSUBSCRIBE, topic)

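Moving the topic from ascii to utf-8 matters because pyzmq's SUBSCRIBE/UNSUBSCRIBE options take a byte string and topics may contain non-ASCII characters. A short sketch of the subscribe side, with a placeholder endpoint and topic name:

    import zmq

    context = zmq.Context()
    sub_socket = context.socket(zmq.SUB)
    sub_socket.connect('tcp://127.0.0.1:5556')   # placeholder endpoint

    # setsockopt(zmq.SUBSCRIBE, ...) expects bytes, so encode the topic first.
    topic = 'secgroup-updates'.encode('utf-8', 'ignore')
    sub_socket.setsockopt(zmq.SUBSCRIBE, topic)
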
@@ -347,7 +347,7 @@ def load_datapath_from_file_stream(stream):
 def load_datapath_from_file_name(dppath):
-    with open(dppath, 'r') as f:
+    with open(dppath, 'rb') as f:
         return load_datapath_from_file_stream(f)

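Opening the datapath layout in binary mode gives the stream parser bytes on both interpreters, rather than str on Python 2 and text on Python 3. A generic sketch of the pattern with a placeholder file name:

    with open('datapath_layout.yaml', 'rb') as f:   # placeholder path
        raw = f.read()               # bytes on Python 2 and Python 3 alike
        text = raw.decode('utf-8')   # decode only where text is needed
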
@@ -10,6 +10,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.

+import binascii
 import collections
 import fcntl
 import os
@@ -93,7 +94,8 @@ def delete_tap_device(dev):
 def packet_raw_data_to_hex(buf):
-    return str(buf).encode('hex')
+    return binascii.hexlify(str(buf).encode('utf-8', 'ignore')
+                            ).decode('utf-8', 'ignore')


 class Topology(object):
@@ -428,7 +430,7 @@ class LogicalPortTap(object):
         if isinstance(buf, bytearray):
             buf = bytes(buf)
         elif isinstance(buf, six.string_types):
-            buf = buf.encode('utf-8')
+            buf = buf.encode('utf-8', 'ignore')

         if self.is_blocking:
             # Takes string and read-only bytes-like objects

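packet_raw_data_to_hex() has to change because str.encode('hex') no longer works on Python 3; binascii.hexlify() works on both interpreters but only accepts bytes-like input and returns bytes. A hedged sketch of the same conversion outside the test helper:

    import binascii

    def to_hex(buf):
        # hexlify() needs bytes-like input and returns bytes, so normalize
        # the input and decode the result back to text for logging.
        if not isinstance(buf, (bytes, bytearray)):
            buf = str(buf).encode('utf-8', 'ignore')
        return binascii.hexlify(buf).decode('utf-8', 'ignore')
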
@@ -67,11 +67,11 @@ class TestDHCPApp(test_base.DFTestBase):
         options = [
             os_ken.lib.packet.dhcp.option(
                 os_ken.lib.packet.dhcp.DHCP_MESSAGE_TYPE_OPT,
-                chr(os_ken.lib.packet.dhcp.DHCP_DISCOVER),
+                bytes((os_ken.lib.packet.dhcp.DHCP_DISCOVER,)),
             ),
             os_ken.lib.packet.dhcp.option(
                 os_ken.lib.packet.dhcp.DHCP_PARAMETER_REQUEST_LIST_OPT,
-                chr(os_ken.lib.packet.dhcp.DHCP_GATEWAY_ADDR_OPT),
+                bytes((os_ken.lib.packet.dhcp.DHCP_GATEWAY_ADDR_OPT,)),
             ),
         ]
         dhcp = os_ken.lib.packet.dhcp.dhcp(
@@ -113,15 +113,15 @@ class TestDHCPApp(test_base.DFTestBase):
         options = [
             os_ken.lib.packet.dhcp.option(
                 os_ken.lib.packet.dhcp.DHCP_MESSAGE_TYPE_OPT,
-                chr(os_ken.lib.packet.dhcp.DHCP_REQUEST),
+                bytes((os_ken.lib.packet.dhcp.DHCP_REQUEST,)),
             ),
             os_ken.lib.packet.dhcp.option(
                 os_ken.lib.packet.dhcp.DHCP_REQUESTED_IP_ADDR_OPT,
-                offer.yiaddr,
+                offer.yiaddr.encode(),
             ),
             os_ken.lib.packet.dhcp.option(
                 os_ken.lib.packet.dhcp.DHCP_PARAMETER_REQUEST_LIST_OPT,
-                chr(os_ken.lib.packet.dhcp.DHCP_GATEWAY_ADDR_OPT),
+                bytes((os_ken.lib.packet.dhcp.DHCP_GATEWAY_ADDR_OPT,)),
             ),
         ]
         dhcp = os_ken.lib.packet.dhcp.dhcp(

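The chr() to bytes((...,)) changes reflect that DHCP option bodies must be raw bytes: on Python 3, chr(n) yields a one-character str, whereas bytes((n,)) yields the single byte itself. A small illustration with a placeholder constant standing in for the os_ken value:

    DHCP_DISCOVER = 1   # placeholder; the real constant comes from os_ken

    text_body = chr(DHCP_DISCOVER)        # '\x01' -- a str on Python 3
    bytes_body = bytes((DHCP_DISCOVER,))  # b'\x01' -- what the option needs
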
@@ -157,4 +157,5 @@ class DFTestBase(base.BaseTestCase):
     def _create_random_string(self, length=16):
         alphabet = string.printable
-        return ''.join([random.choice(alphabet) for _ in range(length)])
+        ret = ''.join([random.choice(alphabet) for _ in range(length)])
+        return ret.encode('utf-8', 'ignore')

@@ -11,10 +11,10 @@
 # under the License.

 import re

+from six.moves import configparser
 from oslo_config import cfg
-import ConfigParser

 from dragonflow.common import utils as df_utils
 from dragonflow.controller.common import constants as const
 from dragonflow.tests.common import utils
@@ -394,7 +394,7 @@ class TestL2FLows(test_base.DFTestBase):
         readhandle = None
         value = None
         try:
-            config = ConfigParser.ConfigParser()
+            config = configparser.ConfigParser()
             readhandle = open(ML2_CONF_INI, 'r')
             config.readfp(readhandle)
             value = config.get(section, key)

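six.moves resolves to ConfigParser on Python 2 and to the renamed configparser module on Python 3, so a single import covers both. A hedged sketch of reading an ML2-style option this way (the path, section and key are placeholders):

    from six.moves import configparser

    config = configparser.ConfigParser()
    if config.read('/etc/neutron/plugins/ml2/ml2_conf.ini'):  # placeholder
        if config.has_option('ml2', 'mechanism_drivers'):     # placeholders
            value = config.get('ml2', 'mechanism_drivers')
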
@@ -39,7 +39,9 @@ class TestOvsdbMonitor(test_base.DFTestBase):
             return False
         elif _interface.lport is None:
             return False
-        elif _interface.port_num <= 0:
+        # port_num may be None if it does not exist in the json
+        # This happens before the port is bound
+        elif _interface.port_num is None or _interface.port_num <= 0:
             return False
         elif _interface.admin_state != "up":
             return False

@@ -486,7 +486,7 @@ class TestFcApp(SfcTestsCommonBase):
     @property
     def _initial_packet(self):
-        payload = '0' * 64
+        payload = (b'0' * 64)

         if self.pkt_proto == constants.PROTO_NAME_TCP:
             tp = self._gen_tcp()
@@ -574,13 +574,13 @@ class TestSfcApp(SfcTestsCommonBase):
             self._gen_ethernet() /
             self._gen_ipv4(proto=inet.IPPROTO_UDP) /
             self._gen_udp(src_port=SRC_PORT, dst_port=DST_PORT) /
-            ('0' * 64)
+            (b'0' * 64)
         )
         final_packet = self._get_bytes(
             self._gen_ethernet() /
             self._gen_ipv4(proto=inet.IPPROTO_UDP) /
             self._gen_udp(src_port=SRC_PORT, dst_port=DST_PORT) /
-            ('{len}'.format(len=len(self.layout)) * 64)
+            (bytes((len(self.layout) + ord('0'),)) * 64)
         )
         fc = objects.FlowClassifierTestObj(self.neutron, self.nb_api)
         self.addCleanup(fc.close)

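The SFC payloads are switched to bytes so the packets are assembled purely from bytes under Python 3, and the last change swaps a formatted text digit for its ASCII byte: bytes((n + ord('0'),)) is the digit n as a single byte. A short illustration with an assumed layout length:

    layout_len = 3   # placeholder for len(self.layout)

    initial_payload = b'0' * 64                            # b'000...'
    final_payload = bytes((layout_len + ord('0'),)) * 64   # b'333...'
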
@@ -128,8 +128,8 @@ class TestRedisDB(tests_base.BaseTestCase):
             'DEL', '{table.topic}key')

     def test_get_all_keys_topic(self):
-        expected = [b'key1', b'key2', b'key3']
-        keys_response = [b'{table.topic}' + key for key in expected]
+        expected = ['key1', 'key2', 'key3']
+        keys_response = ['{table.topic}' + key for key in expected]
         self.RedisDbDriver._cluster = mock.Mock()
         node = mock.Mock()
         self.RedisDbDriver._cluster.get_node.return_value = node
@@ -139,14 +139,14 @@ class TestRedisDB(tests_base.BaseTestCase):
     def test_get_all_keys_notopic(self):
         nodes_keys = (
-            [b'key1', b'key2', b'key3'],
-            [b'key3', b'key4', b'key5'],
+            ['key1', 'key2', 'key3'],
+            ['key3', 'key4', 'key5'],
         )

         expected = set()
         nodes = []
         for node_keys in nodes_keys:
             expected.update(node_keys)
-            keys_response = [b'{table.topic}' + key for key in node_keys]
+            keys_response = ['{table.topic}' + key for key in node_keys]
             node = mock.Mock()
             node.client.scan.return_value = (0, keys_response)
             nodes.append(node)

@@ -52,7 +52,8 @@ class TestZMQPubSub(tests_base.BaseTestCase):
             log_debug.assert_called()
             args = self.ZMQPublisherAgent.socket.send_multipart.call_args
             self.ZMQPublisherAgent.socket.send_multipart.assert_called_once()
-            self.assertEqual(db_common.SEND_ALL_TOPIC.encode('utf-8'),
+            self.assertEqual(db_common.SEND_ALL_TOPIC.encode('utf-8',
+                                                              'ignore'),
                              args[0][0][0])
         self.assertIsNone(result)

@@ -18,7 +18,7 @@ commands = /bin/rm -f .testrepository/times.dbm
            stestr run {posargs}

 [testenv:fullstack]
-basepython = python2.7
+basepython = python3
 setenv = OS_TEST_PATH=./dragonflow/tests/fullstack
 commands =
     ostestr --serial -c 0 {posargs}

@@ -18,6 +18,8 @@ HOST_IP=$ipaddress
 HOSTNAME=$(hostname)
 SERVICE_HOST_NAME=\${HOSTNAME}
 SERVICE_HOST=$ipaddress
+USE_PYTHON3=True
 DEVSTACKEOF

 ~/devstack/stack.sh

@@ -16,6 +16,8 @@ cat << DEVSTACKEOF >> devstack/local.conf
 # Set this to the address of the main DevStack host running the rest of the
 # OpenStack services.
 HOSTNAME=$(hostname)
+USE_PYTHON3=True
 DEVSTACKEOF

 ~/devstack/stack.sh

@@ -21,6 +21,8 @@ FIXED_RANGE=10.100.100.0/24
 HOSTNAME=$(hostname)
 SERVICE_HOST_NAME=${HOSTNAME}
 SERVICE_HOST=$ipaddress
+USE_PYTHON3=True
 DEVSTACKEOF

 ~/devstack/stack.sh