Fix incompatibility with openstacksdk <= 0.17.2

We keep 0.13.0 as the minimum supported version of openstacksdk in
lower-constraints.txt, but we've found an issue when running with
0.17.2. It is fixed by switching from dict notation to object
(attribute) notation when accessing properties of the resources
returned by openstacksdk calls, as only newer versions of openstacksdk
support dict notation.
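
For illustration, a minimal sketch (not a literal excerpt from the
driver; lbaas stands for the client returned by
clients.get_loadbalancer_client()):

    # Attribute access works on every openstacksdk version we support,
    # while dict-style subscripting only works on newer releases.
    lb = lbaas.get_load_balancer(loadbalancer.id)
    status = lb.provisioning_status       # portable
    # status = lb['provisioning_status']  # breaks on older openstacksdk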

Another side of the story is that our lower-constraints job doesn't
really install the versions from lower-constraints.txt due to a mistake
in tox.ini: the upper-constraints file was baked into install_command
instead of deps, so it applied to every environment. Maybe we could
have avoided the aforementioned bug if we had noticed that earlier.
This commit fixes that as well, which also required bumping pyroute2 to
0.5.3 in lower-constraints.txt, as 0.5.1 apparently isn't
py3-compatible. protobuf is bumped to 3.6.0, as 3.5.2 apparently wasn't
compatible with our own code.
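
The tox.ini fix moves the upper-constraints file out of install_command
and into deps, so that a constraints file can actually be swapped per
environment. A rough sketch of how the lower-constraints environment is
then expected to consume lower-constraints.txt (that section is not
part of this diff):

    [testenv:lower-constraints]
    deps =
      -c{toxinidir}/lower-constraints.txt
      -r{toxinidir}/test-requirements.txt
      -r{toxinidir}/requirements.txt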

The unit tests are now updated to mock openstacksdk responses as SDK
objects instead of plain dicts, to make sure that using dict notation
to access openstacksdk objects will fail on the lower-constraints job
in the future.
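
As a sketch of that pattern (mirroring the updated tests below), a
mocked response is now built as an openstacksdk object rather than a
dict:

    resp = o_lb.LoadBalancer(id=loadbalancer_id, provider='haproxy')
    m_driver._post_lb_resource.return_value = resp
    # On the lower-constraints job (old openstacksdk) resp['id'] is not
    # supported, so any dict-style access left in the driver fails here.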

Change-Id: I5113f7574f4d2e450de95494c9287bb7427e67d4
Closes-Bug: 1830398
Author: Michał Dulko, 2019-05-24 17:26:48 +02:00
Parent: b889b2c1fe
Commit: 533ab7cff5
5 changed files with 80 additions and 85 deletions

lbaasv2.py (LBaaSv2 driver)

@@ -533,7 +533,8 @@ class LBaaSv2Driver(base.LBaaSDriver):
LOG.error('Error when creating %s: %s', resource.resource_key,
response.text)
response.raise_for_status()
return response.json()[resource.resource_key]
response_dict = response.json()[resource.resource_key]
return resource(**response_dict)
def _create_loadbalancer(self, loadbalancer):
request = {
@@ -550,15 +551,14 @@ class LBaaSv2Driver(base.LBaaSDriver):
response = self._post_lb_resource(o_lb.LoadBalancer, request)
loadbalancer.id = response['id']
loadbalancer.id = response.id
loadbalancer.port_id = self._get_vip_port(loadbalancer).get("id")
if (loadbalancer.provider is not None and
loadbalancer.provider != response['provider']):
loadbalancer.provider != response.provider):
LOG.error("Request provider(%s) != Response provider(%s)",
loadbalancer.provider,
response['provider'])
loadbalancer.provider, response.provider)
return None
loadbalancer.provider = response['provider']
loadbalancer.provider = response.provider
return loadbalancer
def _find_loadbalancer(self, loadbalancer):
@@ -571,10 +571,10 @@ class LBaaSv2Driver(base.LBaaSDriver):
try:
os_lb = next(response) # openstacksdk returns a generator
loadbalancer.id = os_lb['id']
loadbalancer.id = os_lb.id
loadbalancer.port_id = self._get_vip_port(loadbalancer).get("id")
loadbalancer.provider = os_lb['provider']
if os_lb['provisioning_status'] == 'ERROR':
loadbalancer.provider = os_lb.provider
if os_lb.provisioning_status == 'ERROR':
self.release_loadbalancer(loadbalancer)
return None
except (KeyError, StopIteration):
@@ -592,7 +592,7 @@ class LBaaSv2Driver(base.LBaaSDriver):
}
self._add_tags('listener', request)
response = self._post_lb_resource(o_lis.Listener, request)
listener.id = response['id']
listener.id = response.id
return listener
def _find_listener(self, listener):
@@ -606,7 +606,7 @@ class LBaaSv2Driver(base.LBaaSDriver):
try:
os_listener = next(response)
listener.id = os_listener['id']
listener.id = os_listener.id
except (KeyError, StopIteration):
return None
@@ -625,7 +625,7 @@ class LBaaSv2Driver(base.LBaaSDriver):
}
self._add_tags('pool', request)
response = self._post_lb_resource(o_pool.Pool, request)
pool.id = response['id']
pool.id = response.id
return pool
def _find_pool(self, pool, by_listener=True):
@@ -639,11 +639,11 @@ class LBaaSv2Driver(base.LBaaSDriver):
try:
if by_listener:
pools = [p for p in response if pool.listener_id
in {l['id'] for l in p['listeners']}]
in {l.id for l in p.listeners}]
else:
pools = [p for p in response if pool.name == p['name']]
pools = [p for p in response if pool.name == p.name]
pool.id = pools[0]['id']
pool.id = pools[0].id
except (KeyError, IndexError):
return None
return pool
@@ -662,7 +662,7 @@ class LBaaSv2Driver(base.LBaaSDriver):
self._add_tags('member', request)
response = self._post_lb_resource(o_mem.Member, request,
pool_id=member.pool_id)
member.id = response['id']
member.id = response.id
return member
def _find_member(self, member):
@@ -676,7 +676,7 @@ class LBaaSv2Driver(base.LBaaSDriver):
protocol_port=member.port)
try:
member.id = next(response)['id']
member.id = next(response).id
except (KeyError, StopIteration):
return None
@@ -722,7 +722,7 @@ class LBaaSv2Driver(base.LBaaSDriver):
return
except (o_exc.ConflictException, o_exc.BadRequestException):
self._wait_for_provisioning(loadbalancer, remaining)
except o_exc.ResourceNotFound:
except o_exc.NotFoundException:
return
raise k_exc.ResourceNotReady(obj)
@@ -733,7 +733,7 @@ class LBaaSv2Driver(base.LBaaSDriver):
for remaining in self._provisioning_timer(timeout, interval):
response = lbaas.get_load_balancer(loadbalancer.id)
status = response['provisioning_status']
status = response.provisioning_status
if status == 'ACTIVE':
LOG.debug("Provisioning complete for %(lb)s", {
'lb': loadbalancer})
@@ -805,14 +805,14 @@ class LBaaSv2Driver(base.LBaaSDriver):
return None
return obj_lbaas.LBaaSLoadBalancer(
id=response['id'],
port_id=response['vip_port_id'],
name=response['name'],
project_id=response['project_id'],
subnet_id=response['vip_subnet_id'],
ip=response['vip_address'],
id=response.id,
port_id=response.vip_port_id,
name=response.name,
project_id=response.project_id,
subnet_id=response.vip_subnet_id,
ip=response.vip_address,
security_groups=None,
provider=response['provider'])
provider=response.provider)
def get_pool_by_name(self, pool_name, project_id):
lbaas = clients.get_loadbalancer_client()
@@ -825,14 +825,14 @@ class LBaaSv2Driver(base.LBaaSDriver):
for entry in pools:
if not entry:
continue
if entry['name'] == pool_name:
listener_id = (entry['listeners'][0]['id'] if
entry['listeners'] else None)
if entry.name == pool_name:
listener_id = (entry.listeners[0].id if
entry.listeners else None)
return obj_lbaas.LBaaSPool(
name=entry['name'], project_id=entry['project_id'],
loadbalancer_id=entry['loadbalancers'][0]['id'],
name=entry.name, project_id=entry.project_id,
loadbalancer_id=entry.loadbalancers[0].id,
listener_id=listener_id,
protocol=entry['protocol'], id=entry['id'])
protocol=entry.protocol, id=entry.id)
return None
def ensure_l7_policy(self, namespace, route_name,
@@ -875,7 +875,7 @@ class LBaaSv2Driver(base.LBaaSDriver):
redirect_pool_id=l7_policy.redirect_pool_id,
listener_id=l7_policy.listener_id)
try:
l7_policy.id = next(response)['id']
l7_policy.id = next(response).id
except (KeyError, StopIteration):
return None
return l7_policy
@@ -910,7 +910,7 @@ class LBaaSv2Driver(base.LBaaSDriver):
value=l7_rule.value,
compare_type=l7_rule.compare_type)
try:
l7_rule.id = next(response)['id']
l7_rule.id = next(response).id
except (KeyError, StopIteration):
return None
return l7_rule
@@ -937,8 +937,8 @@ class LBaaSv2Driver(base.LBaaSDriver):
for entry in l7policy_list:
if not entry:
continue
if (entry['redirect_pool_id'] == pool.id and
entry['id'] != l7policy.id):
if (entry.redirect_pool_id == pool.id and
entry.id != l7policy.id):
return True
return False

test_lbaasv2.py (unit tests for the LBaaSv2 driver)

@@ -308,7 +308,7 @@ class TestLBaaSv2Driver(test_base.TestCase):
'vip_address': str(loadbalancer.ip),
'vip_subnet_id': loadbalancer.subnet_id,
}
resp = {'id': loadbalancer_id, 'provider': 'haproxy'}
resp = o_lb.LoadBalancer(id=loadbalancer_id, provider='haproxy')
m_driver._post_lb_resource.return_value = resp
m_driver._get_vip_port.return_value = {'id': mock.sentinel.port_id}
@@ -336,7 +336,7 @@ class TestLBaaSv2Driver(test_base.TestCase):
'vip_subnet_id': loadbalancer.subnet_id,
'provider': loadbalancer.provider,
}
resp = {'id': loadbalancer_id, 'provider': 'amphora'}
resp = o_lb.LoadBalancer(id=loadbalancer_id, provider='amphora')
m_driver._post_lb_resource.return_value = resp
m_driver._get_vip_port.return_value = {'id': mock.sentinel.port_id}
@@ -364,7 +364,7 @@ class TestLBaaSv2Driver(test_base.TestCase):
'vip_subnet_id': loadbalancer.subnet_id,
'provider': loadbalancer.provider,
}
resp = {'id': loadbalancer_id, 'provider': 'haproxy'}
resp = o_lb.LoadBalancer(id=loadbalancer_id, provider='haproxy')
m_driver._post_lb_resource.return_value = resp
m_driver._get_vip_port.return_value = {'id': mock.sentinel.port_id}
@@ -382,9 +382,8 @@ class TestLBaaSv2Driver(test_base.TestCase):
subnet_id='D3FA400A-F543-4B91-9CD3-047AF0CE42D1',
provider='haproxy', security_groups=[])
loadbalancer_id = '00EE9E11-91C2-41CF-8FD4-7970579E5C4C'
resp = iter([{'id': loadbalancer_id,
'provider': 'haproxy',
'provisioning_status': 'ACTIVE'}])
resp = iter([o_lb.LoadBalancer(id=loadbalancer_id, provider='haproxy',
provisioning_status='ACTIVE')])
lbaas.load_balancers.return_value = resp
m_driver._get_vip_port.return_value = {'id': mock.sentinel.port_id}
@@ -427,9 +426,8 @@ class TestLBaaSv2Driver(test_base.TestCase):
name='TEST_NAME', project_id='TEST_PROJECT', ip='1.2.3.4',
subnet_id='D3FA400A-F543-4B91-9CD3-047AF0CE42D1')
loadbalancer_id = '00EE9E11-91C2-41CF-8FD4-7970579E5C4C'
resp = iter([{'id': loadbalancer_id,
'provider': 'haproxy',
'provisioning_status': 'ERROR'}])
resp = iter([o_lb.LoadBalancer(id=loadbalancer_id, provider='haproxy',
provisioning_status='ERROR')])
lbaas.load_balancers.return_value = resp
m_driver._get_vip_port.return_value = {'id': mock.sentinel.port_id}
@@ -455,7 +453,7 @@ class TestLBaaSv2Driver(test_base.TestCase):
'loadbalancer_id': listener.loadbalancer_id,
'protocol': listener.protocol,
'protocol_port': listener.port}
resp = {'id': listener_id}
resp = o_lis.Listener(id=listener_id)
m_driver._post_lb_resource.return_value = resp
ret = cls._create_listener(m_driver, listener)
@@ -473,7 +471,7 @@ class TestLBaaSv2Driver(test_base.TestCase):
name='TEST_NAME', project_id='TEST_PROJECT', protocol='TCP',
port=1234, loadbalancer_id='00EE9E11-91C2-41CF-8FD4-7970579E5C4C')
listener_id = 'A57B7771-6050-4CA8-A63C-443493EC98AB'
lbaas.listeners.return_value = iter([{'id': listener_id}])
lbaas.listeners.return_value = iter([o_lis.Listener(id=listener_id)])
ret = cls._find_listener(m_driver, listener)
lbaas.listeners.assert_called_once_with(
@@ -522,7 +520,7 @@ class TestLBaaSv2Driver(test_base.TestCase):
'loadbalancer_id': pool.loadbalancer_id,
'protocol': pool.protocol,
'lb_algorithm': lb_algorithm}
resp = {'id': pool_id}
resp = o_pool.Pool(id=pool_id)
m_driver._post_lb_resource.return_value = resp
ret = cls._create_pool(m_driver, pool)
@@ -562,8 +560,8 @@ class TestLBaaSv2Driver(test_base.TestCase):
listener_id='A57B7771-6050-4CA8-A63C-443493EC98AB',
loadbalancer_id='00EE9E11-91C2-41CF-8FD4-7970579E5C4C')
pool_id = 'D4F35594-27EB-4F4C-930C-31DD40F53B77'
resp = [{'id': pool_id,
'listeners': [{'id': pool.listener_id}]}]
resp = [o_pool.Pool(id=pool_id,
listeners=[o_lis.Listener(id=pool.listener_id)])]
lbaas.pools.return_value = resp
ret = cls._find_pool(m_driver, pool)
@@ -610,7 +608,7 @@ class TestLBaaSv2Driver(test_base.TestCase):
'subnet_id': member.subnet_id,
'address': str(member.ip),
'protocol_port': member.port}
resp = {'id': member_id}
resp = o_mem.Member(id=member_id)
m_driver._post_lb_resource.return_value = resp
ret = cls._create_member(m_driver, member)
@@ -630,7 +628,7 @@ class TestLBaaSv2Driver(test_base.TestCase):
port=1234, subnet_id='D3FA400A-F543-4B91-9CD3-047AF0CE42D1',
pool_id='D4F35594-27EB-4F4C-930C-31DD40F53B77')
member_id = '3A70CEC0-392D-4BC1-A27C-06E63A0FD54F'
resp = iter([{'id': member_id}])
resp = iter([o_mem.Member(id=member_id)])
lbaas.members.return_value = resp
ret = cls._find_member(m_driver, member)
@@ -827,7 +825,7 @@ class TestLBaaSv2Driver(test_base.TestCase):
timeout = mock.sentinel.timeout
timer = [mock.sentinel.t0, mock.sentinel.t1]
m_driver._provisioning_timer.return_value = timer
resp = {'provisioning_status': 'ACTIVE'}
resp = o_lb.LoadBalancer(provisioning_status='ACTIVE')
lbaas.get_load_balancer.return_value = resp
cls._wait_for_provisioning(m_driver, loadbalancer, timeout)
@@ -842,7 +840,7 @@ class TestLBaaSv2Driver(test_base.TestCase):
timeout = mock.sentinel.timeout
timer = [mock.sentinel.t0, mock.sentinel.t1]
m_driver._provisioning_timer.return_value = timer
resp = {'provisioning_status': 'NOT_ACTIVE'}
resp = o_lb.LoadBalancer(provisioning_status='NOT_ACTIVE')
lbaas.get_load_balancer.return_value = resp
self.assertRaises(k_exc.ResourceNotReady, cls._wait_for_provisioning,
@@ -859,7 +857,8 @@ class TestLBaaSv2Driver(test_base.TestCase):
cls = d_lbaasv2.LBaaSv2Driver
m_driver = mock.Mock(spec=d_lbaasv2.LBaaSv2Driver)
pools = {'name': 'KUKU', 'id': 'a2a62ea7-e3bf-40df-8c09-aa0c29876a6b'}
pools = o_pool.Pool(name='KUKU',
id='a2a62ea7-e3bf-40df-8c09-aa0c29876a6b')
lbaas.pools.return_value = [pools]
pool_name = 'NOT_KUKU'
project_id = 'TEST_PROJECT'
@@ -888,10 +887,9 @@ class TestLBaaSv2Driver(test_base.TestCase):
if listener_is_empty:
resp_listeners = []
else:
resp_listeners = [{"id": pool_listener_id}]
resp_listeners = [o_lis.Listener(id=pool_listener_id)]
listener_id = (resp_listeners[0]['id'] if
resp_listeners else None)
listener_id = (resp_listeners[0].id if resp_listeners else None)
expected_result = obj_lbaas.LBaaSPool(
name=pool_name, project_id=pool_project_id,
loadbalancer_id=pool_lb_id,
@@ -899,18 +897,14 @@ class TestLBaaSv2Driver(test_base.TestCase):
protocol=pool_protocol,
id=pool_id)
resp = [{
"protocol": pool_protocol,
"loadbalancers": [
{
"id": pool_lb_id
}
],
"listeners": resp_listeners,
"project_id": pool_project_id,
"id": pool_id,
"name": pool_name
}]
resp = [o_pool.Pool(
protocol=pool_protocol,
loadbalancers=[o_lb.LoadBalancer(id=pool_lb_id)],
listeners=resp_listeners,
project_id=pool_project_id,
id=pool_id,
name=pool_name,
)]
lbaas.pools.return_value = resp
@@ -951,13 +945,13 @@ class TestLBaaSv2Driver(test_base.TestCase):
subnet_id=loadbalancer_subnet_id, ip=loadbalancer_vip,
security_groups=None, provider=loadbalancer_provider)
resp = {'id': loadbalancer_id,
'vip_port_id': loadbalancer_vip_port_id,
'name': loadbalancer_name,
'project_id': loadbalancer_project_id,
'vip_subnet_id': loadbalancer_subnet_id,
'vip_address': loadbalancer_vip,
'provider': loadbalancer_provider}
resp = o_lb.LoadBalancer(id=loadbalancer_id,
vip_port_id=loadbalancer_vip_port_id,
name=loadbalancer_name,
project_id=loadbalancer_project_id,
vip_subnet_id=loadbalancer_subnet_id,
vip_address=loadbalancer_vip,
provider=loadbalancer_provider)
lbaas.get_load_balancer.return_value = resp
@@ -1067,7 +1061,7 @@ class TestLBaaSv2Driver(test_base.TestCase):
l7policy_id = '3A70CEC0-392D-4BC1-A27C-06E63A0FD54F'
resp = iter([{'id': l7policy_id}])
resp = iter([o_l7p.L7Policy(id=l7policy_id)])
lbaas.l7_policies.return_value = resp
ret = cls._find_l7_policy(m_driver, l7_policy)
@@ -1182,7 +1176,7 @@ class TestLBaaSv2Driver(test_base.TestCase):
value='www.test.com')
l7_rule_id = '3A70CEC0-392D-4BC1-A27C-06E63A0FD54F'
resp = iter([{'id': l7_rule_id}])
resp = iter([o_l7r.L7Rule(id=l7_rule_id)])
lbaas.l7_rules.return_value = resp
ret = cls._find_l7_rule(m_driver, l7_rule)

lower-constraints.txt

@@ -87,7 +87,7 @@ pep8==1.5.7
pika==0.10.0
pika-pool==0.1.3
prettytable==0.7.2
protobuf==3.5.2
protobuf==3.6.0
psutil==5.4.3
pycparser==2.18
pyflakes==0.8.1
@@ -95,7 +95,7 @@ Pygments==2.2.0
pyinotify==0.9.6
pyparsing==2.2.0
pyperclip==1.6.0
pyroute2==0.5.1
pyroute2==0.5.3
python-dateutil==2.7.0
python-editor==1.0.3
python-keystoneclient==3.15.0

requirements.txt

@@ -19,9 +19,9 @@ oslo.service!=1.28.1,>=1.24.0 # Apache-2.0
oslo.utils>=3.33.0 # Apache-2.0
os-vif!=1.8.0,>=1.7.0 # Apache-2.0
PrettyTable<0.8,>=0.7.2 # BSD
pyroute2>=0.5.1;sys_platform!='win32' # Apache-2.0 (+ dual licensed GPL2)
pyroute2>=0.5.3;sys_platform!='win32' # Apache-2.0 (+ dual licensed GPL2)
retrying!=1.3.0,>=1.2.3 # Apache-2.0
six>=1.10.0 # MIT
stevedore>=1.20.0 # Apache-2.0
grpcio>=1.12.0 # Apache-2.0
protobuf>=3.5.2 # 3-Clause BSD
protobuf>=3.6.0 # 3-Clause BSD

tox.ini

@@ -7,8 +7,9 @@ skipsdist = True
passenv = HOME
setenv = VIRTUAL_ENV={envdir}
usedevelop = True
install_command = pip install -c{env:UPPER_CONSTRAINTS_FILE:https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt} {opts} {packages}
deps = -r{toxinidir}/requirements.txt
install_command = pip install {opts} {packages}
deps = -c{env:UPPER_CONSTRAINTS_FILE:https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt}
-r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
whitelist_externals = sh
find