Merge "Fix operating status for empty UDP pools" into stable/stein

This commit is contained in:
Zuul 2021-01-15 10:47:10 +00:00 committed by Gerrit Code Review
commit bfe226fe62
6 changed files with 79 additions and 18 deletions

View File

@ -233,7 +233,7 @@ def get_udp_listener_pool_status(listener_id):
elif 'Members' not in resource_ipport_mapping: elif 'Members' not in resource_ipport_mapping:
return {'lvs': { return {'lvs': {
'uuid': resource_ipport_mapping['Pool']['id'], 'uuid': resource_ipport_mapping['Pool']['id'],
'status': constants.DOWN, 'status': constants.UP,
'members': {} 'members': {}
}} }}

View File

@ -65,13 +65,7 @@ MISC_CHECK {
{% endmacro %} {% endmacro %}
{% macro virtualserver_macro(constants, listener, lb_vip_address, default_pool) %} {% macro virtualserver_macro(constants, listener, lb_vip_address, default_pool) %}
{% set need_render = [] %} {% if default_pool %}
{% if default_pool and default_pool.enabled and default_pool.members %}
{% for member in default_pool.members %}
{% do need_render.append(member.enabled) %}
{% endfor %}
{% endif %}
{% if need_render|length > 0 %}
virtual_server {{ lb_vip_address }} {{ listener.protocol_port }} { virtual_server {{ lb_vip_address }} {{ listener.protocol_port }} {
{{ lb_algo_macro(default_pool) }} {{ lb_algo_macro(default_pool) }}
lb_kind NAT lb_kind NAT
@ -93,7 +87,11 @@ virtual_server {{ lb_vip_address }} {{ listener.protocol_port }} {
{{ health_monitor_vs_macro(default_pool) }} {{ health_monitor_vs_macro(default_pool) }}
{% if default_pool.protocol.lower() == "udp" %} {% if default_pool.protocol.lower() == "udp" %}
{% if default_pool.enabled %}
# Configuration for Pool {{ default_pool.id }} # Configuration for Pool {{ default_pool.id }}
{% else %}
# Pool {{ default_pool.id }} is disabled
{% endif %}
{% if default_pool.health_monitor and default_pool.health_monitor.enabled %} {% if default_pool.health_monitor and default_pool.health_monitor.enabled %}
# Configuration for HealthMonitor {{ default_pool.health_monitor.id }} # Configuration for HealthMonitor {{ default_pool.health_monitor.id }}
{% endif %} {% endif %}

View File

@ -351,7 +351,7 @@ class LvsQueryTestCase(base.TestCase):
# the returned resource_ipport_mapping doesn't contain the 'Members' # the returned resource_ipport_mapping doesn't contain the 'Members'
# resources, that means the pool of the listener doesn't have an enabled # resources, that means the pool of the listener doesn't have an enabled
# pool resource, so the pool is not usable, then the pool status will # pool resource, so the pool is not usable, then the pool status will
# return DOWN. # return UP.
mock_get_resource_ipports.return_value = ( mock_get_resource_ipports.return_value = (
{ {
'Listener': {'id': self.listener_id_v4, 'Listener': {'id': self.listener_id_v4,
@ -361,7 +361,7 @@ class LvsQueryTestCase(base.TestCase):
res = lvs_query.get_udp_listener_pool_status(self.listener_id_v4) res = lvs_query.get_udp_listener_pool_status(self.listener_id_v4)
expected = {'lvs': { expected = {'lvs': {
'uuid': self.pool_id_v4, 'uuid': self.pool_id_v4,
'status': constants.DOWN, 'status': constants.UP,
'members': {} 'members': {}
}} }}
self.assertEqual(expected, res) self.assertEqual(expected, res)

View File

@ -222,6 +222,50 @@ class TestLvsCfg(base.TestCase):
persistence=False, alloc_default_pool=False)) persistence=False, alloc_default_pool=False))
self.assertEqual(exp, rendered_obj) self.assertEqual(exp, rendered_obj)
def test_render_template_udp_with_pool_no_member(self):
exp = ("# Configuration for Loadbalancer sample_loadbalancer_id_1\n"
"# Configuration for Listener sample_listener_id_1\n\n"
"net_namespace amphora-haproxy\n\n"
"virtual_server 10.0.0.2 80 {\n"
" lb_algo rr\n"
" lb_kind NAT\n"
" protocol UDP\n\n\n"
" # Configuration for Pool sample_pool_id_0\n"
"}\n\n")
rendered_obj = self.udp_jinja_cfg.render_loadbalancer_obj(
sample_configs_combined.sample_listener_tuple(
proto=constants.PROTOCOL_UDP, monitor=False,
persistence=False, alloc_default_pool=True,
sample_default_pool=0))
self.assertEqual(exp, rendered_obj)
def test_render_template_udp_with_disabled_pool(self):
exp = ("# Configuration for Loadbalancer sample_loadbalancer_id_1\n"
"# Configuration for Listener sample_listener_id_1\n\n"
"net_namespace amphora-haproxy\n\n"
"virtual_server 10.0.0.2 80 {\n"
" lb_algo rr\n"
" lb_kind NAT\n"
" protocol UDP\n\n\n"
" # Pool sample_pool_id_1 is disabled\n"
" # Configuration for Member sample_member_id_1\n"
" real_server 10.0.0.99 82 {\n"
" weight 13\n\n"
" }\n\n"
" # Configuration for Member sample_member_id_2\n"
" real_server 10.0.0.98 82 {\n"
" weight 13\n\n"
" }\n\n"
"}\n\n")
rendered_obj = self.udp_jinja_cfg.render_loadbalancer_obj(
sample_configs_combined.sample_listener_tuple(
proto=constants.PROTOCOL_UDP, monitor=False,
persistence=False, alloc_default_pool=True,
pool_enabled=False))
self.assertEqual(exp, rendered_obj)
def test_udp_transform_session_persistence(self): def test_udp_transform_session_persistence(self):
persistence_src_ip = ( persistence_src_ip = (
sample_configs_combined.sample_session_persistence_tuple( sample_configs_combined.sample_session_persistence_tuple(

View File

@ -593,7 +593,9 @@ def sample_listener_tuple(proto=None, monitor=True, alloc_default_pool=True,
ssl_type_l7=False, pool_cert=False, ssl_type_l7=False, pool_cert=False,
pool_ca_cert=False, pool_crl=False, pool_ca_cert=False, pool_crl=False,
tls_enabled=False, hm_host_http_check=False, tls_enabled=False, hm_host_http_check=False,
id='sample_listener_id_1', recursive_nest=False): id='sample_listener_id_1', recursive_nest=False,
sample_default_pool=1,
pool_enabled=True):
proto = 'HTTP' if proto is None else proto proto = 'HTTP' if proto is None else proto
if be_proto is None: if be_proto is None:
be_proto = 'HTTP' if proto is 'TERMINATED_HTTPS' else proto be_proto = 'HTTP' if proto is 'TERMINATED_HTTPS' else proto
@ -621,7 +623,8 @@ def sample_listener_tuple(proto=None, monitor=True, alloc_default_pool=True,
pool_cert=pool_cert, pool_ca_cert=pool_ca_cert, pool_cert=pool_cert, pool_ca_cert=pool_ca_cert,
pool_crl=pool_crl, tls_enabled=tls_enabled, pool_crl=pool_crl, tls_enabled=tls_enabled,
hm_host_http_check=hm_host_http_check, hm_host_http_check=hm_host_http_check,
listener_id='sample_listener_id_1'), listener_id='sample_listener_id_1',
enabled=pool_enabled),
sample_pool_tuple( sample_pool_tuple(
proto=be_proto, monitor=monitor, persistence=persistence, proto=be_proto, monitor=monitor, persistence=persistence,
persistence_type=persistence_type, persistence_type=persistence_type,
@ -630,7 +633,8 @@ def sample_listener_tuple(proto=None, monitor=True, alloc_default_pool=True,
pool_cert=pool_cert, pool_ca_cert=pool_ca_cert, pool_cert=pool_cert, pool_ca_cert=pool_ca_cert,
pool_crl=pool_crl, tls_enabled=tls_enabled, pool_crl=pool_crl, tls_enabled=tls_enabled,
hm_host_http_check=hm_host_http_check, hm_host_http_check=hm_host_http_check,
listener_id='sample_listener_id_1')] listener_id='sample_listener_id_1',
enabled=pool_enabled)]
l7policies = [ l7policies = [
sample_l7policy_tuple('sample_l7policy_id_1', sample_policy=1), sample_l7policy_tuple('sample_l7policy_id_1', sample_policy=1),
sample_l7policy_tuple('sample_l7policy_id_2', sample_policy=2), sample_l7policy_tuple('sample_l7policy_id_2', sample_policy=2),
@ -653,7 +657,8 @@ def sample_listener_tuple(proto=None, monitor=True, alloc_default_pool=True,
pool_cert=pool_cert, pool_ca_cert=pool_ca_cert, pool_cert=pool_cert, pool_ca_cert=pool_ca_cert,
pool_crl=pool_crl, tls_enabled=tls_enabled, pool_crl=pool_crl, tls_enabled=tls_enabled,
hm_host_http_check=hm_host_http_check, hm_host_http_check=hm_host_http_check,
listener_id='sample_listener_id_1')] listener_id='sample_listener_id_1',
enabled=pool_enabled)]
l7policies = [] l7policies = []
listener = in_listener( listener = in_listener(
id=id, id=id,
@ -676,7 +681,9 @@ def sample_listener_tuple(proto=None, monitor=True, alloc_default_pool=True,
pool_ca_cert=pool_ca_cert, pool_ca_cert=pool_ca_cert,
pool_crl=pool_crl, pool_crl=pool_crl,
tls_enabled=tls_enabled, tls_enabled=tls_enabled,
hm_host_http_check=hm_host_http_check hm_host_http_check=hm_host_http_check,
sample_pool=sample_default_pool,
enabled=pool_enabled
) if alloc_default_pool else '', ) if alloc_default_pool else '',
connection_limit=connection_limit, connection_limit=connection_limit,
tls_certificate_id='cont_id_1' if tls else '', tls_certificate_id='cont_id_1' if tls else '',
@ -757,7 +764,8 @@ def sample_pool_tuple(listener_id=None, proto=None, monitor=True,
has_http_reuse=True, pool_cert=False, pool_ca_cert=False, has_http_reuse=True, pool_cert=False, pool_ca_cert=False,
pool_crl=False, tls_enabled=False, pool_crl=False, tls_enabled=False,
hm_host_http_check=False, hm_host_http_check=False,
provisioning_status=constants.ACTIVE): provisioning_status=constants.ACTIVE,
enabled=True):
proto = 'HTTP' if proto is None else proto proto = 'HTTP' if proto is None else proto
monitor_proto = proto if monitor_proto is None else monitor_proto monitor_proto = proto if monitor_proto is None else monitor_proto
in_pool = collections.namedtuple( in_pool = collections.namedtuple(
@ -776,7 +784,13 @@ def sample_pool_tuple(listener_id=None, proto=None, monitor=True,
'persistence_cookie': persistence_cookie} 'persistence_cookie': persistence_cookie}
persis = sample_session_persistence_tuple(**kwargs) persis = sample_session_persistence_tuple(**kwargs)
mon = None mon = None
if sample_pool == 1: if sample_pool == 0:
id = 'sample_pool_id_0'
members = []
if monitor is True:
mon = sample_health_monitor_tuple(
proto=monitor_proto, host_http_check=hm_host_http_check)
elif sample_pool == 1:
id = 'sample_pool_id_1' id = 'sample_pool_id_1'
members = [sample_member_tuple('sample_member_id_1', '10.0.0.99', members = [sample_member_tuple('sample_member_id_1', '10.0.0.99',
monitor_ip_port=monitor_ip_port), monitor_ip_port=monitor_ip_port),
@ -802,7 +816,7 @@ def sample_pool_tuple(listener_id=None, proto=None, monitor=True,
members=members, members=members,
health_monitor=mon, health_monitor=mon,
session_persistence=persis if persistence is True else None, session_persistence=persis if persistence is True else None,
enabled=True, enabled=enabled,
operating_status='ACTIVE', has_http_reuse=has_http_reuse, operating_status='ACTIVE', has_http_reuse=has_http_reuse,
tls_certificate_id='pool_cont_1' if pool_cert else None, tls_certificate_id='pool_cont_1' if pool_cert else None,
ca_tls_certificate_id='pool_ca_1' if pool_ca_cert else None, ca_tls_certificate_id='pool_ca_1' if pool_ca_cert else None,

View File

@ -0,0 +1,5 @@
---
fixes:
- |
    Fix an incorrect ``operating_status`` with empty UDP pools. A UDP pool
    without any members is now ``ONLINE`` instead of ``OFFLINE``.