Allow members to be set as "backup"
Setting a member as "backup" means no traffic will be sent to it unless all non-backup members are marked as down. As far as I understand, this behaves essentially the same way in every backend provider. The feature was requested by at least one operator (me) and was agreed at the PTG to add value.

Story: 2001777
Task: 12483
Change-Id: I953abe71a0988da78efc6b3961f7518c81c2a06d
parent 389931fbe8
commit e819e4521a
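For context, a minimal sketch (not part of this change) of creating a member with the new flag through the v2 members API using python-requests; the endpoint, token, IDs and member values are placeholders borrowed from the sample requests further down.

```python
# Hypothetical usage sketch: create a pool member with "backup" set to true.
# The endpoint, token, pool/subnet IDs and member values are placeholders.
import json
import requests

OCTAVIA = "http://198.51.100.10:9876/v2.0"
POOL_ID = "4029d267-3983-4224-a3d0-afb3fe16a2cd"
TOKEN = "<token>"

body = {
    "member": {
        "name": "web-server-spare",
        "address": "192.0.2.17",
        "protocol_port": 80,
        "subnet_id": "bbb35f84-35cc-4b2f-84c2-a6a29bba68aa",
        "backup": True,  # the new flag added by this change
    }
}

resp = requests.post(
    "%s/lbaas/pools/%s/members" % (OCTAVIA, POOL_ID),
    headers={"Content-Type": "application/json", "X-Auth-Token": TOKEN},
    data=json.dumps(body),
)
resp.raise_for_status()
print(resp.json()["member"]["backup"])  # True on success
```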
@@ -173,6 +173,20 @@ api_version_status:
   in: body
   required: true
   type: string
+backup:
+  description: |
+    Is the member a backup? Backup members only receive traffic when all
+    non-backup members are down.
+  in: body
+  required: true
+  type: boolean
+backup-optional:
+  description: |
+    Is the member a backup? Backup members only receive traffic when all
+    non-backup members are down.
+  in: body
+  required: false
+  type: boolean
 bytes_in:
   description: |
     The total bytes received.
@@ -1 +1 @@
-curl -X POST -H "Content-Type: application/json" -H "X-Auth-Token: <token>" -d '{"member":{"name":"web-server-1","weight":"20","admin_state_up":true,"subnet_id":"bbb35f84-35cc-4b2f-84c2-a6a29bba68aa","address":"192.0.2.16","protocol_port":"80","monitor_port":8080}}' http://198.51.100.10:9876/v2.0/lbaas/pools/4029d267-3983-4224-a3d0-afb3fe16a2cd/members
+curl -X POST -H "Content-Type: application/json" -H "X-Auth-Token: <token>" -d '{"member":{"name":"web-server-1","weight":"20","admin_state_up":true,"subnet_id":"bbb35f84-35cc-4b2f-84c2-a6a29bba68aa","address":"192.0.2.16","protocol_port":"80","monitor_port":8080,"backup":false}}' http://198.51.100.10:9876/v2.0/lbaas/pools/4029d267-3983-4224-a3d0-afb3fe16a2cd/members
@@ -6,6 +6,7 @@
         "subnet_id": "bbb35f84-35cc-4b2f-84c2-a6a29bba68aa",
         "address": "192.0.2.16",
         "protocol_port": "80",
-        "monitor_port": 8080
+        "monitor_port": 8080,
+        "backup": false
     }
 }
@@ -4,6 +4,7 @@
         "project_id": "e3cd678b11784734bc366148aa37580e",
         "name": "web-server-1",
         "weight": 20,
+        "backup": false,
         "admin_state_up": true,
         "subnet_id": "bbb35f84-35cc-4b2f-84c2-a6a29bba68aa",
         "created_at": "2017-05-11T17:21:34",
@@ -4,6 +4,7 @@
         "project_id": "e3cd678b11784734bc366148aa37580e",
         "name": "web-server-1",
         "weight": 20,
+        "backup": false,
         "admin_state_up": true,
         "subnet_id": "bbb35f84-35cc-4b2f-84c2-a6a29bba68aa",
         "created_at": "2017-05-11T17:21:34",
@@ -1 +1 @@
-curl -X PUT -H "Content-Type: application/json" -H "X-Auth-Token: <token>" -d '{"member":{"name":"web-server-1-2","weight":"0","admin_state_up":"true","monitor_address":"192.0.2.40","monitor_port":8888}}' http://198.51.100.10:9876/v2.0/lbaas/pools/4029d267-3983-4224-a3d0-afb3fe16a2cd/members/957a1ace-1bd2-449b-8455-820b6e4b63f3
+curl -X PUT -H "Content-Type: application/json" -H "X-Auth-Token: <token>" -d '{"member":{"name":"web-server-1-2","weight":"0","admin_state_up":"true","monitor_address":"192.0.2.40","monitor_port":8888,"backup":false}}' http://198.51.100.10:9876/v2.0/lbaas/pools/4029d267-3983-4224-a3d0-afb3fe16a2cd/members/957a1ace-1bd2-449b-8455-820b6e4b63f3
@@ -4,6 +4,7 @@
         "weight": "0",
         "admin_state_up": "true",
         "monitor_address": "192.0.2.40",
-        "monitor_port": 8888
+        "monitor_port": 8888,
+        "backup": false
     }
 }
@@ -4,6 +4,7 @@
         "project_id": "e3cd678b11784734bc366148aa37580e",
         "name": "web-server-1",
         "weight": 20,
+        "backup": false,
         "admin_state_up": true,
         "subnet_id": "bbb35f84-35cc-4b2f-84c2-a6a29bba68aa",
         "created_at": "2017-05-11T17:21:34",
@@ -5,6 +5,7 @@
         "project_id": "e3cd678b11784734bc366148aa37580e",
         "name": "web-server-1",
         "weight": 20,
+        "backup": false,
         "admin_state_up": true,
         "subnet_id": "bbb35f84-35cc-4b2f-84c2-a6a29bba68aa",
         "created_at": "2017-05-11T17:21:34",
@@ -48,6 +48,7 @@ Response Parameters

    - address: address-member
    - admin_state_up: admin_state_up
+   - backup: backup
    - created_at: created_at
    - id: member-id
    - monitor_address: monitor_address
@@ -105,6 +106,8 @@ request:

 - ``admin_state_up``. Default is ``true``.

+- ``backup``. Default is ``false``.
+
 - ``weight``. Default is ``1``.

 If you omit the ``subnet_id`` parameter, the ``vip_subnet_id`` for the parent
@@ -145,6 +148,7 @@ Request

    - admin_state_up: admin_state_up-default-optional
    - address: address
+   - backup: backup-optional
    - monitor_address: monitor_address-optional
    - monitor_port: monitor_port-optional
    - name: name-optional
@@ -173,6 +177,7 @@ Response Parameters

    - address: address-member
    - admin_state_up: admin_state_up
+   - backup: backup
    - created_at: created_at
    - id: member-id
    - monitor_address: monitor_address
@@ -238,6 +243,7 @@ Response Parameters

    - address: address-member
    - admin_state_up: admin_state_up
+   - backup: backup
    - created_at: created_at
    - id: member-id
    - monitor_address: monitor_address
@@ -295,6 +301,7 @@ Request
 .. rest_parameters:: ../parameters.yaml

    - admin_state_up: admin_state_up-default-optional
+   - backup: backup-optional
    - member_id: path-member-id
    - monitor_address: monitor_address-optional
    - monitor_port: monitor_port-optional
@@ -321,6 +328,7 @@ Response Parameters

    - address: address-member
    - admin_state_up: admin_state_up
+   - backup: backup
    - created_at: created_at
    - id: member-id
    - monitor_address: monitor_address
@@ -385,6 +393,7 @@ Request

    - admin_state_up: admin_state_up-default-optional
    - address: address
+   - backup: backup-optional
    - monitor_address: monitor_address-optional
    - monitor_port: monitor_port-optional
    - name: name-optional
@@ -49,7 +49,7 @@ class RootController(rest.RestController):
             self._versions.append(
                 {
                     'status': 'CURRENT',
-                    'updated': '2017-06-22T00:00:00Z',
+                    'updated': '2018-03-14T00:00:00Z',
                     'id': 'v2.0'
                 })
         if not (v1_enabled or v2_enabled):
@@ -34,6 +34,7 @@ class MemberResponse(BaseMemberType):
     address = wtypes.wsattr(types.IPAddressType())
     protocol_port = wtypes.wsattr(wtypes.IntegerType())
     weight = wtypes.wsattr(wtypes.IntegerType())
+    backup = wtypes.wsattr(bool)
     subnet_id = wtypes.wsattr(wtypes.UuidType())
     project_id = wtypes.wsattr(wtypes.StringType())
     created_at = wtypes.wsattr(wtypes.datetime.datetime)
@@ -73,6 +74,7 @@ class MemberPOST(BaseMemberType):
                                 mandatory=True)
     weight = wtypes.wsattr(wtypes.IntegerType(
         minimum=constants.MIN_WEIGHT, maximum=constants.MAX_WEIGHT), default=1)
+    backup = wtypes.wsattr(bool, default=False)
     subnet_id = wtypes.wsattr(wtypes.UuidType())
     # TODO(johnsom) Remove after deprecation (R series)
     project_id = wtypes.wsattr(wtypes.StringType(max_length=36))
@@ -92,6 +94,7 @@ class MemberPUT(BaseMemberType):
     admin_state_up = wtypes.wsattr(bool)
     weight = wtypes.wsattr(wtypes.IntegerType(
         minimum=constants.MIN_WEIGHT, maximum=constants.MAX_WEIGHT))
+    backup = wtypes.wsattr(bool)
     monitor_port = wtypes.wsattr(wtypes.IntegerType(
         minimum=constants.MIN_PORT_NUMBER, maximum=constants.MAX_PORT_NUMBER))
     monitor_address = wtypes.wsattr(types.IPAddressType())
@@ -115,6 +118,7 @@ class MemberSingleCreate(BaseMemberType):
                                 mandatory=True)
     weight = wtypes.wsattr(wtypes.IntegerType(
         minimum=constants.MIN_WEIGHT, maximum=constants.MAX_WEIGHT), default=1)
+    backup = wtypes.wsattr(bool, default=False)
     subnet_id = wtypes.wsattr(wtypes.UuidType())
     monitor_port = wtypes.wsattr(wtypes.IntegerType(
         minimum=constants.MIN_PORT_NUMBER, maximum=constants.MAX_PORT_NUMBER))
@@ -313,7 +313,7 @@ class Pool(BaseDataModel):
 class Member(BaseDataModel):

     def __init__(self, id=None, project_id=None, pool_id=None, ip_address=None,
-                 protocol_port=None, weight=None, enabled=None,
+                 protocol_port=None, weight=None, backup=None, enabled=None,
                  subnet_id=None, operating_status=None, pool=None,
                  created_at=None, updated_at=None, provisioning_status=None,
                  name=None, monitor_address=None, monitor_port=None):
@@ -323,6 +323,7 @@ class Member(BaseDataModel):
         self.ip_address = ip_address
         self.protocol_port = protocol_port
         self.weight = weight
+        self.backup = backup
         self.enabled = enabled
         self.subnet_id = subnet_id
         self.operating_status = operating_status
@@ -273,7 +273,8 @@ class JinjaTemplater(object):
             'subnet_id': member.subnet_id,
             'operating_status': member.operating_status,
             'monitor_address': member.monitor_address,
-            'monitor_port': member.monitor_port
+            'monitor_port': member.monitor_port,
+            'backup': member.backup
         }

     def _transform_health_monitor(self, monitor):
@@ -175,9 +175,14 @@ frontend {{ listener.id }}
     {% else %}
         {% set proxy_protocol_opt = "" %}
     {% endif %}
-    {{ "server %s %s:%d weight %s%s%s%s"|e|format(
+    {% if member.backup %}
+        {% set member_backup_opt = " backup" %}
+    {% else %}
+        {% set member_backup_opt = "" %}
+    {% endif %}
+    {{ "server %s %s:%d weight %s%s%s%s%s"|e|format(
         member.id, member.address, member.protocol_port, member.weight,
-        hm_opt, persistence_opt, proxy_protocol_opt)|trim() }}
+        hm_opt, persistence_opt, proxy_protocol_opt, member_backup_opt)|trim() }}
 {% endmacro %}


@@ -245,6 +250,7 @@ backend {{ pool.id }}
     {% if listener.connection_limit is defined %}
     fullconn {{ listener.connection_limit }}
     {% endif %}
+    option allbackups
     {% for member in pool.members if member.enabled %}
         {{- member_macro(constants, pool, member) -}}
     {% endfor %}
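As a rough illustration of what the template change above renders, here is a stand-alone sketch using a simplified jinja2 snippet (not the real haproxy template): a backup member's server line simply gains a trailing "backup" keyword, and "option allbackups" lets every backup take traffic once the primaries are gone.

```python
# Stand-alone approximation of the macro change: not the real haproxy.cfg.j2,
# just the backup-suffix logic reduced to a small Jinja2 template.
from jinja2 import Template

SNIPPET = Template(
    "option allbackups\n"
    "{% for m in members %}"
    "server {{ m.id }} {{ m.address }}:{{ m.port }} weight {{ m.weight }}"
    "{{ ' backup' if m.backup else '' }}\n"
    "{% endfor %}"
)

members = [
    {"id": "m1", "address": "10.0.0.99", "port": 82, "weight": 13, "backup": False},
    {"id": "m2", "address": "10.0.0.98", "port": 82, "weight": 13, "backup": True},
]
print(SNIPPET.render(members=members))
# option allbackups
# server m1 10.0.0.99:82 weight 13
# server m2 10.0.0.98:82 weight 13 backup
```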
@@ -0,0 +1,34 @@
+# Copyright 2016 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+
+"""add backup field to member
+
+Revision ID: ba35e0fb88e1
+Revises: 034756a182a2
+Create Date: 2018-03-14 00:46:16.281857
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+# revision identifiers, used by Alembic.
+revision = 'ba35e0fb88e1'
+down_revision = '034756a182a2'
+
+
+def upgrade():
+    op.add_column(u'member', sa.Column(u'backup', sa.Boolean(),
+                                       nullable=False, default=False))
@@ -190,6 +190,7 @@ class Member(base_models.BASE, base_models.IdMixin, base_models.ProjectMixin,
     ip_address = sa.Column('ip_address', sa.String(64), nullable=False)
     protocol_port = sa.Column(sa.Integer, nullable=False)
     weight = sa.Column(sa.Integer, nullable=True)
+    backup = sa.Column(sa.Boolean(), nullable=False)
     monitor_address = sa.Column(sa.String(64), nullable=True)
     monitor_port = sa.Column(sa.Integer, nullable=True)
     provisioning_status = sa.Column(
@@ -62,8 +62,8 @@ def create_load_balancer(lb_dict):
         lb_dict['id'] = uuidutils.generate_uuid()
     if lb_dict.get('vip'):
         lb_dict['vip']['load_balancer_id'] = lb_dict.get('id')
-    lb_dict['provisioning_status'] = constants.PENDING_CREATE
-    lb_dict['operating_status'] = constants.OFFLINE
+    lb_dict[constants.PROVISIONING_STATUS] = constants.PENDING_CREATE
+    lb_dict[constants.OPERATING_STATUS] = constants.OFFLINE
     return lb_dict


@@ -76,8 +76,8 @@ def create_listener(listener_dict, lb_id):
     else:
         listener_dict['load_balancer_id'] = lb_id

-    listener_dict['provisioning_status'] = constants.PENDING_CREATE
-    listener_dict['operating_status'] = constants.OFFLINE
+    listener_dict[constants.PROVISIONING_STATUS] = constants.PENDING_CREATE
+    listener_dict[constants.OPERATING_STATUS] = constants.OFFLINE
     # NOTE(blogan): Throwing away because we should not store secure data
     # in the database nor should we send it to a handler.
     if 'tls_termination' in listener_dict:
@@ -103,8 +103,8 @@ def create_listener(listener_dict, lb_id):
 def create_l7policy(l7policy_dict, lb_id, listener_id):
     l7policy_dict = validate.sanitize_l7policy_api_args(l7policy_dict,
                                                         create=True)
-    l7policy_dict['provisioning_status'] = constants.PENDING_CREATE
-    l7policy_dict['operating_status'] = constants.OFFLINE
+    l7policy_dict[constants.PROVISIONING_STATUS] = constants.PENDING_CREATE
+    l7policy_dict[constants.OPERATING_STATUS] = constants.OFFLINE
     if not l7policy_dict.get('id'):
         l7policy_dict['id'] = uuidutils.generate_uuid()
     l7policy_dict['listener_id'] = listener_id
@@ -132,8 +132,8 @@ def create_l7policy(l7policy_dict, lb_id, listener_id):


 def create_l7rule(l7rule_dict, l7policy_id):
-    l7rule_dict['provisioning_status'] = constants.PENDING_CREATE
-    l7rule_dict['operating_status'] = constants.OFFLINE
+    l7rule_dict[constants.PROVISIONING_STATUS] = constants.PENDING_CREATE
+    l7rule_dict[constants.OPERATING_STATUS] = constants.OFFLINE
     if not l7rule_dict.get('id'):
         l7rule_dict['id'] = uuidutils.generate_uuid()
     l7rule_dict['l7policy_id'] = l7policy_id
@@ -157,24 +157,26 @@ def create_pool(pool_dict, lb_id=None):
         prepped_members = []
         for member_dict in pool_dict.get('members'):
             prepped_members.append(create_member(member_dict, pool_dict['id']))
-    pool_dict['provisioning_status'] = constants.PENDING_CREATE
-    pool_dict['operating_status'] = constants.OFFLINE
+    pool_dict[constants.PROVISIONING_STATUS] = constants.PENDING_CREATE
+    pool_dict[constants.OPERATING_STATUS] = constants.OFFLINE
     return pool_dict


 def create_member(member_dict, pool_id, has_health_monitor=False):
     member_dict['pool_id'] = pool_id
-    member_dict['provisioning_status'] = constants.PENDING_CREATE
+    member_dict[constants.PROVISIONING_STATUS] = constants.PENDING_CREATE
     if has_health_monitor:
-        member_dict['operating_status'] = constants.OFFLINE
+        member_dict[constants.OPERATING_STATUS] = constants.OFFLINE
     else:
-        member_dict['operating_status'] = constants.NO_MONITOR
+        member_dict[constants.OPERATING_STATUS] = constants.NO_MONITOR
+    if 'backup' not in member_dict:
+        member_dict['backup'] = False
     return member_dict


 def create_health_monitor(hm_dict, pool_id=None):
-    hm_dict['provisioning_status'] = constants.PENDING_CREATE
-    hm_dict['operating_status'] = constants.OFFLINE
+    hm_dict[constants.PROVISIONING_STATUS] = constants.PENDING_CREATE
+    hm_dict[constants.OPERATING_STATUS] = constants.OFFLINE
     if pool_id:
         hm_dict['id'] = pool_id
         hm_dict['pool_id'] = pool_id
@ -61,7 +61,7 @@ class TestMember(base.BaseAPITest):
|
||||
|
||||
def test_get(self):
|
||||
api_member = self.create_member(
|
||||
self.pool_id, '10.0.0.1', 80).get(self.root_tag)
|
||||
self.pool_id, '192.0.2.1', 80).get(self.root_tag)
|
||||
response = self.get(self.member_path.format(
|
||||
member_id=api_member.get('id'))).json.get(self.root_tag)
|
||||
self.assertEqual(api_member, response)
|
||||
@ -69,7 +69,7 @@ class TestMember(base.BaseAPITest):
|
||||
|
||||
def test_get_authorized(self):
|
||||
api_member = self.create_member(
|
||||
self.pool_id, '10.0.0.1', 80).get(self.root_tag)
|
||||
self.pool_id, '192.0.2.1', 80).get(self.root_tag)
|
||||
self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
|
||||
auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
|
||||
self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
|
||||
@ -99,7 +99,7 @@ class TestMember(base.BaseAPITest):
|
||||
|
||||
def test_get_not_authorized(self):
|
||||
api_member = self.create_member(
|
||||
self.pool_id, '10.0.0.1', 80).get(self.root_tag)
|
||||
self.pool_id, '192.0.2.1', 80).get(self.root_tag)
|
||||
self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
|
||||
auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
|
||||
self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
|
||||
@ -112,7 +112,7 @@ class TestMember(base.BaseAPITest):
|
||||
|
||||
def test_get_hides_deleted(self):
|
||||
api_member = self.create_member(
|
||||
self.pool_id, '10.0.0.1', 80).get(self.root_tag)
|
||||
self.pool_id, '192.0.2.1', 80).get(self.root_tag)
|
||||
|
||||
response = self.get(self.members_path)
|
||||
objects = response.json.get(self.root_tag_list)
|
||||
@ -129,10 +129,10 @@ class TestMember(base.BaseAPITest):
|
||||
|
||||
def test_get_all(self):
|
||||
api_m_1 = self.create_member(
|
||||
self.pool_id, '10.0.0.1', 80).get(self.root_tag)
|
||||
self.pool_id, '192.0.2.1', 80).get(self.root_tag)
|
||||
self.set_lb_status(self.lb_id)
|
||||
api_m_2 = self.create_member(
|
||||
self.pool_id, '10.0.0.2', 80).get(self.root_tag)
|
||||
self.pool_id, '192.0.2.2', 80).get(self.root_tag)
|
||||
self.set_lb_status(self.lb_id)
|
||||
# Original objects didn't have the updated operating/provisioning
|
||||
# status that exists in the DB.
|
||||
@ -150,10 +150,10 @@ class TestMember(base.BaseAPITest):
|
||||
|
||||
def test_get_all_authorized(self):
|
||||
api_m_1 = self.create_member(
|
||||
self.pool_id, '10.0.0.1', 80).get(self.root_tag)
|
||||
self.pool_id, '192.0.2.1', 80).get(self.root_tag)
|
||||
self.set_lb_status(self.lb_id)
|
||||
api_m_2 = self.create_member(
|
||||
self.pool_id, '10.0.0.2', 80).get(self.root_tag)
|
||||
self.pool_id, '192.0.2.2', 80).get(self.root_tag)
|
||||
self.set_lb_status(self.lb_id)
|
||||
# Original objects didn't have the updated operating/provisioning
|
||||
# status that exists in the DB.
|
||||
@ -196,10 +196,10 @@ class TestMember(base.BaseAPITest):
|
||||
|
||||
def test_get_all_not_authorized(self):
|
||||
api_m_1 = self.create_member(
|
||||
self.pool_id, '10.0.0.1', 80).get(self.root_tag)
|
||||
self.pool_id, '192.0.2.1', 80).get(self.root_tag)
|
||||
self.set_lb_status(self.lb_id)
|
||||
api_m_2 = self.create_member(
|
||||
self.pool_id, '10.0.0.2', 80).get(self.root_tag)
|
||||
self.pool_id, '192.0.2.2', 80).get(self.root_tag)
|
||||
self.set_lb_status(self.lb_id)
|
||||
# Original objects didn't have the updated operating/provisioning
|
||||
# status that exists in the DB.
|
||||
@ -218,11 +218,11 @@ class TestMember(base.BaseAPITest):
|
||||
self.assertEqual(self.NOT_AUTHORIZED_BODY, response.json)
|
||||
|
||||
def test_get_all_sorted(self):
|
||||
self.create_member(self.pool_id, '10.0.0.1', 80, name='member1')
|
||||
self.create_member(self.pool_id, '192.0.2.1', 80, name='member1')
|
||||
self.set_lb_status(self.lb_id)
|
||||
self.create_member(self.pool_id, '10.0.0.2', 80, name='member2')
|
||||
self.create_member(self.pool_id, '192.0.2.2', 80, name='member2')
|
||||
self.set_lb_status(self.lb_id)
|
||||
self.create_member(self.pool_id, '10.0.0.3', 80, name='member3')
|
||||
self.create_member(self.pool_id, '192.0.2.3', 80, name='member3')
|
||||
self.set_lb_status(self.lb_id)
|
||||
|
||||
response = self.get(self.members_path,
|
||||
@ -243,11 +243,11 @@ class TestMember(base.BaseAPITest):
|
||||
list(reversed(member_id_names_desc)))
|
||||
|
||||
def test_get_all_limited(self):
|
||||
self.create_member(self.pool_id, '10.0.0.1', 80, name='member1')
|
||||
self.create_member(self.pool_id, '192.0.2.1', 80, name='member1')
|
||||
self.set_lb_status(self.lb_id)
|
||||
self.create_member(self.pool_id, '10.0.0.2', 80, name='member2')
|
||||
self.create_member(self.pool_id, '192.0.2.2', 80, name='member2')
|
||||
self.set_lb_status(self.lb_id)
|
||||
self.create_member(self.pool_id, '10.0.0.3', 80, name='member3')
|
||||
self.create_member(self.pool_id, '192.0.2.3', 80, name='member3')
|
||||
self.set_lb_status(self.lb_id)
|
||||
|
||||
# First two -- should have 'next' link
|
||||
@ -279,11 +279,11 @@ class TestMember(base.BaseAPITest):
|
||||
self.assertItemsEqual(['previous', 'next'], [l['rel'] for l in links])
|
||||
|
||||
def test_get_all_fields_filter(self):
|
||||
self.create_member(self.pool_id, '10.0.0.1', 80, name='member1')
|
||||
self.create_member(self.pool_id, '192.0.2.1', 80, name='member1')
|
||||
self.set_lb_status(self.lb_id)
|
||||
self.create_member(self.pool_id, '10.0.0.2', 80, name='member2')
|
||||
self.create_member(self.pool_id, '192.0.2.2', 80, name='member2')
|
||||
self.set_lb_status(self.lb_id)
|
||||
self.create_member(self.pool_id, '10.0.0.3', 80, name='member3')
|
||||
self.create_member(self.pool_id, '192.0.2.3', 80, name='member3')
|
||||
self.set_lb_status(self.lb_id)
|
||||
|
||||
members = self.get(self.members_path, params={
|
||||
@ -295,17 +295,17 @@ class TestMember(base.BaseAPITest):
|
||||
|
||||
def test_get_all_filter(self):
|
||||
mem1 = self.create_member(self.pool_id,
|
||||
'10.0.0.1',
|
||||
'192.0.2.1',
|
||||
80,
|
||||
name='member1').get(self.root_tag)
|
||||
self.set_lb_status(self.lb_id)
|
||||
self.create_member(self.pool_id,
|
||||
'10.0.0.2',
|
||||
'192.0.2.2',
|
||||
80,
|
||||
name='member2').get(self.root_tag)
|
||||
self.set_lb_status(self.lb_id)
|
||||
self.create_member(self.pool_id,
|
||||
'10.0.0.3',
|
||||
'192.0.2.3',
|
||||
80,
|
||||
name='member3').get(self.root_tag)
|
||||
self.set_lb_status(self.lb_id)
|
||||
@ -323,8 +323,8 @@ class TestMember(base.BaseAPITest):
|
||||
|
||||
def test_create_sans_listener(self):
|
||||
api_member = self.create_member(
|
||||
self.pool_id, '10.0.0.1', 80).get(self.root_tag)
|
||||
self.assertEqual('10.0.0.1', api_member['address'])
|
||||
self.pool_id, '192.0.2.1', 80).get(self.root_tag)
|
||||
self.assertEqual('192.0.2.1', api_member['address'])
|
||||
self.assertEqual(80, api_member['protocol_port'])
|
||||
self.assertIsNotNone(api_member['created_at'])
|
||||
self.assertIsNone(api_member['updated_at'])
|
||||
@ -367,10 +367,10 @@ class TestMember(base.BaseAPITest):
|
||||
return_value=override_credentials):
|
||||
|
||||
api_member = self.create_member(
|
||||
self.pool_id, '10.0.0.1', 80).get(self.root_tag)
|
||||
self.pool_id, '192.0.2.1', 80).get(self.root_tag)
|
||||
self.conf.config(group='api_settings', auth_strategy=auth_strategy)
|
||||
|
||||
self.assertEqual('10.0.0.1', api_member['address'])
|
||||
self.assertEqual('192.0.2.1', api_member['address'])
|
||||
self.assertEqual(80, api_member['protocol_port'])
|
||||
self.assertIsNotNone(api_member['created_at'])
|
||||
self.assertIsNone(api_member['updated_at'])
|
||||
@ -396,7 +396,7 @@ class TestMember(base.BaseAPITest):
|
||||
with mock.patch.object(octavia.common.context.Context, 'project_id',
|
||||
self.project_id):
|
||||
api_member = self.create_member(
|
||||
self.pool_id, '10.0.0.1', 80, status=403)
|
||||
self.pool_id, '192.0.2.1', 80, status=403)
|
||||
self.conf.config(group='api_settings', auth_strategy=auth_strategy)
|
||||
self.assertEqual(self.NOT_AUTHORIZED_BODY, api_member)
|
||||
|
||||
@ -404,9 +404,18 @@ class TestMember(base.BaseAPITest):
|
||||
def test_create_with_project_id_is_ignored(self):
|
||||
pid = uuidutils.generate_uuid()
|
||||
api_member = self.create_member(
|
||||
self.pool_id, '10.0.0.1', 80, project_id=pid).get(self.root_tag)
|
||||
self.pool_id, '192.0.2.1', 80, project_id=pid).get(self.root_tag)
|
||||
self.assertEqual(self.project_id, api_member['project_id'])
|
||||
|
||||
def test_create_backup(self):
|
||||
api_member = self.create_member(
|
||||
self.pool_id, '192.0.2.1', 80, backup=True).get(self.root_tag)
|
||||
self.assertTrue(api_member['backup'])
|
||||
self.set_lb_status(self.lb_id)
|
||||
api_member = self.create_member(
|
||||
self.pool_id, '192.0.2.1', 81, backup=False).get(self.root_tag)
|
||||
self.assertFalse(api_member['backup'])
|
||||
|
||||
def test_bad_create(self):
|
||||
member = {'name': 'test1'}
|
||||
self.post(self.members_path, self._build_body(member), status=400)
|
||||
@ -414,7 +423,7 @@ class TestMember(base.BaseAPITest):
|
||||
def test_create_with_bad_handler(self):
|
||||
self.handler_mock().member.create.side_effect = Exception()
|
||||
api_member = self.create_member(
|
||||
self.pool_with_listener_id, '10.0.0.1', 80).get(self.root_tag)
|
||||
self.pool_with_listener_id, '192.0.2.1', 80).get(self.root_tag)
|
||||
self.assert_correct_status(
|
||||
lb_id=self.lb_id, listener_id=self.listener_id,
|
||||
pool_id=self.pool_with_listener_id,
|
||||
@ -426,12 +435,12 @@ class TestMember(base.BaseAPITest):
|
||||
member_op_status=constants.NO_MONITOR)
|
||||
|
||||
def test_full_batch_members(self):
|
||||
member1 = {'address': '10.0.0.1', 'protocol_port': 80}
|
||||
member2 = {'address': '10.0.0.2', 'protocol_port': 80}
|
||||
member3 = {'address': '10.0.0.3', 'protocol_port': 80}
|
||||
member4 = {'address': '10.0.0.4', 'protocol_port': 80}
|
||||
member5 = {'address': '10.0.0.5', 'protocol_port': 80}
|
||||
member6 = {'address': '10.0.0.6', 'protocol_port': 80}
|
||||
member1 = {'address': '192.0.2.1', 'protocol_port': 80}
|
||||
member2 = {'address': '192.0.2.2', 'protocol_port': 80}
|
||||
member3 = {'address': '192.0.2.3', 'protocol_port': 80}
|
||||
member4 = {'address': '192.0.2.4', 'protocol_port': 80}
|
||||
member5 = {'address': '192.0.2.5', 'protocol_port': 80}
|
||||
member6 = {'address': '192.0.2.6', 'protocol_port': 80}
|
||||
members = [member1, member2, member3, member4]
|
||||
for m in members:
|
||||
self.create_member(pool_id=self.pool_id, **m)
|
||||
@ -446,12 +455,12 @@ class TestMember(base.BaseAPITest):
|
||||
).json.get(self.root_tag_list)
|
||||
|
||||
expected_members = [
|
||||
('10.0.0.1', 80, 'PENDING_UPDATE'),
|
||||
('10.0.0.2', 80, 'PENDING_UPDATE'),
|
||||
('10.0.0.3', 80, 'PENDING_DELETE'),
|
||||
('10.0.0.4', 80, 'PENDING_DELETE'),
|
||||
('10.0.0.5', 80, 'PENDING_CREATE'),
|
||||
('10.0.0.6', 80, 'PENDING_CREATE'),
|
||||
('192.0.2.1', 80, 'PENDING_UPDATE'),
|
||||
('192.0.2.2', 80, 'PENDING_UPDATE'),
|
||||
('192.0.2.3', 80, 'PENDING_DELETE'),
|
||||
('192.0.2.4', 80, 'PENDING_DELETE'),
|
||||
('192.0.2.5', 80, 'PENDING_CREATE'),
|
||||
('192.0.2.6', 80, 'PENDING_CREATE'),
|
||||
]
|
||||
|
||||
member_ids = {}
|
||||
@ -463,21 +472,22 @@ class TestMember(base.BaseAPITest):
|
||||
member_ids[(rm['address'], rm['protocol_port'])] = rm['id']
|
||||
handler_args = self.handler_mock().member.batch_update.call_args[0]
|
||||
self.assertEqual(
|
||||
[member_ids[('10.0.0.3', 80)], member_ids[('10.0.0.4', 80)]],
|
||||
[member_ids[('192.0.2.3', 80)], member_ids[('192.0.2.4', 80)]],
|
||||
handler_args[0])
|
||||
self.assertEqual(
|
||||
[member_ids[('10.0.0.5', 80)], member_ids[('10.0.0.6', 80)]],
|
||||
[member_ids[('192.0.2.5', 80)], member_ids[('192.0.2.6', 80)]],
|
||||
handler_args[1])
|
||||
self.assertEqual(2, len(handler_args[2]))
|
||||
updated_members = [
|
||||
(handler_args[2][0].address, handler_args[2][0].protocol_port),
|
||||
(handler_args[2][1].address, handler_args[2][1].protocol_port)
|
||||
]
|
||||
self.assertEqual([('10.0.0.1', 80), ('10.0.0.2', 80)], updated_members)
|
||||
self.assertEqual([('192.0.2.1', 80), ('192.0.2.2', 80)],
|
||||
updated_members)
|
||||
|
||||
def test_create_batch_members(self):
|
||||
member5 = {'address': '10.0.0.5', 'protocol_port': 80}
|
||||
member6 = {'address': '10.0.0.6', 'protocol_port': 80}
|
||||
member5 = {'address': '192.0.2.5', 'protocol_port': 80}
|
||||
member6 = {'address': '192.0.2.6', 'protocol_port': 80}
|
||||
|
||||
req_dict = [member5, member6]
|
||||
body = {self.root_tag_list: req_dict}
|
||||
@ -488,8 +498,8 @@ class TestMember(base.BaseAPITest):
|
||||
).json.get(self.root_tag_list)
|
||||
|
||||
expected_members = [
|
||||
('10.0.0.5', 80, 'PENDING_CREATE'),
|
||||
('10.0.0.6', 80, 'PENDING_CREATE'),
|
||||
('192.0.2.5', 80, 'PENDING_CREATE'),
|
||||
('192.0.2.6', 80, 'PENDING_CREATE'),
|
||||
]
|
||||
|
||||
member_ids = {}
|
||||
@ -502,13 +512,13 @@ class TestMember(base.BaseAPITest):
|
||||
handler_args = self.handler_mock().member.batch_update.call_args[0]
|
||||
self.assertEqual(0, len(handler_args[0]))
|
||||
self.assertEqual(
|
||||
[member_ids[('10.0.0.5', 80)], member_ids[('10.0.0.6', 80)]],
|
||||
[member_ids[('192.0.2.5', 80)], member_ids[('192.0.2.6', 80)]],
|
||||
handler_args[1])
|
||||
self.assertEqual(0, len(handler_args[2]))
|
||||
|
||||
def test_update_batch_members(self):
|
||||
member1 = {'address': '10.0.0.1', 'protocol_port': 80}
|
||||
member2 = {'address': '10.0.0.2', 'protocol_port': 80}
|
||||
member1 = {'address': '192.0.2.1', 'protocol_port': 80}
|
||||
member2 = {'address': '192.0.2.2', 'protocol_port': 80}
|
||||
members = [member1, member2]
|
||||
for m in members:
|
||||
self.create_member(pool_id=self.pool_id, **m)
|
||||
@ -523,8 +533,8 @@ class TestMember(base.BaseAPITest):
|
||||
).json.get(self.root_tag_list)
|
||||
|
||||
expected_members = [
|
||||
('10.0.0.1', 80, 'PENDING_UPDATE'),
|
||||
('10.0.0.2', 80, 'PENDING_UPDATE'),
|
||||
('192.0.2.1', 80, 'PENDING_UPDATE'),
|
||||
('192.0.2.2', 80, 'PENDING_UPDATE'),
|
||||
]
|
||||
|
||||
member_ids = {}
|
||||
@ -542,11 +552,12 @@ class TestMember(base.BaseAPITest):
|
||||
(handler_args[2][0].address, handler_args[2][0].protocol_port),
|
||||
(handler_args[2][1].address, handler_args[2][1].protocol_port)
|
||||
]
|
||||
self.assertEqual([('10.0.0.1', 80), ('10.0.0.2', 80)], updated_members)
|
||||
self.assertEqual([('192.0.2.1', 80), ('192.0.2.2', 80)],
|
||||
updated_members)
|
||||
|
||||
def test_delete_batch_members(self):
|
||||
member3 = {'address': '10.0.0.3', 'protocol_port': 80}
|
||||
member4 = {'address': '10.0.0.4', 'protocol_port': 80}
|
||||
member3 = {'address': '192.0.2.3', 'protocol_port': 80}
|
||||
member4 = {'address': '192.0.2.4', 'protocol_port': 80}
|
||||
members = [member3, member4]
|
||||
for m in members:
|
||||
self.create_member(pool_id=self.pool_id, **m)
|
||||
@ -561,8 +572,8 @@ class TestMember(base.BaseAPITest):
|
||||
).json.get(self.root_tag_list)
|
||||
|
||||
expected_members = [
|
||||
('10.0.0.3', 80, 'PENDING_DELETE'),
|
||||
('10.0.0.4', 80, 'PENDING_DELETE'),
|
||||
('192.0.2.3', 80, 'PENDING_DELETE'),
|
||||
('192.0.2.4', 80, 'PENDING_DELETE'),
|
||||
]
|
||||
|
||||
member_ids = {}
|
||||
@ -574,15 +585,15 @@ class TestMember(base.BaseAPITest):
|
||||
member_ids[(rm['address'], rm['protocol_port'])] = rm['id']
|
||||
handler_args = self.handler_mock().member.batch_update.call_args[0]
|
||||
self.assertEqual(
|
||||
[member_ids[('10.0.0.3', 80)], member_ids[('10.0.0.4', 80)]],
|
||||
[member_ids[('192.0.2.3', 80)], member_ids[('192.0.2.4', 80)]],
|
||||
handler_args[0])
|
||||
self.assertEqual(0, len(handler_args[1]))
|
||||
self.assertEqual(0, len(handler_args[2]))
|
||||
|
||||
def test_create_with_attached_listener(self):
|
||||
api_member = self.create_member(
|
||||
self.pool_with_listener_id, '10.0.0.1', 80).get(self.root_tag)
|
||||
self.assertEqual('10.0.0.1', api_member['address'])
|
||||
self.pool_with_listener_id, '192.0.2.1', 80).get(self.root_tag)
|
||||
self.assertEqual('192.0.2.1', api_member['address'])
|
||||
self.assertEqual(80, api_member['protocol_port'])
|
||||
self.assert_correct_status(
|
||||
lb_id=self.lb_id, listener_id=self.listener_id,
|
||||
@ -599,10 +610,10 @@ class TestMember(base.BaseAPITest):
|
||||
|
||||
def test_create_with_monitor_address_and_port(self):
|
||||
api_member = self.create_member(
|
||||
self.pool_with_listener_id, '10.0.0.1', 80,
|
||||
self.pool_with_listener_id, '192.0.2.1', 80,
|
||||
monitor_address='192.0.2.3',
|
||||
monitor_port=80).get(self.root_tag)
|
||||
self.assertEqual('10.0.0.1', api_member['address'])
|
||||
self.assertEqual('192.0.2.1', api_member['address'])
|
||||
self.assertEqual(80, api_member['protocol_port'])
|
||||
self.assertEqual('192.0.2.3', api_member['monitor_address'])
|
||||
self.assertEqual(80, api_member['monitor_port'])
|
||||
@ -625,7 +636,7 @@ class TestMember(base.BaseAPITest):
|
||||
1, 1, 1, 1)
|
||||
self.set_lb_status(self.lb_id)
|
||||
api_member = self.create_member(
|
||||
self.pool_with_listener_id, '10.0.0.1', 80).get(self.root_tag)
|
||||
self.pool_with_listener_id, '192.0.2.1', 80).get(self.root_tag)
|
||||
self.assert_correct_status(
|
||||
lb_id=self.lb_id, listener_id=self.listener_id,
|
||||
pool_id=self.pool_with_listener_id, member_id=api_member.get('id'),
|
||||
@ -636,7 +647,7 @@ class TestMember(base.BaseAPITest):
|
||||
member_op_status=constants.OFFLINE)
|
||||
|
||||
def test_duplicate_create(self):
|
||||
member = {'address': '10.0.0.1', 'protocol_port': 80,
|
||||
member = {'address': '192.0.2.1', 'protocol_port': 80,
|
||||
'project_id': self.project_id}
|
||||
self.post(self.members_path, self._build_body(member))
|
||||
self.set_lb_status(self.lb_id)
|
||||
@ -648,7 +659,7 @@ class TestMember(base.BaseAPITest):
|
||||
net_mock.return_value.get_subnet = mock.Mock(
|
||||
side_effect=network_base.SubnetNotFound('Subnet not found'))
|
||||
subnet_id = uuidutils.generate_uuid()
|
||||
response = self.create_member(self.pool_id, '10.0.0.1', 80,
|
||||
response = self.create_member(self.pool_id, '192.0.2.1', 80,
|
||||
subnet_id=subnet_id, status=400)
|
||||
err_msg = 'Subnet ' + subnet_id + ' not found.'
|
||||
self.assertEqual(response.get('faultstring'), err_msg)
|
||||
@ -659,20 +670,20 @@ class TestMember(base.BaseAPITest):
|
||||
subnet_id = uuidutils.generate_uuid()
|
||||
net_mock.return_value.get_subnet.return_value = subnet_id
|
||||
response = self.create_member(
|
||||
self.pool_id, '10.0.0.1', 80,
|
||||
self.pool_id, '192.0.2.1', 80,
|
||||
subnet_id=subnet_id).get(self.root_tag)
|
||||
self.assertEqual('10.0.0.1', response['address'])
|
||||
self.assertEqual('192.0.2.1', response['address'])
|
||||
self.assertEqual(80, response['protocol_port'])
|
||||
self.assertEqual(subnet_id, response['subnet_id'])
|
||||
|
||||
def test_create_bad_port_number(self):
|
||||
member = {'address': '10.0.0.3',
|
||||
member = {'address': '192.0.2.3',
|
||||
'protocol_port': constants.MIN_PORT_NUMBER - 1}
|
||||
resp = self.post(self.members_path, self._build_body(member),
|
||||
status=400)
|
||||
self.assertIn('Value should be greater or equal to',
|
||||
resp.json.get('faultstring'))
|
||||
member = {'address': '10.0.0.3',
|
||||
member = {'address': '192.0.2.3',
|
||||
'protocol_port': constants.MAX_PORT_NUMBER + 1}
|
||||
resp = self.post(self.members_path, self._build_body(member),
|
||||
status=400)
|
||||
@ -681,14 +692,14 @@ class TestMember(base.BaseAPITest):
|
||||
|
||||
def test_create_over_quota(self):
|
||||
self.start_quota_mock(data_models.Member)
|
||||
member = {'address': '10.0.0.3', 'protocol_port': 81}
|
||||
member = {'address': '192.0.2.3', 'protocol_port': 81}
|
||||
self.post(self.members_path, self._build_body(member), status=403)
|
||||
|
||||
def test_update_with_attached_listener(self):
|
||||
old_name = "name1"
|
||||
new_name = "name2"
|
||||
api_member = self.create_member(
|
||||
self.pool_with_listener_id, '10.0.0.1', 80,
|
||||
self.pool_with_listener_id, '192.0.2.1', 80,
|
||||
name=old_name).get(self.root_tag)
|
||||
self.set_lb_status(self.lb_id)
|
||||
new_member = {'name': new_name}
|
||||
@ -714,7 +725,7 @@ class TestMember(base.BaseAPITest):
|
||||
old_name = "name1"
|
||||
new_name = "name2"
|
||||
api_member = self.create_member(
|
||||
self.pool_with_listener_id, '10.0.0.1', 80,
|
||||
self.pool_with_listener_id, '192.0.2.1', 80,
|
||||
name=old_name).get(self.root_tag)
|
||||
self.set_lb_status(self.lb_id)
|
||||
new_member = {'name': new_name}
|
||||
@ -766,7 +777,7 @@ class TestMember(base.BaseAPITest):
|
||||
old_name = "name1"
|
||||
new_name = "name2"
|
||||
api_member = self.create_member(
|
||||
self.pool_with_listener_id, '10.0.0.1', 80,
|
||||
self.pool_with_listener_id, '192.0.2.1', 80,
|
||||
name=old_name).get(self.root_tag)
|
||||
self.set_lb_status(self.lb_id)
|
||||
new_member = {'name': new_name}
|
||||
@ -796,7 +807,7 @@ class TestMember(base.BaseAPITest):
|
||||
old_name = "name1"
|
||||
new_name = "name2"
|
||||
api_member = self.create_member(
|
||||
self.pool_id, '10.0.0.1', 80, name=old_name).get(self.root_tag)
|
||||
self.pool_id, '192.0.2.1', 80, name=old_name).get(self.root_tag)
|
||||
self.set_lb_status(self.lb_id)
|
||||
member_path = self.member_path.format(
|
||||
member_id=api_member.get('id'))
|
||||
@ -820,14 +831,14 @@ class TestMember(base.BaseAPITest):
|
||||
|
||||
def test_bad_update(self):
|
||||
api_member = self.create_member(
|
||||
self.pool_id, '10.0.0.1', 80).get(self.root_tag)
|
||||
self.pool_id, '192.0.2.1', 80).get(self.root_tag)
|
||||
new_member = {'protocol_port': 'ten'}
|
||||
self.put(self.member_path.format(member_id=api_member.get('id')),
|
||||
self._build_body(new_member), status=400)
|
||||
|
||||
def test_update_with_bad_handler(self):
|
||||
api_member = self.create_member(
|
||||
self.pool_with_listener_id, '10.0.0.1', 80,
|
||||
self.pool_with_listener_id, '192.0.2.1', 80,
|
||||
name="member1").get(self.root_tag)
|
||||
self.set_lb_status(self.lb_id)
|
||||
new_member = {'name': "member2"}
|
||||
@ -841,7 +852,7 @@ class TestMember(base.BaseAPITest):
|
||||
|
||||
def test_delete(self):
|
||||
api_member = self.create_member(
|
||||
self.pool_with_listener_id, '10.0.0.1', 80).get(self.root_tag)
|
||||
self.pool_with_listener_id, '192.0.2.1', 80).get(self.root_tag)
|
||||
self.set_lb_status(self.lb_id)
|
||||
member = self.get(self.member_path_listener.format(
|
||||
member_id=api_member.get('id'))).json.get(self.root_tag)
|
||||
@ -871,7 +882,7 @@ class TestMember(base.BaseAPITest):
|
||||
|
||||
def test_delete_authorized(self):
|
||||
api_member = self.create_member(
|
||||
self.pool_with_listener_id, '10.0.0.1', 80).get(self.root_tag)
|
||||
self.pool_with_listener_id, '192.0.2.1', 80).get(self.root_tag)
|
||||
self.set_lb_status(self.lb_id)
|
||||
member = self.get(self.member_path_listener.format(
|
||||
member_id=api_member.get('id'))).json.get(self.root_tag)
|
||||
@ -925,7 +936,7 @@ class TestMember(base.BaseAPITest):
|
||||
|
||||
def test_delete_not_authorized(self):
|
||||
api_member = self.create_member(
|
||||
self.pool_with_listener_id, '10.0.0.1', 80).get(self.root_tag)
|
||||
self.pool_with_listener_id, '192.0.2.1', 80).get(self.root_tag)
|
||||
self.set_lb_status(self.lb_id)
|
||||
member = self.get(self.member_path_listener.format(
|
||||
member_id=api_member.get('id'))).json.get(self.root_tag)
|
||||
@ -958,7 +969,7 @@ class TestMember(base.BaseAPITest):
|
||||
|
||||
def test_delete_with_bad_handler(self):
|
||||
api_member = self.create_member(
|
||||
self.pool_with_listener_id, '10.0.0.1', 80).get(self.root_tag)
|
||||
self.pool_with_listener_id, '192.0.2.1', 80).get(self.root_tag)
|
||||
self.set_lb_status(self.lb_id)
|
||||
member = self.get(self.member_path_listener.format(
|
||||
member_id=api_member.get('id'))).json.get(self.root_tag)
|
||||
@ -979,12 +990,12 @@ class TestMember(base.BaseAPITest):
|
||||
member_prov_status=constants.ERROR)
|
||||
|
||||
def test_create_when_lb_pending_update(self):
|
||||
self.create_member(self.pool_id, address="10.0.0.2",
|
||||
self.create_member(self.pool_id, address="192.0.2.2",
|
||||
protocol_port=80)
|
||||
self.set_lb_status(self.lb_id)
|
||||
self.put(self.LB_PATH.format(lb_id=self.lb_id),
|
||||
body={'loadbalancer': {'name': 'test_name_change'}})
|
||||
member = {'address': '10.0.0.1', 'protocol_port': 80,
|
||||
member = {'address': '192.0.2.1', 'protocol_port': 80,
|
||||
'project_id': self.project_id}
|
||||
self.post(self.members_path,
|
||||
body=self._build_body(member),
|
||||
@ -992,7 +1003,7 @@ class TestMember(base.BaseAPITest):
|
||||
|
||||
def test_update_when_lb_pending_update(self):
|
||||
member = self.create_member(
|
||||
self.pool_id, address="10.0.0.1", protocol_port=80,
|
||||
self.pool_id, address="192.0.2.1", protocol_port=80,
|
||||
name="member1").get(self.root_tag)
|
||||
self.set_lb_status(self.lb_id)
|
||||
self.put(self.LB_PATH.format(lb_id=self.lb_id),
|
||||
@ -1003,7 +1014,7 @@ class TestMember(base.BaseAPITest):
|
||||
|
||||
def test_delete_when_lb_pending_update(self):
|
||||
member = self.create_member(
|
||||
self.pool_id, address="10.0.0.1",
|
||||
self.pool_id, address="192.0.2.1",
|
||||
protocol_port=80).get(self.root_tag)
|
||||
self.set_lb_status(self.lb_id)
|
||||
self.put(self.LB_PATH.format(lb_id=self.lb_id),
|
||||
@ -1012,19 +1023,19 @@ class TestMember(base.BaseAPITest):
|
||||
member_id=member.get('id')), status=409)
|
||||
|
||||
def test_create_when_lb_pending_delete(self):
|
||||
self.create_member(self.pool_id, address="10.0.0.1",
|
||||
self.create_member(self.pool_id, address="192.0.2.1",
|
||||
protocol_port=80)
|
||||
self.set_lb_status(self.lb_id)
|
||||
self.delete(self.LB_PATH.format(lb_id=self.lb_id),
|
||||
params={'cascade': "true"})
|
||||
member = {'address': '10.0.0.2', 'protocol_port': 88,
|
||||
member = {'address': '192.0.2.2', 'protocol_port': 88,
|
||||
'project_id': self.project_id}
|
||||
self.post(self.members_path, body=self._build_body(member),
|
||||
status=409)
|
||||
|
||||
def test_update_when_lb_pending_delete(self):
|
||||
member = self.create_member(
|
||||
self.pool_id, address="10.0.0.1", protocol_port=80,
|
||||
self.pool_id, address="192.0.2.1", protocol_port=80,
|
||||
name="member1").get(self.root_tag)
|
||||
self.set_lb_status(self.lb_id)
|
||||
self.delete(self.LB_PATH.format(lb_id=self.lb_id),
|
||||
@ -1034,7 +1045,7 @@ class TestMember(base.BaseAPITest):
|
||||
|
||||
def test_delete_when_lb_pending_delete(self):
|
||||
member = self.create_member(
|
||||
self.pool_id, address="10.0.0.1",
|
||||
self.pool_id, address="192.0.2.1",
|
||||
protocol_port=80).get(self.root_tag)
|
||||
self.set_lb_status(self.lb_id)
|
||||
self.delete(self.LB_PATH.format(lb_id=self.lb_id),
|
||||
@ -1044,7 +1055,7 @@ class TestMember(base.BaseAPITest):
|
||||
|
||||
def test_delete_already_deleted(self):
|
||||
member = self.create_member(
|
||||
self.pool_id, address="10.0.0.1",
|
||||
self.pool_id, address="192.0.2.1",
|
||||
protocol_port=80).get(self.root_tag)
|
||||
self.set_lb_status(self.lb_id, status=constants.DELETED)
|
||||
self.delete(self.member_path.format(
|
||||
|
@ -104,7 +104,8 @@ class ModelTestMixin(object):
|
||||
'protocol_port': 80,
|
||||
'provisioning_status': constants.ACTIVE,
|
||||
'operating_status': constants.ONLINE,
|
||||
'enabled': True}
|
||||
'enabled': True,
|
||||
'backup': False}
|
||||
kwargs.update(overrides)
|
||||
return self._insert(session, models.Member, kwargs)
|
||||
|
||||
|
@ -335,7 +335,7 @@ class AllRepositoriesTest(base.OctaviaDBTestBase):
|
||||
def test_create_load_balancer_tree(self):
|
||||
project_id = uuidutils.generate_uuid()
|
||||
member = {'project_id': project_id, 'ip_address': '11.0.0.1',
|
||||
'protocol_port': 80, 'enabled': True,
|
||||
'protocol_port': 80, 'enabled': True, 'backup': False,
|
||||
'operating_status': constants.ONLINE,
|
||||
'provisioning_status': constants.PENDING_CREATE,
|
||||
'id': uuidutils.generate_uuid()}
|
||||
@ -1472,7 +1472,7 @@ class AllRepositoriesTest(base.OctaviaDBTestBase):
|
||||
ip_address='192.0.2.1', protocol_port=80,
|
||||
provisioning_status=constants.ACTIVE,
|
||||
operating_status=constants.ONLINE,
|
||||
enabled=True, pool_id=pool.id)
|
||||
enabled=True, pool_id=pool.id, backup=False)
|
||||
self.assertTrue(self.repos.check_quota_met(self.session,
|
||||
self.session,
|
||||
models.Member,
|
||||
@ -1501,7 +1501,7 @@ class AllRepositoriesTest(base.OctaviaDBTestBase):
|
||||
ip_address='192.0.2.1', protocol_port=80,
|
||||
provisioning_status=constants.DELETED,
|
||||
operating_status=constants.ONLINE,
|
||||
enabled=True, pool_id=pool.id)
|
||||
enabled=True, pool_id=pool.id, backup=False)
|
||||
self.assertFalse(self.repos.check_quota_met(self.session,
|
||||
self.session,
|
||||
models.Member,
|
||||
@ -1887,7 +1887,8 @@ class PoolRepositoryTest(BaseRepositoryTest):
|
||||
ip_address="10.0.0.1",
|
||||
protocol_port=80, enabled=True,
|
||||
provisioning_status=constants.ACTIVE,
|
||||
operating_status=constants.ONLINE)
|
||||
operating_status=constants.ONLINE,
|
||||
backup=False)
|
||||
new_pool = self.pool_repo.get(self.session, id=pool.id)
|
||||
self.assertEqual(1, len(new_pool.members))
|
||||
self.assertEqual(member, new_pool.members[0])
|
||||
@ -1941,7 +1942,8 @@ class PoolRepositoryTest(BaseRepositoryTest):
|
||||
protocol_port=80,
|
||||
provisioning_status=constants.ACTIVE,
|
||||
operating_status=constants.ONLINE,
|
||||
enabled=True)
|
||||
enabled=True,
|
||||
backup=False)
|
||||
sp = self.sp_repo.create(
|
||||
self.session, pool_id=pool.id,
|
||||
type=constants.SESSION_PERSISTENCE_HTTP_COOKIE,
|
||||
@ -1980,7 +1982,8 @@ class MemberRepositoryTest(BaseRepositoryTest):
|
||||
protocol_port=80,
|
||||
operating_status=constants.ONLINE,
|
||||
provisioning_status=constants.ACTIVE,
|
||||
enabled=True)
|
||||
enabled=True,
|
||||
backup=False)
|
||||
return member
|
||||
|
||||
def test_get(self):
|
||||
|
@ -48,6 +48,7 @@ class TestHaproxyCfg(base.TestCase):
|
||||
" option httpchk GET /index.html\n"
|
||||
" http-check expect rstatus 418\n"
|
||||
" fullconn 98\n"
|
||||
" option allbackups\n"
|
||||
" server sample_member_id_1 10.0.0.99:82 "
|
||||
"weight 13 check inter 30s fall 3 rise 2 "
|
||||
"cookie sample_member_id_1\n"
|
||||
@ -85,6 +86,7 @@ class TestHaproxyCfg(base.TestCase):
|
||||
" option httpchk GET /index.html\n"
|
||||
" http-check expect rstatus 418\n"
|
||||
" fullconn 98\n"
|
||||
" option allbackups\n"
|
||||
" server sample_member_id_1 10.0.0.99:82 "
|
||||
"weight 13 check inter 30s fall 3 rise 2 "
|
||||
"cookie sample_member_id_1\n"
|
||||
@ -113,6 +115,7 @@ class TestHaproxyCfg(base.TestCase):
|
||||
" option httpchk GET /index.html\n"
|
||||
" http-check expect rstatus 418\n"
|
||||
" fullconn 98\n"
|
||||
" option allbackups\n"
|
||||
" server sample_member_id_1 10.0.0.99:82 "
|
||||
"weight 13 check inter 30s fall 3 rise 2 "
|
||||
"cookie sample_member_id_1\n"
|
||||
@ -126,6 +129,32 @@ class TestHaproxyCfg(base.TestCase):
|
||||
sample_configs.sample_base_expected_config(backend=be),
|
||||
rendered_obj)
|
||||
|
||||
def test_render_template_member_backup(self):
|
||||
be = ("backend sample_pool_id_1\n"
|
||||
" mode http\n"
|
||||
" balance roundrobin\n"
|
||||
" cookie SRV insert indirect nocache\n"
|
||||
" timeout check 31s\n"
|
||||
" option httpchk GET /index.html\n"
|
||||
" http-check expect rstatus 418\n"
|
||||
" fullconn 98\n"
|
||||
" option allbackups\n"
|
||||
" server sample_member_id_1 10.0.0.99:82 "
|
||||
"weight 13 check inter 30s fall 3 rise 2 "
|
||||
"addr 192.168.1.1 port 9000 "
|
||||
"cookie sample_member_id_1\n"
|
||||
" server sample_member_id_2 10.0.0.98:82 "
|
||||
"weight 13 check inter 30s fall 3 rise 2 "
|
||||
"addr 192.168.1.1 port 9000 "
|
||||
"cookie sample_member_id_2 backup\n\n")
|
||||
rendered_obj = self.jinja_cfg.render_loadbalancer_obj(
|
||||
sample_configs.sample_amphora_tuple(),
|
||||
sample_configs.sample_listener_tuple(monitor_ip_port=True,
|
||||
backup_member=True))
|
||||
self.assertEqual(
|
||||
sample_configs.sample_base_expected_config(backend=be),
|
||||
rendered_obj)
|
||||
|
||||
def test_render_template_member_monitor_addr_port(self):
|
||||
be = ("backend sample_pool_id_1\n"
|
||||
" mode http\n"
|
||||
@ -135,6 +164,7 @@ class TestHaproxyCfg(base.TestCase):
|
||||
" option httpchk GET /index.html\n"
|
||||
" http-check expect rstatus 418\n"
|
||||
" fullconn 98\n"
|
||||
" option allbackups\n"
|
||||
" server sample_member_id_1 10.0.0.99:82 "
|
||||
"weight 13 check inter 30s fall 3 rise 2 "
|
||||
"addr 192.168.1.1 port 9000 "
|
||||
@ -165,6 +195,7 @@ class TestHaproxyCfg(base.TestCase):
|
||||
" option httpchk GET /index.html\n"
|
||||
" http-check expect rstatus 418\n"
|
||||
" fullconn 98\n"
|
||||
" option allbackups\n"
|
||||
" server sample_member_id_1 10.0.0.99:82 "
|
||||
"weight 13 check check-ssl verify none inter 30s fall 3 rise 2 "
|
||||
"cookie sample_member_id_1\n"
|
||||
@ -191,6 +222,7 @@ class TestHaproxyCfg(base.TestCase):
|
||||
" timeout check 31s\n"
|
||||
" option ssl-hello-chk\n"
|
||||
" fullconn 98\n"
|
||||
" option allbackups\n"
|
||||
" server sample_member_id_1 10.0.0.99:82 "
|
||||
"weight 13 check inter 30s fall 3 rise 2 "
|
||||
"cookie sample_member_id_1\n"
|
||||
@ -210,6 +242,7 @@ class TestHaproxyCfg(base.TestCase):
|
||||
" balance roundrobin\n"
|
||||
" cookie SRV insert indirect nocache\n"
|
||||
" fullconn 98\n"
|
||||
" option allbackups\n"
|
||||
" server sample_member_id_1 10.0.0.99:82 weight 13 "
|
||||
"cookie sample_member_id_1\n"
|
||||
" server sample_member_id_2 10.0.0.98:82 weight 13 "
|
||||
@ -229,6 +262,7 @@ class TestHaproxyCfg(base.TestCase):
|
||||
" option external-check\n"
|
||||
" external-check command /var/lib/octavia/ping-wrapper.sh\n"
|
||||
" fullconn 98\n"
|
||||
" option allbackups\n"
|
||||
" server sample_member_id_1 10.0.0.99:82 "
|
||||
"weight 13 check inter 30s fall 3 rise 2 "
|
||||
"cookie sample_member_id_1\n"
|
||||
@ -255,6 +289,7 @@ class TestHaproxyCfg(base.TestCase):
|
||||
" balance roundrobin\n"
|
||||
" cookie SRV insert indirect nocache\n"
|
||||
" fullconn 98\n"
|
||||
" option allbackups\n"
|
||||
" server sample_member_id_1 10.0.0.99:82 weight 13 "
|
||||
"cookie sample_member_id_1\n"
|
||||
" server sample_member_id_2 10.0.0.98:82 weight 13 "
|
||||
@ -276,6 +311,7 @@ class TestHaproxyCfg(base.TestCase):
|
||||
" mode tcp\n"
|
||||
" balance roundrobin\n"
|
||||
" fullconn 98\n"
|
||||
" option allbackups\n"
|
||||
" server sample_member_id_1 10.0.0.99:82 weight 13\n"
|
||||
" server sample_member_id_2 10.0.0.98:82 weight 13\n\n")
|
||||
rendered_obj = self.jinja_cfg.render_loadbalancer_obj(
|
||||
@ -290,6 +326,7 @@ class TestHaproxyCfg(base.TestCase):
|
||||
" mode http\n"
|
||||
" balance roundrobin\n"
|
||||
" fullconn 98\n"
|
||||
" option allbackups\n"
|
||||
" server sample_member_id_1 10.0.0.99:82 weight 13\n"
|
||||
" server sample_member_id_2 10.0.0.98:82 weight 13\n\n")
|
||||
rendered_obj = self.jinja_cfg.render_loadbalancer_obj(
|
||||
@ -309,6 +346,7 @@ class TestHaproxyCfg(base.TestCase):
|
||||
" option httpchk GET /index.html\n"
|
||||
" http-check expect rstatus 418\n"
|
||||
" fullconn 98\n"
|
||||
" option allbackups\n"
|
||||
" server sample_member_id_1 10.0.0.99:82 "
|
||||
"weight 13 check inter 30s fall 3 rise 2\n"
|
||||
" server sample_member_id_2 10.0.0.98:82 "
|
||||
@ -332,6 +370,7 @@ class TestHaproxyCfg(base.TestCase):
|
||||
" option httpchk GET /index.html\n"
|
||||
" http-check expect rstatus 418\n"
|
||||
" fullconn 98\n"
|
||||
" option allbackups\n"
|
||||
" server sample_member_id_1 10.0.0.99:82 "
|
||||
"weight 13 check inter 30s fall 3 rise 2\n"
|
||||
" server sample_member_id_2 10.0.0.98:82 "
|
||||
@ -373,6 +412,7 @@ class TestHaproxyCfg(base.TestCase):
|
||||
" option httpchk GET /index.html\n"
|
||||
" http-check expect rstatus 418\n"
|
||||
" fullconn 98\n"
|
||||
" option allbackups\n"
|
||||
" server sample_member_id_1 10.0.0.99:82 weight 13 check "
|
||||
"inter 30s fall 3 rise 2 cookie sample_member_id_1\n"
|
||||
" server sample_member_id_2 10.0.0.98:82 weight 13 check "
|
||||
@ -386,6 +426,7 @@ class TestHaproxyCfg(base.TestCase):
|
||||
" option httpchk GET /healthmon.html\n"
|
||||
" http-check expect rstatus 418\n"
|
||||
" fullconn 98\n"
|
||||
" option allbackups\n"
|
||||
" server sample_member_id_3 10.0.0.97:82 weight 13 check "
|
||||
"inter 30s fall 3 rise 2 cookie sample_member_id_3\n\n")
|
||||
rendered_obj = self.jinja_cfg.render_loadbalancer_obj(
|
||||
@ -404,6 +445,7 @@ class TestHaproxyCfg(base.TestCase):
|
||||
" http-check expect rstatus 418\n"
|
||||
" option forwardfor\n"
|
||||
" fullconn 98\n"
|
||||
" option allbackups\n"
|
||||
" server sample_member_id_1 10.0.0.99:82 "
|
||||
"weight 13 check inter 30s fall 3 rise 2 "
|
||||
"cookie sample_member_id_1\n"
|
||||
@ -429,6 +471,7 @@ class TestHaproxyCfg(base.TestCase):
|
||||
" option forwardfor\n"
|
||||
" http-request set-header X-Forwarded-Port %[dst_port]\n"
|
||||
" fullconn 98\n"
|
||||
" option allbackups\n"
|
||||
" server sample_member_id_1 10.0.0.99:82 "
|
||||
"weight 13 check inter 30s fall 3 rise 2 "
|
||||
"cookie sample_member_id_1\n"
|
||||
@ -451,6 +494,7 @@ class TestHaproxyCfg(base.TestCase):
|
||||
" cookie SRV insert indirect nocache\n"
|
||||
" timeout check 31s\n"
|
||||
" fullconn 98\n"
|
||||
" option allbackups\n"
|
||||
" server sample_member_id_1 10.0.0.99:82 "
|
||||
"weight 13 check inter 30s fall 3 rise 2 "
|
||||
"cookie sample_member_id_1 send-proxy\n"
|
||||
|
@ -74,7 +74,8 @@ RET_MEMBER_1 = {
|
||||
'enabled': True,
|
||||
'operating_status': 'ACTIVE',
|
||||
'monitor_address': None,
|
||||
'monitor_port': None}
|
||||
'monitor_port': None,
|
||||
'backup': False}
|
||||
|
||||
RET_MEMBER_2 = {
|
||||
'id': 'sample_member_id_2',
|
||||
@ -85,7 +86,8 @@ RET_MEMBER_2 = {
|
||||
'enabled': True,
|
||||
'operating_status': 'ACTIVE',
|
||||
'monitor_address': None,
|
||||
'monitor_port': None}
|
||||
'monitor_port': None,
|
||||
'backup': False}
|
||||
|
||||
RET_MEMBER_3 = {
|
||||
'id': 'sample_member_id_3',
|
||||
@ -96,7 +98,8 @@ RET_MEMBER_3 = {
|
||||
'enabled': True,
|
||||
'operating_status': 'ACTIVE',
|
||||
'monitor_address': None,
|
||||
'monitor_port': None}
|
||||
'monitor_port': None,
|
||||
'backup': False}
|
||||
|
||||
RET_POOL_1 = {
|
||||
'id': 'sample_pool_id_1',
|
||||
@ -406,7 +409,7 @@ def sample_listener_tuple(proto=None, monitor=True, persistence=True,
|
||||
tls=False, sni=False, peer_port=None, topology=None,
|
||||
l7=False, enabled=True, insert_headers=None,
|
||||
be_proto=None, monitor_ip_port=False,
|
||||
monitor_proto=None):
|
||||
monitor_proto=None, backup_member=False):
|
||||
proto = 'HTTP' if proto is None else proto
|
||||
if be_proto is None:
|
||||
be_proto = 'HTTP' if proto is 'TERMINATED_HTTPS' else proto
|
||||
@ -445,7 +448,8 @@ def sample_listener_tuple(proto=None, monitor=True, persistence=True,
|
||||
proto=be_proto, monitor=monitor, persistence=persistence,
|
||||
persistence_type=persistence_type,
|
||||
persistence_cookie=persistence_cookie,
|
||||
monitor_ip_port=monitor_ip_port, monitor_proto=monitor_proto)]
|
||||
monitor_ip_port=monitor_ip_port, monitor_proto=monitor_proto,
|
||||
backup_member=backup_member)]
|
||||
l7policies = []
|
||||
return in_listener(
|
||||
id='sample_listener_id_1',
|
||||
@ -517,7 +521,7 @@ def sample_tls_container_tuple(id='cont_id_1', certificate=None,
|
||||
def sample_pool_tuple(proto=None, monitor=True, persistence=True,
|
||||
persistence_type=None, persistence_cookie=None,
|
||||
sample_pool=1, monitor_ip_port=False,
|
||||
monitor_proto=None):
|
||||
monitor_proto=None, backup_member=False):
|
||||
proto = 'HTTP' if proto is None else proto
|
||||
monitor_proto = proto if monitor_proto is None else monitor_proto
|
||||
in_pool = collections.namedtuple(
|
||||
@ -532,7 +536,8 @@ def sample_pool_tuple(proto=None, monitor=True, persistence=True,
|
||||
members = [sample_member_tuple('sample_member_id_1', '10.0.0.99',
|
||||
monitor_ip_port=monitor_ip_port),
|
||||
sample_member_tuple('sample_member_id_2', '10.0.0.98',
|
||||
monitor_ip_port=monitor_ip_port)]
|
||||
monitor_ip_port=monitor_ip_port,
|
||||
backup=backup_member)]
|
||||
if monitor is True:
|
||||
mon = sample_health_monitor_tuple(proto=monitor_proto)
|
||||
elif sample_pool == 2:
|
||||
@ -553,12 +558,13 @@ def sample_pool_tuple(proto=None, monitor=True, persistence=True,
|
||||
|
||||
|
||||
def sample_member_tuple(id, ip, enabled=True, operating_status='ACTIVE',
|
||||
monitor_ip_port=False):
|
||||
monitor_ip_port=False, backup=False):
|
||||
in_member = collections.namedtuple('member',
|
||||
'id, ip_address, protocol_port, '
|
||||
'weight, subnet_id, '
|
||||
'enabled, operating_status, '
|
||||
'monitor_address, monitor_port')
|
||||
'monitor_address, monitor_port, '
|
||||
'backup')
|
||||
monitor_address = '192.168.1.1' if monitor_ip_port else None
|
||||
monitor_port = 9000 if monitor_ip_port else None
|
||||
return in_member(
|
||||
@ -570,7 +576,8 @@ def sample_member_tuple(id, ip, enabled=True, operating_status='ACTIVE',
|
||||
enabled=enabled,
|
||||
operating_status=operating_status,
|
||||
monitor_address=monitor_address,
|
||||
monitor_port=monitor_port)
|
||||
monitor_port=monitor_port,
|
||||
backup=backup)
|
||||
|
||||
|
||||
def sample_session_persistence_tuple(persistence_type=None,
|
||||
@ -713,6 +720,7 @@ def sample_base_expected_config(frontend=None, backend=None,
|
||||
" option httpchk GET /index.html\n"
|
||||
" http-check expect rstatus 418\n"
|
||||
" fullconn 98\n"
|
||||
" option allbackups\n"
|
||||
" server sample_member_id_1 10.0.0.99:82 weight 13 "
|
||||
"check inter 30s fall 3 rise 2 cookie sample_member_id_1\n"
|
||||
" server sample_member_id_2 10.0.0.98:82 weight 13 "
|
||||
|
@@ -0,0 +1,7 @@
+---
+features:
+  - |
+    Members have a new boolean option ``backup``. When set to ``true``, the
+    member will not receive traffic until all non-backup members are offline.
+    Once all non-backup members are offline, traffic will begin balancing
+    between the backup members.
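To make the release note concrete, here is a small illustrative sketch (plain Python, not Octavia or HAProxy code) of the selection rule it describes: backup members only become eligible once every non-backup member is down.

```python
# Illustration only: the primary/backup semantics described above, reduced to
# a pure-Python helper. Real traffic selection is done by HAProxy itself.
def eligible_members(members):
    """Return the members that should currently receive traffic."""
    primaries = [m for m in members if not m["backup"] and m["up"]]
    if primaries:
        return primaries
    # All non-backup members are down, so fall back to the backup set.
    return [m for m in members if m["backup"] and m["up"]]

pool = [
    {"name": "web1", "backup": False, "up": False},
    {"name": "web2", "backup": False, "up": False},
    {"name": "spare", "backup": True, "up": True},
]
print([m["name"] for m in eligible_members(pool)])  # ['spare']
```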