Browse Source

Check LB "provisioning_status" and improve the Netcat Cirros HTTP stub

We need to check LB "provisioning_status" in addition to
"operating_status" to make sure that the LB is ACTIVE.

Improve the Netcat command used to emulate an HTTP server/stub on Cirros
members. The current implementation doesn't send any HTTP response
headers back to the client (not standard HTTP), so tools such as wget
and curl run into issues because of that, for example:
curl - "curl: (56) Recv failure: Connection reset by peer"
wget - "Length: unspecified" + retries.

Change-Id: I650d7bc09a19264f8a9f811b6cd5bac7a26c107e
changes/80/775680/2
Arkady Shtempler 6 months ago
parent
commit
0e1ed4901c
  1. 12
      tobiko/openstack/stacks/_octavia.py
  2. 3
      tobiko/tests/scenario/octavia/test_traffic.py
  3. 31
      tobiko/tests/scenario/octavia/waiters.py

12
tobiko/openstack/stacks/_octavia.py

@@ -68,9 +68,15 @@ class OctaviaCirrosServerStackFixture(_cirros.CirrosServerStackFixture):
# clients connect at the same time. For concurrency testing,
# OctaviaCentosServerStackFixture is more suited to handle multiple
# requests.
return ("#!/bin/sh\n"
"sudo nc -k -p 80 -e echo -e \"HTTP/1.0 200 OK\r\n"
"\r\n$(hostname)\"\n")
return (
"#!/bin/sh\n"
"sudo nc -k -p 80 -e echo -e \"HTTP/1.1 200 OK\r\n"
"Content-Length: $(hostname | head -c-1 | wc -c )\r\n"
"Server: $(hostname)\r\n"
"Content-type: text/html; charset=utf-8\r\n"
"Connection: close\r\n\r\n"
"$(hostname)\"\n")
class OctaviaServerStackFixture(OctaviaCirrosServerStackFixture):

3
tobiko/tests/scenario/octavia/test_traffic.py

@@ -61,6 +61,9 @@ class OctaviaBasicTrafficScenarioTest(octavia_base.OctaviaTest):
self.listener_stack,
self.member2_stack, self.request)
# Wait for LB is provisioned and ACTIVE
waiters.wait_for_loadbalancer_is_active(self.loadbalancer_stack)
# Check if load balancer is functional
waiters.wait_for_loadbalancer_functional(self.loadbalancer_stack,
self.client_stack,

31
tobiko/tests/scenario/octavia/waiters.py

@@ -53,6 +53,32 @@ def wait_lb_operating_status(lb_id, operating_status):
lb_id)
def wait_resource_provisioning_status(resource_type, provisioning_status,
                                      resource_get, *args):
    """Poll an Octavia resource until it reaches the wanted provisioning_status.

    :param resource_type: human-readable resource kind, used in the error
        message only (e.g. "loadbalancer").
    :param provisioning_status: status string to wait for (e.g. 'ACTIVE').
    :param resource_get: callable returning the resource dict; it must expose
        a 'provisioning_status' key.
    :param args: positional arguments forwarded to resource_get.
    :raises exceptions.TimeoutException: when the status is not reached
        within CONF.tobiko.octavia.check_timeout seconds.
    """
    deadline = time.time() + CONF.tobiko.octavia.check_timeout
    while time.time() < deadline:
        resource = resource_get(*args)
        if resource['provisioning_status'] == provisioning_status:
            return
        # Back off between polls to avoid hammering the API.
        time.sleep(CONF.tobiko.octavia.check_interval)
    message = ("Cannot get provisioning_status '{}' from {} {} "
               "within the timeout period.".format(provisioning_status,
                                                   resource_type, args))
    raise exceptions.TimeoutException(reason=message)
def wait_lb_provisioning_status(lb_id, provisioning_status):
    """Wait until the load balancer reports the given provisioning_status.

    Thin wrapper around wait_resource_provisioning_status that fetches the
    load balancer by ID via the Octavia client.
    """
    message = ("Wait for loadbalancer {} to have '{}' "
               "provisioning_status".format(lb_id, provisioning_status))
    LOG.debug(message)
    wait_resource_provisioning_status(
        "loadbalancer", provisioning_status, octavia.get_loadbalancer, lb_id)
def wait_for_request_data(client_stack, server_ip_address,
server_protocol, server_port, request_function):
"""Wait until a request on a server succeeds
@@ -79,6 +105,11 @@ def wait_for_request_data(client_stack, server_ip_address,
server_ip_address, server_port, server_protocol)))
def wait_for_loadbalancer_is_active(loadbalancer_stack):
    """Block until the stack's load balancer provisioning_status is ACTIVE."""
    wait_lb_provisioning_status(loadbalancer_stack.loadbalancer_id, 'ACTIVE')
def wait_for_loadbalancer_functional(loadbalancer_stack, client_stack,
loadbalancer_vip, loadbalancer_protocol,
loadbalancer_port, request_function):

Loading…
Cancel
Save