Change amphora statistics to use deltas
Amphora statistics packets should report deltas instead of absolutes for
all relevant metrics.

Change-Id: I5cf6f1f20f2c6f1da39982b2d88e036eefe48b2f
Co-Authored-By: Anushka Singh <anushka.singh.2511@gmail.com>
Co-Authored-By: Stephanie Djajadi <stephanie.djajadi@gmail.com>
parent 51b93c0022
commit d15cccff2f
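In short, the amphora keeps the last absolute counter values it read from HAProxy (and from the UDP keepalived/LVS stats) and reports only the difference since the previous heartbeat; the controller then adds those deltas to its stored totals instead of overwriting them. A minimal sketch of the delta logic, for illustration only (the actual implementation is the calculate_stats_deltas() helper added in this diff, which also persists the last-seen counters to disk):

    # Illustrative sketch only, not part of the patch.
    def compute_delta(last_value, current_value):
        delta = current_value - last_value
        if delta < 0:
            # HAProxy restarted and its counters reset to zero;
            # report the full current value instead of a negative delta.
            delta = current_value
        return delta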
@@ -17,10 +17,12 @@
 import errno
 import os
 import queue
+import stat
 import time
 
 from oslo_config import cfg
 from oslo_log import log as logging
+import simplejson
 
 from octavia.amphorae.backends.agent.api_server import util
 from octavia.amphorae.backends.health_daemon import health_sender
@@ -38,8 +40,57 @@ SEQ = 0
 #
 # ver 1 - Adds UDP listener status when no pool or members are present
 # ver 2 - Switch to all listeners in a single combined haproxy config
-#
-MSG_VER = 2
+# ver 3 - Switch stats reporting to deltas
+MSG_VER = 3
+
+DELTA_METRICS = ('bin', 'bout', 'ereq', 'stot')
+
+# Filesystem persistent counters for statistics deltas
+COUNTERS = None
+COUNTERS_FILE = None
+
+
+def get_counters_file():
+    global COUNTERS_FILE
+    if COUNTERS_FILE is None:
+        stats_file_path = os.path.join(
+            CONF.haproxy_amphora.base_path, "stats_counters.json")
+        # Open for read+write and create if necessary
+        flags = os.O_RDWR | os.O_CREAT
+        # mode 00644
+        mode = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP
+        try:
+            COUNTERS_FILE = os.fdopen(
+                os.open(stats_file_path, flags, mode), 'r+')
+        except OSError:
+            LOG.info("Failed to open `%s`, ignoring...", stats_file_path)
+        COUNTERS_FILE.seek(0)
+    return COUNTERS_FILE
+
+
+def get_counters():
+    global COUNTERS
+    if COUNTERS is None:
+        try:
+            COUNTERS = simplejson.load(get_counters_file()) or {}
+        except (simplejson.JSONDecodeError, AttributeError):
+            COUNTERS = {}
+    return COUNTERS
+
+
+def persist_counters():
+    """Attempt to persist the latest statistics values"""
+    if COUNTERS is None:
+        return
+    try:
+        stats = simplejson.dumps(COUNTERS)
+        counters_file = get_counters_file()
+        counters_file.truncate(0)
+        counters_file.write(stats)
+        counters_file.flush()
+    except (OSError, AttributeError):
+        LOG.warning("Couldn't persist statistics counter file!")
 
 
 def list_sock_stat_files(hadir=None):
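For reference, the counters file managed by the helpers above is plain JSON keyed by listener id, holding the last absolute value seen for each metric in DELTA_METRICS. Illustrative contents only (the id and the numbers are made up):

    {
        "<listener_id>": {"bin": 1234, "bout": 5678, "ereq": 2, "stot": 42}
    }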
@@ -83,11 +134,12 @@ def run_sender(cmd_queue):
                 LOG.error('Keepalived is configured but not running, '
                           'skipping health heartbeat.')
             else:
-                LOG.error('Failed to check keepalived and haproxy status due '
-                          'to exception %s, skipping health heartbeat.', e)
+                LOG.exception('Failed to check keepalived and haproxy status '
+                              'due to exception %s, skipping health '
+                              'heartbeat.', e)
         except Exception as e:
-            LOG.error('Failed to check keepalived and haproxy status due to '
-                      'exception %s, skipping health heartbeat.', e)
+            LOG.exception('Failed to check keepalived and haproxy status due '
+                          'to exception %s, skipping health heartbeat.', e)
 
         try:
             cmd = cmd_queue.get_nowait()
@@ -109,33 +161,59 @@ def get_stats(stat_sock_file):
     return stats, pool_status
 
 
+def calculate_stats_deltas(listener_id, row):
+    counters = get_counters()
+    listener_counters = counters.get(listener_id, {})
+    counters[listener_id] = listener_counters
+
+    delta_values = {}
+    for metric_key in DELTA_METRICS:
+        current_value = int(row[metric_key])
+        # Get existing counter for our metrics
+        last_value = listener_counters.get(metric_key, 0)
+        # Store the new absolute value
+        listener_counters[metric_key] = current_value
+        # Calculate a delta for each metric
+        delta = current_value - last_value
+        # Did HAProxy restart or reset counters?
+        if delta < 0:
+            delta = current_value  # If so, reset ours.
+        delta_values[metric_key] = delta
+
+    return delta_values
+
+
 def build_stats_message():
-    # Example version 2 message without UDP:
-    # {
-    #     "id": "<amphora_id>",
-    #     "seq": 67,
-    #     "listeners": {
-    #         "<listener_id>": {
-    #             "status": "OPEN",
-    #             "stats": {
-    #                 "tx": 0,
-    #                 "rx": 0,
-    #                 "conns": 0,
-    #                 "totconns": 0,
-    #                 "ereq": 0
-    #             }
-    #         }
-    #     },
-    #     "pools": {
-    #         "<pool_id>:<listener_id>": {
-    #             "status": "UP",
-    #             "members": {
-    #                 "<member_id>": "no check"
-    #             }
-    #         }
-    #     },
-    #     "ver": 2
-    # }
+    """Build a stats message based on retrieved listener statistics.
+
+    Example version 3 message without UDP (note that values are deltas,
+    not absolutes)::
+
+        {"id": "<amphora_id>",
+         "seq": 67,
+         "listeners": {
+             "<listener_id>": {
+                 "status": "OPEN",
+                 "stats": {
+                     "tx": 0,
+                     "rx": 0,
+                     "conns": 0,
+                     "totconns": 0,
+                     "ereq": 0
+                 }
+             }
+         },
+         "pools": {
+             "<pool_id>:<listener_id>": {
+                 "status": "UP",
+                 "members": {
+                     "<member_id>": "no check"
+                 }
+             }
+         },
+         "ver": 3
+        }
+    """
     global SEQ
     msg = {'id': CONF.amphora_agent.amphora_id,
            'seq': SEQ, 'listeners': {}, 'pools': {},
@@ -149,13 +227,14 @@ def build_stats_message():
             for row in stats:
                 if row['svname'] == 'FRONTEND':
                     listener_id = row['pxname']
+                    delta_values = calculate_stats_deltas(listener_id, row)
                     msg['listeners'][listener_id] = {
                         'status': row['status'],
-                        'stats': {'tx': int(row['bout']),
-                                  'rx': int(row['bin']),
+                        'stats': {'tx': delta_values['bout'],
+                                  'rx': delta_values['bin'],
                                   'conns': int(row['scur']),
-                                  'totconns': int(row['stot']),
-                                  'ereq': int(row['ereq'])}}
+                                  'totconns': delta_values['stot'],
+                                  'ereq': delta_values['ereq']}}
             for pool_id, pool in pool_status.items():
                 msg['pools'][pool_id] = {"status": pool['status'],
                                          "members": pool['members']}
@@ -166,16 +245,18 @@ def build_stats_message():
         listeners_stats = keepalivedlvs_query.get_udp_listeners_stats()
         if listeners_stats:
             for listener_id, listener_stats in listeners_stats.items():
+                delta_values = calculate_stats_deltas(
+                    listener_id, listener_stats['stats'])
                 pool_status = keepalivedlvs_query.get_udp_listener_pool_status(
                     listener_id)
                 udp_listener_dict = dict()
                 udp_listener_dict['status'] = listener_stats['status']
                 udp_listener_dict['stats'] = {
-                    'tx': listener_stats['stats']['bout'],
-                    'rx': listener_stats['stats']['bin'],
+                    'tx': delta_values['bout'],
+                    'rx': delta_values['bin'],
                     'conns': listener_stats['stats']['scur'],
-                    'totconns': listener_stats['stats']['stot'],
-                    'ereq': listener_stats['stats']['ereq']
+                    'totconns': delta_values['stot'],
+                    'ereq': delta_values['ereq']
                 }
                 if pool_status:
                     pool_id = pool_status['lvs']['uuid']
@@ -184,4 +265,5 @@ def build_stats_message():
                         "members": pool_status['lvs']['members']
                     }
                 msg['listeners'][listener_id] = udp_listener_dict
+    persist_counters()
     return msg
@@ -208,6 +208,10 @@ class ListenerStatistics(BaseDataModel):
             self.bytes_out += other.bytes_out
             self.request_errors += other.request_errors
             self.total_connections += other.total_connections
+        else:
+            raise TypeError(  # noqa: O342
+                "unsupported operand type(s) for +=: '{0}' and '{1}'".format(
+                    type(self), type(other)))
 
         return self
 
@@ -23,6 +23,7 @@ import sqlalchemy
 from stevedore import driver as stevedore_driver
 
 from octavia.common import constants
+from octavia.common import data_models
 from octavia.common import stats
 from octavia.controller.healthmanager.health_drivers import update_base
 from octavia.db import api as db_api
@@ -461,10 +462,6 @@ class UpdateHealthDb(update_base.HealthUpdateBase):
 
 
 class UpdateStatsDb(update_base.StatsUpdateBase, stats.StatsMixin):
 
-    def __init__(self):
-        super(UpdateStatsDb, self).__init__()
-        self.repo_listener = repo.ListenerRepository()
-
     def update_stats(self, health_message, srcaddr):
         # The executor will eat any exceptions from the update_stats code
         # so we need to wrap it and log the unhandled exception
@@ -533,7 +530,28 @@ class UpdateStatsDb(update_base.StatsUpdateBase, stats.StatsMixin):
             "ver": 2
         }
 
+        Example V3 message::
+
+            See V2 message, except values are deltas rather than absolutes.
+
         """
+
+        version = health_message.get("ver", 1)
+
+        if version <= 2:
+            self.version2(health_message)
+        elif version == 3:
+            self.version3(health_message)
+        else:
+            LOG.warning("Unknown message version: %s, ignoring...", version)
+
+    def version2(self, health_message):
+        """Parse version 1 and 2 of the health message.
+
+        :param health_message: health message dictionary
+        :type health_message: dict
+        """
+
         session = db_api.get_session()
 
         amphora_id = health_message['id']
@@ -545,8 +563,36 @@ class UpdateStatsDb(update_base.StatsUpdateBase, stats.StatsMixin):
                      'active_connections': stats['conns'],
                      'total_connections': stats['totconns'],
                      'request_errors': stats['ereq']}
-            LOG.debug("Updating listener stats in db and sending event.")
-            LOG.debug("Listener %s / Amphora %s stats: %s",
+            LOG.debug("Updating listener stats in db."
+                      "Listener %s / Amphora %s stats: %s",
                       listener_id, amphora_id, stats)
             self.listener_stats_repo.replace(
                 session, listener_id, amphora_id, **stats)
+
+    def version3(self, health_message):
+        """Parse version 3 of the health message.
+
+        :param health_message: health message dictionary
+        :type health_message: dict
+        """
+
+        session = db_api.get_session()
+
+        amphora_id = health_message['id']
+        listeners = health_message['listeners']
+        for listener_id, listener in listeners.items():
+
+            delta_stats = listener.get('stats')
+            delta_stats_model = data_models.ListenerStatistics(
+                listener_id=listener_id,
+                amphora_id=amphora_id,
+                bytes_in=delta_stats['rx'],
+                bytes_out=delta_stats['tx'],
+                active_connections=delta_stats['conns'],
+                total_connections=delta_stats['totconns'],
+                request_errors=delta_stats['ereq']
+            )
+            LOG.debug("Updating listener stats in db."
+                      "Listener %s / Amphora %s stats: %s",
+                      listener_id, amphora_id, delta_stats_model.to_dict())
+            self.listener_stats_repo.increment(session, delta_stats_model)
@@ -175,6 +175,21 @@ class ListenerStatistics(base_models.BASE):
                                'Current value is %(value)d.') % data)
         return value
 
+    def __iadd__(self, other):
+
+        if isinstance(other, (ListenerStatistics,
+                              data_models.ListenerStatistics)):
+            self.bytes_in += other.bytes_in
+            self.bytes_out += other.bytes_out
+            self.request_errors += other.request_errors
+            self.total_connections += other.total_connections
+        else:
+            raise TypeError(  # noqa: O342
+                "unsupported operand type(s) for +=: '{0}' and '{1}'".format(
+                    type(self), type(other)))
+
+        return self
+
 
 class Member(base_models.BASE, base_models.IdMixin, base_models.ProjectMixin,
              models.TimestampMixin, base_models.NameMixin,
@@ -1222,8 +1222,39 @@ class ListenerStatisticsRepository(BaseRepository):
                 model_kwargs['amphora_id'] = amphora_id
                 self.create(session, **model_kwargs)
 
+    def increment(self, session, delta_stats):
+        """Updates a listener's statistics, incrementing by the passed deltas.
+
+        :param session: A Sql Alchemy database session
+        :param delta_stats: Listener statistics deltas to add
+        :type delta_stats: octavia.common.data_models.ListenerStatistics
+
+        """
+
+        with session.begin(subtransactions=True):
+            count = session.query(self.model_class).filter_by(
+                listener_id=delta_stats.listener_id,
+                amphora_id=delta_stats.amphora_id).count()
+            if count:
+                existing_stats = session.query(
+                    self.model_class).with_for_update().filter_by(
+                    listener_id=delta_stats.listener_id,
+                    amphora_id=delta_stats.amphora_id).one()
+                existing_stats += delta_stats
+                existing_stats.active_connections = (
+                    delta_stats.active_connections)
+            else:
+                self.create(session, **delta_stats.to_dict())
+
     def update(self, session, listener_id, **model_kwargs):
-        """Updates a listener's statistics by a listener's id."""
+        """Updates a listener's statistics, overriding with the passed values.
+
+        :param session: A Sql Alchemy database session
+        :param listener_id: The UUID of the listener to update
+        :type listener_id: str
+        :param model_kwargs: Entity attributes that should be updated
+
+        """
         with session.begin(subtransactions=True):
             session.query(self.model_class).filter_by(
                 listener_id=listener_id).update(model_kwargs)
(File diff suppressed because it is too large.)
@@ -12,21 +12,26 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 #
+import os
 import queue
 from unittest import mock
 
 from oslo_config import cfg
 from oslo_config import fixture as oslo_fixture
 from oslo_utils import uuidutils
+import simplejson
 
 from octavia.amphorae.backends.health_daemon import health_daemon
 from octavia.common import constants
+from octavia.tests.common import utils as test_utils
 import octavia.tests.unit.base as base
 
 
+LB_ID1 = uuidutils.generate_uuid()
 LISTENER_ID1 = uuidutils.generate_uuid()
 LISTENER_ID2 = uuidutils.generate_uuid()
 LISTENER_IDS = [LISTENER_ID1, LISTENER_ID2]
+AMPHORA_ID = uuidutils.generate_uuid()
 BASE_PATH = '/tmp/test'
 SAMPLE_POOL_STATUS = {
     '432fc8b3-d446-48d4-bb64-13beb90e22bc': {
@@ -45,46 +50,46 @@ SAMPLE_BOGUS_POOL_STATUS = {LISTENER_ID1: {
     '302e33d9-dee1-4de9-98d5-36329a06fb58':
         'DOWN'}}}
 
-SAMPLE_STATS = ({'': '', 'status': 'OPEN', 'lastchg': '',
+FRONTEND_STATS = {'': '', 'status': 'OPEN', 'lastchg': '',
     'weight': '', 'slim': '2000', 'pid': '1', 'comp_byp': '0',
     'lastsess': '', 'rate_lim': '0', 'check_duration': '',
     'rate': '0', 'req_rate': '0', 'check_status': '',
     'econ': '', 'comp_out': '0', 'wredis': '', 'dresp': '0',
-    'ereq': '0', 'tracked': '', 'comp_in': '0',
+    'ereq': '5', 'tracked': '', 'comp_in': '0',
     'pxname': LISTENER_ID1,
     'dreq': '0', 'hrsp_5xx': '0', 'last_chk': '',
-    'check_code': '', 'sid': '0', 'bout': '0', 'hrsp_1xx': '0',
-    'qlimit': '', 'hrsp_other': '0', 'bin': '0', 'rtime': '',
+    'check_code': '', 'sid': '0', 'bout': '10', 'hrsp_1xx': '0',
+    'qlimit': '', 'hrsp_other': '0', 'bin': '5', 'rtime': '',
     'smax': '0', 'req_tot': '0', 'lbtot': '', 'stot': '0',
     'wretr': '', 'req_rate_max': '0', 'ttime': '', 'iid': '2',
     'hrsp_4xx': '0', 'chkfail': '', 'hanafail': '',
    'downtime': '', 'qcur': '', 'eresp': '', 'comp_rsp': '0',
     'cli_abrt': '', 'ctime': '', 'qtime': '', 'srv_abrt': '',
     'throttle': '', 'last_agt': '', 'scur': '0', 'type': '0',
     'bck': '', 'qmax': '', 'rate_max': '0', 'hrsp_2xx': '0',
     'act': '', 'chkdown': '', 'svname': 'FRONTEND',
-    'hrsp_3xx': '0'},
-    {'': '', 'status': 'no check', 'lastchg': '', 'weight': '1',
+    'hrsp_3xx': '0'}
+MEMBER_STATS = {'': '', 'status': 'no check', 'lastchg': '', 'weight': '1',
     'slim': '', 'pid': '1', 'comp_byp': '', 'lastsess': '-1',
     'rate_lim': '', 'check_duration': '', 'rate': '0',
     'req_rate': '', 'check_status': '', 'econ': '0',
     'comp_out': '', 'wredis': '0', 'dresp': '0', 'ereq': '',
     'tracked': '', 'comp_in': '',
     'pxname': '432fc8b3-d446-48d4-bb64-13beb90e22bc',
     'dreq': '', 'hrsp_5xx': '0', 'last_chk': '',
     'check_code': '', 'sid': '1', 'bout': '0', 'hrsp_1xx': '0',
     'qlimit': '', 'hrsp_other': '0', 'bin': '0', 'rtime': '0',
     'smax': '0', 'req_tot': '', 'lbtot': '0', 'stot': '0',
     'wretr': '0', 'req_rate_max': '', 'ttime': '0', 'iid': '3',
     'hrsp_4xx': '0', 'chkfail': '', 'hanafail': '0',
     'downtime': '', 'qcur': '0', 'eresp': '0', 'comp_rsp': '',
     'cli_abrt': '0', 'ctime': '0', 'qtime': '0', 'srv_abrt': '0',
     'throttle': '', 'last_agt': '', 'scur': '0', 'type': '2',
     'bck': '0', 'qmax': '0', 'rate_max': '0', 'hrsp_2xx': '0',
     'act': '1', 'chkdown': '',
     'svname': '302e33d9-dee1-4de9-98d5-36329a06fb58',
-    'hrsp_3xx': '0'},
-    {'': '', 'status': 'UP', 'lastchg': '122', 'weight': '1',
+    'hrsp_3xx': '0'}
+BACKEND_STATS = {'': '', 'status': 'UP', 'lastchg': '122', 'weight': '1',
     'slim': '200', 'pid': '1', 'comp_byp': '0', 'lastsess': '-1',
     'rate_lim': '', 'check_duration': '', 'rate': '0',
     'req_rate': '', 'check_status': '', 'econ': '0',
@@ -101,14 +106,15 @@ SAMPLE_STATS = ({'': '', 'status': 'OPEN', 'lastchg': '',
     'srv_abrt': '0', 'throttle': '', 'last_agt': '', 'scur': '0',
     'type': '1', 'bck': '0', 'qmax': '0', 'rate_max': '0',
     'hrsp_2xx': '0', 'act': '1', 'chkdown': '0',
-    'svname': 'BACKEND', 'hrsp_3xx': '0'})
+    'svname': 'BACKEND', 'hrsp_3xx': '0'}
+SAMPLE_STATS = (FRONTEND_STATS, MEMBER_STATS, BACKEND_STATS)
 
 SAMPLE_STATS_MSG = {
     'listeners': {
         LISTENER_ID1: {
             'stats': {
                 'totconns': 0, 'conns': 0,
-                'tx': 0, 'rx': 0, 'ereq': 0},
+                'tx': 8, 'rx': 4, 'ereq': 5},
             'status': 'OPEN'},
     },
     'pools': {
@@ -119,8 +125,29 @@ SAMPLE_STATS_MSG = {
             'members': {'e657f950-a6a2-4d28-bffa-0c8a8c05f815': 'DOWN'},
             'status': 'UP'},
     },
-    'id': None,
-    'seq': 0,
+    'id': AMPHORA_ID,
+    'seq': mock.ANY,
+    'ver': health_daemon.MSG_VER
+}
+
+SAMPLE_MSG_HAPROXY_RESTART = {
+    'listeners': {
+        LISTENER_ID1: {
+            'stats': {
+                'totconns': 0, 'conns': 0,
+                'tx': 10, 'rx': 5, 'ereq': 5},
+            'status': 'OPEN'},
+    },
+    'pools': {
+        '432fc8b3-d446-48d4-bb64-13beb90e22bc': {
+            'members': {'302e33d9-dee1-4de9-98d5-36329a06fb58': 'DOWN'},
+            'status': 'UP'},
+        '3661ed10-99db-4d2c-bffb-99b60eb876ff': {
+            'members': {'e657f950-a6a2-4d28-bffa-0c8a8c05f815': 'DOWN'},
+            'status': 'UP'},
+    },
+    'id': AMPHORA_ID,
+    'seq': mock.ANY,
     'ver': health_daemon.MSG_VER
 }
 
|
|||||||
super(TestHealthDaemon, self).setUp()
|
super(TestHealthDaemon, self).setUp()
|
||||||
conf = oslo_fixture.Config(cfg.CONF)
|
conf = oslo_fixture.Config(cfg.CONF)
|
||||||
conf.config(group="haproxy_amphora", base_path=BASE_PATH)
|
conf.config(group="haproxy_amphora", base_path=BASE_PATH)
|
||||||
|
conf.config(group="amphora_agent", amphora_id=AMPHORA_ID)
|
||||||
|
file_name = os.path.join(BASE_PATH, "stats_counters")
|
||||||
|
self.mock_open = self.useFixture(
|
||||||
|
test_utils.OpenFixture(file_name)).mock_open
|
||||||
|
|
||||||
@mock.patch('octavia.amphorae.backends.agent.'
|
@mock.patch('octavia.amphorae.backends.agent.'
|
||||||
'api_server.util.get_loadbalancers')
|
'api_server.util.get_loadbalancers')
|
||||||
@ -296,18 +327,33 @@ class TestHealthDaemon(base.TestCase):
|
|||||||
'health_daemon.list_sock_stat_files')
|
'health_daemon.list_sock_stat_files')
|
||||||
def test_build_stats_message(self, mock_list_files,
|
def test_build_stats_message(self, mock_list_files,
|
||||||
mock_get_stats, mock_is_running):
|
mock_get_stats, mock_is_running):
|
||||||
mock_list_files.return_value = {LISTENER_ID1: 'TEST',
|
health_daemon.COUNTERS = None
|
||||||
LISTENER_ID2: 'TEST2'}
|
health_daemon.COUNTERS_FILE = None
|
||||||
|
lb1_stats_socket = '/var/lib/octavia/{0}/haproxy.sock'.format(LB_ID1)
|
||||||
|
mock_list_files.return_value = {LB_ID1: lb1_stats_socket}
|
||||||
|
|
||||||
mock_is_running.return_value = True
|
mock_is_running.return_value = True
|
||||||
mock_get_stats.return_value = SAMPLE_STATS, SAMPLE_POOL_STATUS
|
mock_get_stats.return_value = SAMPLE_STATS, SAMPLE_POOL_STATUS
|
||||||
|
|
||||||
msg = health_daemon.build_stats_message()
|
with mock.patch('os.open'), mock.patch.object(
|
||||||
|
os, 'fdopen', self.mock_open) as mock_fdopen:
|
||||||
|
mock_fdopen().read.return_value = simplejson.dumps({
|
||||||
|
LISTENER_ID1: {'bin': 1, 'bout': 2},
|
||||||
|
})
|
||||||
|
msg = health_daemon.build_stats_message()
|
||||||
|
|
||||||
self.assertEqual(SAMPLE_STATS_MSG, msg)
|
self.assertEqual(SAMPLE_STATS_MSG, msg)
|
||||||
|
|
||||||
mock_get_stats.assert_any_call('TEST')
|
mock_get_stats.assert_any_call(lb1_stats_socket)
|
||||||
mock_get_stats.assert_any_call('TEST2')
|
mock_fdopen().write.assert_called_once_with(simplejson.dumps({
|
||||||
|
LISTENER_ID1: {
|
||||||
|
'bin': int(FRONTEND_STATS['bin']),
|
||||||
|
'bout': int(FRONTEND_STATS['bout']),
|
||||||
|
'ereq': int(FRONTEND_STATS['ereq']),
|
||||||
|
'stot': int(FRONTEND_STATS['stot'])
|
||||||
|
|
||||||
|
}
|
||||||
|
}))
|
||||||
|
|
||||||
@mock.patch('octavia.amphorae.backends.agent.api_server.'
|
@mock.patch('octavia.amphorae.backends.agent.api_server.'
|
||||||
'util.is_lb_running')
|
'util.is_lb_running')
|
||||||
@ -318,15 +364,19 @@ class TestHealthDaemon(base.TestCase):
|
|||||||
def test_build_stats_message_no_listener(self, mock_list_files,
|
def test_build_stats_message_no_listener(self, mock_list_files,
|
||||||
mock_get_stats,
|
mock_get_stats,
|
||||||
mock_is_running):
|
mock_is_running):
|
||||||
mock_list_files.return_value = {LISTENER_ID1: 'TEST',
|
health_daemon.COUNTERS = None
|
||||||
LISTENER_ID2: 'TEST2'}
|
health_daemon.COUNTERS_FILE = None
|
||||||
|
lb1_stats_socket = '/var/lib/octavia/{0}/haproxy.sock'.format(LB_ID1)
|
||||||
|
mock_list_files.return_value = {LB_ID1: lb1_stats_socket}
|
||||||
|
|
||||||
mock_is_running.side_effect = [True, False]
|
mock_is_running.return_value = False
|
||||||
mock_get_stats.return_value = SAMPLE_STATS, SAMPLE_POOL_STATUS
|
|
||||||
|
|
||||||
health_daemon.build_stats_message()
|
with mock.patch('os.open'), mock.patch.object(
|
||||||
|
os, 'fdopen', self.mock_open) as mock_fdopen:
|
||||||
|
health_daemon.build_stats_message()
|
||||||
|
|
||||||
self.assertEqual(1, mock_get_stats.call_count)
|
self.assertEqual(0, mock_get_stats.call_count)
|
||||||
|
self.assertEqual(0, mock_fdopen().read.call_count)
|
||||||
|
|
||||||
@mock.patch("octavia.amphorae.backends.utils.keepalivedlvs_query."
|
@mock.patch("octavia.amphorae.backends.utils.keepalivedlvs_query."
|
||||||
"get_udp_listener_pool_status")
|
"get_udp_listener_pool_status")
|
||||||
@ -335,8 +385,10 @@ class TestHealthDaemon(base.TestCase):
|
|||||||
@mock.patch("octavia.amphorae.backends.agent.api_server.util."
|
@mock.patch("octavia.amphorae.backends.agent.api_server.util."
|
||||||
"get_udp_listeners")
|
"get_udp_listeners")
|
||||||
def test_build_stats_message_with_udp_listener(
|
def test_build_stats_message_with_udp_listener(
|
||||||
self, mock_get_udp_listeners, mock_get_listener_stats,
|
self, mock_get_udp_listeners,
|
||||||
mock_get_pool_status):
|
mock_get_listener_stats, mock_get_pool_status):
|
||||||
|
health_daemon.COUNTERS = None
|
||||||
|
health_daemon.COUNTERS_FILE = None
|
||||||
udp_listener_id1 = uuidutils.generate_uuid()
|
udp_listener_id1 = uuidutils.generate_uuid()
|
||||||
udp_listener_id2 = uuidutils.generate_uuid()
|
udp_listener_id2 = uuidutils.generate_uuid()
|
||||||
udp_listener_id3 = uuidutils.generate_uuid()
|
udp_listener_id3 = uuidutils.generate_uuid()
|
||||||
@ -346,10 +398,11 @@ class TestHealthDaemon(base.TestCase):
|
|||||||
mock_get_udp_listeners.return_value = [udp_listener_id1,
|
mock_get_udp_listeners.return_value = [udp_listener_id1,
|
||||||
udp_listener_id2,
|
udp_listener_id2,
|
||||||
udp_listener_id3]
|
udp_listener_id3]
|
||||||
|
|
||||||
mock_get_listener_stats.return_value = {
|
mock_get_listener_stats.return_value = {
|
||||||
udp_listener_id1: {
|
udp_listener_id1: {
|
||||||
'status': constants.OPEN,
|
'status': constants.OPEN,
|
||||||
'stats': {'bin': 6387472, 'stot': 5, 'bout': 7490,
|
'stats': {'bin': 5, 'stot': 5, 'bout': 10,
|
||||||
'ereq': 0, 'scur': 0}},
|
'ereq': 0, 'scur': 0}},
|
||||||
udp_listener_id3: {
|
udp_listener_id3: {
|
||||||
'status': constants.DOWN,
|
'status': constants.DOWN,
|
||||||
@ -373,7 +426,7 @@ class TestHealthDaemon(base.TestCase):
|
|||||||
udp_listener_id1: {
|
udp_listener_id1: {
|
||||||
'status': constants.OPEN,
|
'status': constants.OPEN,
|
||||||
'stats': {'conns': 0, 'totconns': 5, 'ereq': 0,
|
'stats': {'conns': 0, 'totconns': 5, 'ereq': 0,
|
||||||
'rx': 6387472, 'tx': 7490}},
|
'rx': 4, 'tx': 8}},
|
||||||
udp_listener_id3: {
|
udp_listener_id3: {
|
||||||
'status': constants.DOWN,
|
'status': constants.DOWN,
|
||||||
'stats': {'conns': 0, 'totconns': 0, 'ereq': 0,
|
'stats': {'conns': 0, 'totconns': 0, 'ereq': 0,
|
||||||
@ -384,10 +437,58 @@ class TestHealthDaemon(base.TestCase):
|
|||||||
'members': {
|
'members': {
|
||||||
member_id1: constants.UP,
|
member_id1: constants.UP,
|
||||||
member_id2: constants.UP}}},
|
member_id2: constants.UP}}},
|
||||||
'id': None,
|
'id': AMPHORA_ID,
|
||||||
'seq': mock.ANY, 'ver': health_daemon.MSG_VER}
|
'seq': mock.ANY, 'ver': health_daemon.MSG_VER}
|
||||||
msg = health_daemon.build_stats_message()
|
|
||||||
|
with mock.patch('os.open'), mock.patch.object(
|
||||||
|
os, 'fdopen', self.mock_open) as mock_fdopen:
|
||||||
|
mock_fdopen().read.return_value = simplejson.dumps({
|
||||||
|
udp_listener_id1: {
|
||||||
|
'bin': 1, 'bout': 2, "ereq": 0, "stot": 0}
|
||||||
|
})
|
||||||
|
msg = health_daemon.build_stats_message()
|
||||||
|
|
||||||
self.assertEqual(expected, msg)
|
self.assertEqual(expected, msg)
|
||||||
|
mock_fdopen().write.assert_called_once_with(simplejson.dumps({
|
||||||
|
udp_listener_id1: {'bin': 5, 'bout': 10, 'ereq': 0, 'stot': 5},
|
||||||
|
udp_listener_id3: {'bin': 0, 'bout': 0, 'ereq': 0, 'stot': 0},
|
||||||
|
}))
|
||||||
|
|
||||||
|
@mock.patch('octavia.amphorae.backends.agent.api_server.'
|
||||||
|
'util.is_lb_running')
|
||||||
|
@mock.patch('octavia.amphorae.backends.health_daemon.'
|
||||||
|
'health_daemon.get_stats')
|
||||||
|
@mock.patch('octavia.amphorae.backends.health_daemon.'
|
||||||
|
'health_daemon.list_sock_stat_files')
|
||||||
|
def test_haproxy_restart(self, mock_list_files,
|
||||||
|
mock_get_stats, mock_is_running):
|
||||||
|
health_daemon.COUNTERS = None
|
||||||
|
health_daemon.COUNTERS_FILE = None
|
||||||
|
lb1_stats_socket = '/var/lib/octavia/{0}/haproxy.sock'.format(LB_ID1)
|
||||||
|
mock_list_files.return_value = {LB_ID1: lb1_stats_socket}
|
||||||
|
|
||||||
|
mock_is_running.return_value = True
|
||||||
|
mock_get_stats.return_value = SAMPLE_STATS, SAMPLE_POOL_STATUS
|
||||||
|
|
||||||
|
with mock.patch('os.open'), mock.patch.object(
|
||||||
|
os, 'fdopen', self.mock_open) as mock_fdopen:
|
||||||
|
mock_fdopen().read.return_value = simplejson.dumps({
|
||||||
|
LISTENER_ID1: {'bin': 15, 'bout': 20},
|
||||||
|
})
|
||||||
|
msg = health_daemon.build_stats_message()
|
||||||
|
|
||||||
|
self.assertEqual(SAMPLE_MSG_HAPROXY_RESTART, msg)
|
||||||
|
|
||||||
|
mock_get_stats.assert_any_call(lb1_stats_socket)
|
||||||
|
mock_fdopen().write.assert_called_once_with(simplejson.dumps({
|
||||||
|
LISTENER_ID1: {
|
||||||
|
'bin': int(FRONTEND_STATS['bin']),
|
||||||
|
'bout': int(FRONTEND_STATS['bout']),
|
||||||
|
'ereq': int(FRONTEND_STATS['ereq']),
|
||||||
|
'stot': int(FRONTEND_STATS['stot'])
|
||||||
|
|
||||||
|
}
|
||||||
|
}))
|
||||||
|
|
||||||
|
|
||||||
class FileNotFoundError(IOError):
|
class FileNotFoundError(IOError):
|
||||||
|
@@ -14,6 +14,7 @@
 
 import copy
 import datetime
+import random
 
 from oslo_utils import uuidutils
 
@@ -27,6 +28,7 @@ class TestDataModels(base.TestCase):
     def setUp(self):
 
         self.LB_ID = uuidutils.generate_uuid()
+        self.LISTENER_ID = uuidutils.generate_uuid()
         self.PROJECT_ID = uuidutils.generate_uuid()
         self.SERVER_GROUP_ID = uuidutils.generate_uuid()
         self.CREATED_AT = datetime.datetime.now()
@@ -479,3 +481,55 @@ class TestDataModels(base.TestCase):
         test_Quota_obj.update(update_dict)
 
         self.assertEqual(reference_Quota_obj, test_Quota_obj)
+
+    def test_ListenerStatistics_iadd(self):
+        # test incrementing add function
+
+        bytes_in1 = random.randrange(1000000000)
+        bytes_out1 = random.randrange(1000000000)
+        active_conns1 = random.randrange(1000000000)
+        total_conns1 = random.randrange(1000000000)
+        request_errors1 = random.randrange(1000000000)
+        stats_1 = data_models.ListenerStatistics(
+            listener_id=self.LISTENER_ID,
+            amphora_id=self.AMP_ID,
+            bytes_in=bytes_in1,
+            bytes_out=bytes_out1,
+            active_connections=active_conns1,
+            total_connections=total_conns1,
+            request_errors=request_errors1
+        )
+
+        bytes_in2 = random.randrange(1000000000)
+        bytes_out2 = random.randrange(1000000000)
+        active_conns2 = random.randrange(1000000000)
+        total_conns2 = random.randrange(1000000000)
+        request_errors2 = random.randrange(1000000000)
+        stats_2 = data_models.ListenerStatistics(
+            listener_id="listener 2",
+            amphora_id="amphora 2",
+            bytes_in=bytes_in2,
+            bytes_out=bytes_out2,
+            active_connections=active_conns2,
+            total_connections=total_conns2,
+            request_errors=request_errors2
+        )
+
+        # test successful +=
+        stats_1 += stats_2
+
+        # not a delta, so it won't be incremented
+        self.assertEqual(stats_1.active_connections, active_conns1)
+        self.assertEqual(stats_1.listener_id, self.LISTENER_ID)
+        self.assertEqual(stats_1.amphora_id, self.AMP_ID)
+
+        # deltas will be incremented
+        self.assertEqual(stats_1.bytes_in, bytes_in1 + bytes_in2)
+        self.assertEqual(stats_1.bytes_out, bytes_out1 + bytes_out2)
+        self.assertEqual(stats_1.total_connections,
+                         total_conns1 + total_conns2)
+        self.assertEqual(stats_1.request_errors,
+                         request_errors1 + request_errors2)
+
+        # test incrementing an incompatible object
+        self.assertRaises(TypeError, stats_1.__iadd__, "boom")
@@ -1385,7 +1385,7 @@ class TestUpdateStatsDb(base.TestCase):
         self.loadbalancer_repo.get.return_value = self.loadbalancer
 
     @mock.patch('octavia.db.api.get_session')
-    def test_update_stats(self, mock_session):
+    def test_update_stats_v1(self, mock_session):
 
         health = {
             "id": self.amphora_id,
@@ -1422,11 +1422,53 @@ class TestUpdateStatsDb(base.TestCase):
             total_connections=self.listener_stats.total_connections,
             request_errors=self.listener_stats.request_errors)
 
-        # Test with missing DB listener
-        self.sm.repo_listener.get.return_value = None
+        # Test with update failure
+        mock_session.side_effect = Exception
+        self.sm.update_stats(health, '192.0.2.1')
+
+    @mock.patch('octavia.db.api.get_session')
+    def test_update_stats_v3(self, mock_session):
+
+        health = {
+            "id": self.amphora_id,
+            "seq": random.randint(0, 100),
+            "ver": 3,
+            "listeners": {
+                self.listener_id: {
+                    "status": constants.OPEN,
+                    "stats": {
+                        "ereq": self.listener_stats.request_errors,
+                        "conns": self.listener_stats.active_connections,
+                        "totconns": self.listener_stats.total_connections,
+                        "rx": self.listener_stats.bytes_in,
+                        "tx": self.listener_stats.bytes_out,
+                    },
+                    "pools": {
+                        "pool-id-1": {
+                            "status": constants.UP,
+                            "members": {"member-id-1": constants.ONLINE}
+                        }
+                    }
+                }
+            }
+        }
+
+        delta_stats_model = data_models.ListenerStatistics(
+            listener_id=self.listener_id,
+            amphora_id=self.amphora_id,
+            bytes_in=self.listener_stats.bytes_in,
+            bytes_out=self.listener_stats.bytes_out,
+            active_connections=self.listener_stats.active_connections,
+            total_connections=self.listener_stats.total_connections,
+            request_errors=self.listener_stats.request_errors
+        )
+        mock_session.return_value = 'blah'
+
         self.sm.update_stats(health, '192.0.2.1')
 
+        self.listener_stats_repo.increment.assert_called_once_with(
+            'blah', delta_stats_model)
+
         # Test with update failure
         mock_session.side_effect = Exception
         self.sm.update_stats(health, '192.0.2.1')