local HA tests
Added some local HAHT tests - unit tests no longer work concurrently - bindep.txt added for binary dependencies needed for tests Closes-Bug: 1600019 Partially implements blueprint: high-availability-design Change-Id: If4213ede7bde4d78d710ce75c0147a4889387da9
This commit is contained in:
parent
9402a87373
commit
2dd4dfb4ab
|
@ -0,0 +1,14 @@
|
||||||
|
python-all-dev
|
||||||
|
python3-all-dev
|
||||||
|
libvirt-dev
|
||||||
|
libxml2-dev
|
||||||
|
libxslt1-dev
|
||||||
|
# libmysqlclient-dev
|
||||||
|
# libpq-dev
|
||||||
|
libsqlite3-dev
|
||||||
|
libffi-dev
|
||||||
|
# mysql-client
|
||||||
|
# mysql-server
|
||||||
|
# postgresql
|
||||||
|
# postgresql-client
|
||||||
|
rabbitmq-server
|
|
@ -123,6 +123,7 @@ def setup_logging():
|
||||||
|
|
||||||
def find_paste_config():
|
def find_paste_config():
|
||||||
config_path = cfg.CONF.find_file(cfg.CONF.api_paste_config)
|
config_path = cfg.CONF.find_file(cfg.CONF.api_paste_config)
|
||||||
|
|
||||||
if not config_path:
|
if not config_path:
|
||||||
raise cfg.ConfigFilesNotFoundError(
|
raise cfg.ConfigFilesNotFoundError(
|
||||||
config_files=[cfg.CONF.api_paste_config])
|
config_files=[cfg.CONF.api_paste_config])
|
||||||
|
|
|
@ -0,0 +1,34 @@
|
||||||
|
[composite:congress]
|
||||||
|
use = egg:Paste#urlmap
|
||||||
|
/: congressversions
|
||||||
|
/v1: congress_api_v1
|
||||||
|
|
||||||
|
[pipeline:congressversions]
|
||||||
|
pipeline = cors catch_errors congressversionapp
|
||||||
|
|
||||||
|
[app:congressversionapp]
|
||||||
|
paste.app_factory = congress.api.versions:Versions.factory
|
||||||
|
|
||||||
|
[composite:congress_api_v1]
|
||||||
|
use = call:congress.auth:pipeline_factory
|
||||||
|
keystone = cors request_id catch_errors authtoken keystonecontext congress_api
|
||||||
|
noauth = cors request_id catch_errors congress_api
|
||||||
|
|
||||||
|
[app:congress_api]
|
||||||
|
paste.app_factory = congress.service:congress_app_factory
|
||||||
|
|
||||||
|
[filter:request_id]
|
||||||
|
paste.filter_factory = oslo_middleware:RequestId.factory
|
||||||
|
|
||||||
|
[filter:catch_errors]
|
||||||
|
paste.filter_factory = oslo_middleware:CatchErrors.factory
|
||||||
|
|
||||||
|
[filter:keystonecontext]
|
||||||
|
paste.filter_factory = congress.auth:CongressKeystoneContext.factory
|
||||||
|
|
||||||
|
[filter:authtoken]
|
||||||
|
paste.filter_factory = keystonemiddleware.auth_token:filter_factory
|
||||||
|
|
||||||
|
[filter:cors]
|
||||||
|
paste.filter_factory = oslo_middleware.cors:filter_factory
|
||||||
|
oslo_config_project = congress
|
|
@ -1,2 +1,3 @@
|
||||||
[database]
|
[database]
|
||||||
connection = 'sqlite://'
|
connection = 'sqlite://'
|
||||||
|
# connection = mysql+pymysql://root:password@127.0.0.1/congress?charset=utf8
|
|
@ -0,0 +1,9 @@
|
||||||
|
[DEFAULT]
|
||||||
|
bind_port = 4001
|
||||||
|
auth_strategy = noauth
|
||||||
|
datasource_sync_period = 5
|
||||||
|
debug = True
|
||||||
|
|
||||||
|
[database]
|
||||||
|
# connection = mysql+pymysql://root:password@127.0.0.1/congress?charset=utf8
|
||||||
|
connection = 'sqlite:///congress/tests/haht/test.db'
|
|
@ -0,0 +1,9 @@
|
||||||
|
[DEFAULT]
|
||||||
|
bind_port = 4002
|
||||||
|
auth_strategy = noauth
|
||||||
|
datasource_sync_period = 5
|
||||||
|
debug = True
|
||||||
|
|
||||||
|
[database]
|
||||||
|
# connection = mysql+pymysql://root:password@127.0.0.1/congress?charset=utf8
|
||||||
|
connection = 'sqlite:///congress/tests/haht/test.db'
|
Binary file not shown.
|
@ -0,0 +1,487 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (c) 2016 VMware, Inc. All rights reserved.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
# not use this file except in compliance with the License. You may obtain
|
||||||
|
# a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
|
||||||
|
"""
|
||||||
|
test_congress_haht
|
||||||
|
----------------------------------
|
||||||
|
|
||||||
|
Replicated policy engine high availability tests for `congress` module.
|
||||||
|
"""
|
||||||
|
from __future__ import print_function
|
||||||
|
from __future__ import division
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
# Note: monkey patch to allow running this test standalone under 'nose'
|
||||||
|
import eventlet
|
||||||
|
eventlet.monkey_patch()
|
||||||
|
|
||||||
|
# import mock
|
||||||
|
|
||||||
|
# from oslo_config import cfg
|
||||||
|
# cfg.CONF.distributed_architecture = True
|
||||||
|
# import neutronclient.v2_0
|
||||||
|
from oslo_log import log as logging
|
||||||
|
|
||||||
|
# from congress.common import config
|
||||||
|
# from congress.datasources import neutronv2_driver
|
||||||
|
# from congress.datasources import nova_driver
|
||||||
|
from congress.db import api as db
|
||||||
|
from congress.db import db_policy_rules
|
||||||
|
# from congress import harness
|
||||||
|
# from congress.tests.api import base as api_base
|
||||||
|
from congress.tests import base
|
||||||
|
# from congress.tests.datasources import test_neutron_driver as test_neutron
|
||||||
|
from congress.tests import helper
|
||||||
|
|
||||||
|
import requests
|
||||||
|
import shutil
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
import tempfile
|
||||||
|
import tenacity
|
||||||
|
import time
|
||||||
|
|
||||||
|
|
||||||
|
LOG = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class TestCongressHAHT(base.SqlTestCase):
    """Replicated policy-engine high-availability tests.

    setUp launches two congress server subprocesses (PE1 on port 4001 and
    PE2 on port 4002) that share one sqlite database; each test then drives
    the REST API of both nodes and checks that policy and rule state is
    kept in sync between them.
    """

    class client(object):
        """Minimal blocking REST client bound to one congress API node."""

        # URL prefix of the API version under test.
        version = '/v1'

        def __init__(self, port, host='0.0.0.0'):
            self.host = host
            self.port = port
            self.base_url = 'http://' + host + ':%d' % port

        def url(self, suffix=None):
            """Return the bare base URL, or the versioned URL for suffix."""
            if suffix is None:
                return self.base_url
            else:
                return self.base_url + self.version + '/' + suffix

        def get(self, suffix=None):
            return requests.get(self.url(suffix))

        def delete(self, suffix=None):
            return requests.delete(self.url(suffix))

        def post(self, suffix=None, json=None):
            x = requests.post(self.url(suffix), json=json)
            # print("status: %s, text: %s" % (x.status_code, x.text))
            return x

    def setUp(self):
        """Start two policy-engine nodes against a clean database."""
        super(TestCongressHAHT, self).setUp()
        assert sys.executable is not None,\
            'test cannot proceed when sys.executable is None'

        # establish clean starting DB
        self.clean_db()
        shutil.copy(helper.test_path('haht/test.db.clean'),
                    helper.test_path('haht/test.db'))

        self.clients = []
        self.procs = []
        # Temp files capturing each node's stdout/stderr, keyed by node
        # number; read back for diagnostics when a test fails.
        self.outfiles = {}
        self.errfiles = {}

        self.pe1 = self.start_pe(1, 4001)
        self.pe2 = self.start_pe(2, 4002)

    def dump_nodes_logs(self):
        """Log the captured process output of both nodes (for debugging)."""
        LOG.error('PE1 process output:\n%s' %
                  self.read_output_file(self.outfiles[1]))
        LOG.error('PE2 process output:\n%s' %
                  self.read_output_file(self.outfiles[2]))

    def clean_db(self):
        """Delete all policies and policy rules from the shared database."""
        session = db.get_session()
        with session.begin(subtransactions=True):
            session.query(db_policy_rules.Policy).delete()
            session.query(db_policy_rules.PolicyRule).delete()

    def start_pe(self, num, port):
        """Start policy-engine node `num` listening on `port`.

        Spawns congress_server.py as a subprocess (killed automatically on
        test cleanup), waits until its API answers HTTP 200, and returns a
        `client` bound to the node.  Re-raises tenacity.RetryError (after
        logging the process output) if the node never comes up.
        """
        self.outfiles[num] = tempfile.NamedTemporaryFile(
            mode='a+', suffix='.out',
            prefix='congress-pe%d-%d-' % (num, port),
            dir='/tmp')

        self.errfiles[num] = tempfile.NamedTemporaryFile(
            mode='a+', suffix='.err',
            prefix='congress-pe%d-%d-' % (num, port),
            dir='/tmp')

        args = [sys.executable,
                'congress/server/congress_server.py',
                '--node-id',
                'node_%d' % num,
                '--api',
                '--policy-engine',
                '--config-file',
                'congress/tests/etc/congress.conf.test.ha_pe%d' % num]
        # NOTE: stderr is sent to the .out file too; the .err file is
        # created but not attached to the process.
        pe = subprocess.Popen(args,
                              stdout=self.outfiles[num],
                              stderr=self.outfiles[num],
                              cwd=helper.root_path())
        self.addCleanup(pe.kill)
        pe = self.client(port)
        try:
            # wait for the node's API to start answering before returning
            helper.retry_check_function_return_value(
                lambda: pe.get().status_code, 200)
        except tenacity.RetryError:
            out = self.read_output_file(self.outfiles[num])
            LOG.error('PE%d failed to start. Process output:\n%s' % (num, out))
            raise
        return pe

    def read_output_file(self, file):
        """Return the full current contents of a node's output temp file."""
        file.flush()
        file.seek(0)
        return ''.join(file.readlines())

    def tail(self, thing, length=20):
        """Return the last `length` lines of a multi-line string."""
        lines = thing.split('\n')
        return '\n'.join(lines[-length:])

    def test_policy_create_delete(self):
        """Policy create/delete on one node propagates to the other."""
        # create policy alice in PE1
        self.assertEqual(self.pe1.post(
            suffix='policies', json={'name': 'alice'}).status_code, 201)
        # check policy alice in PE1
        self.assertEqual(self.pe1.get('policies/alice').status_code, 200)
        # check policy alice in PE2
        helper.retry_check_function_return_value(
            lambda: self.pe2.get('policies/alice').status_code, 200)
        # create policy bob in PE2
        self.assertEqual(self.pe2.post(
            suffix='policies', json={'name': 'bob'}).status_code, 201)
        # check policy bob in PE2
        self.assertEqual(self.pe2.get('policies/bob').status_code, 200)
        # check policy bob in PE1
        helper.retry_check_function_return_value(
            lambda: self.pe1.get('policies/bob').status_code, 200)

        # check policy listings (4 = alice + bob + presumably two
        # pre-existing default policies -- TODO confirm)
        self.assertEqual(len(self.pe1.get('policies').json()['results']), 4)
        self.assertEqual(len(self.pe2.get('policies').json()['results']), 4)

        # delete policy alice in PE2, and check deleted on both PE
        self.assertEqual(self.pe2.delete('policies/alice').status_code, 200)
        self.assertEqual(self.pe2.get('policies/alice').status_code, 404)
        helper.retry_check_function_return_value(
            lambda: self.pe1.get('policies/alice').status_code, 404)

        # delete policy bob in PE2, and check deleted on both PE
        self.assertEqual(self.pe2.delete('policies/bob').status_code, 200)
        self.assertEqual(self.pe2.get('policies/bob').status_code, 404)
        helper.retry_check_function_return_value(
            lambda: self.pe1.get('policies/bob').status_code, 404)

    def test_policy_rule_crud(self):
        """Rules created/listed/deleted on either node stay consistent."""
        try:
            # create policy alice in PE1
            self.assertEqual(self.pe1.post(
                suffix='policies', json={'name': 'alice'}).status_code, 201)
            # add rule to PE1
            j = {'rule': 'p(x) :- q(x)', 'name': 'rule1'}
            self.assertEqual(self.pe1.post(
                suffix='policies/alice/rules', json=j).status_code, 201)
            self.assertEqual(
                self.pe1.get('policies/alice/rules').status_code, 200)
            self.assertEqual(
                len(self.pe1.get('policies/alice/rules').
                    json()['results']), 1)
            # retry necessary because of synchronization
            helper.retry_check_function_return_value(
                lambda: len(self.pe2.get('policies/alice/rules').
                            json()['results']), 1)
            # add rule to PE2
            j = {'rule': 'q(1)', 'name': 'rule2'}
            self.assertEqual(self.pe2.post(
                suffix='policies/alice/rules', json=j).status_code, 201)
            # check 2 rule in each pe
            self.assertEqual(len(
                self.pe2.get('policies/alice/rules').json()['results']), 2)
            self.assertEqual(len(
                self.pe1.get('policies/alice/rules').json()['results']), 2)

            # grab rule IDs
            rules = self.pe2.get('policies/alice/rules').json()['results']
            id1 = next(x['id'] for x in rules if x['name'] == 'rule1')
            id2 = next(x['id'] for x in rules if x['name'] == 'rule2')

            # show rules by id
            self.assertEqual(
                self.pe1.get('policies/alice/rules/%s' % id1).status_code, 200)
            self.assertEqual(
                self.pe2.get('policies/alice/rules/%s' % id1).status_code, 200)
            self.assertEqual(
                self.pe1.get('policies/alice/rules/%s' % id2).status_code, 200)
            self.assertEqual(
                self.pe2.get('policies/alice/rules/%s' % id2).status_code, 200)

            # list tables
            self.assertEqual(len(
                self.pe1.get('policies/alice/tables').json()['results']), 2)
            self.assertEqual(len(
                self.pe2.get('policies/alice/tables').json()['results']), 2)

            # show tables
            self.assertEqual(
                self.pe1.get('policies/alice/tables/p').status_code, 200)
            self.assertEqual(
                self.pe2.get('policies/alice/tables/p').status_code, 200)
            self.assertEqual(
                self.pe1.get('policies/alice/tables/q').status_code, 200)
            self.assertEqual(
                self.pe2.get('policies/alice/tables/q').status_code, 200)

            # delete from PE1 and check both have 1 rule left
            self.assertEqual(self.pe1.delete(
                suffix='policies/alice/rules/%s' % id1).status_code, 200)
            self.assertEqual(
                len(self.pe1.get('policies/alice/rules').
                    json()['results']), 1)
            self.assertEqual(
                len(self.pe2.get('policies/alice/rules').
                    json()['results']), 1)
            # delete from PE2 and check both have 0 rules left
            self.assertEqual(self.pe2.delete(
                suffix='policies/alice/rules/%s' % id2).status_code, 200)
            self.assertEqual(
                len(self.pe1.get('policies/alice/rules').
                    json()['results']), 0)
            self.assertEqual(
                len(self.pe2.get('policies/alice/rules').
                    json()['results']), 0)
        except Exception:
            self.dump_nodes_logs()
            raise

    def test_conflicting_policy_create_delete(self):
        """Conflicting create/delete across nodes yields expected statuses."""
        try:
            # create policy alice in PE1
            self.assertEqual(self.pe1.post(
                suffix='policies', json={'name': 'alice'}).status_code, 201)
            self.assertEqual(self.pe2.post(
                suffix='policies', json={'name': 'alice'}).status_code, 409)

            # create policy bob in PE1
            self.assertEqual(self.pe1.post(
                suffix='policies', json={'name': 'bob'}).status_code, 201)
            self.assertEqual(self.pe2.delete(
                suffix='policies/bob').status_code, 200)
            # BUG: should be 201 right away not 409
            self.assertEqual(self.pe1.post(
                suffix='policies', json={'name': 'bob'}).status_code, 409)
            # sleep to let the delete propagate before retrying the create
            time.sleep(10)
            self.assertEqual(self.pe1.post(
                suffix='policies', json={'name': 'bob'}).status_code, 201)
        except Exception:
            LOG.error('PE1 process output:\n%s' %
                      self.read_output_file(self.outfiles[1]))
            LOG.error('PE2 process output:\n%s' %
                      self.read_output_file(self.outfiles[2]))
            raise

    def test_policy_rule_create_delete(self):
        """Rule create/delete on alternating nodes stays consistent."""
        try:
            # create policy alice in PE1
            self.assertEqual(self.pe1.post(
                suffix='policies', json={'name': 'alice'}).status_code, 201)
            # add rule to PE1 (retry since 500 on first attempt)
            j = {'rule': 'p(x) :- q(x)', 'name': 'rule1'}
            self.assertEqual(self.pe1.post(
                suffix='policies/alice/rules', json=j).status_code, 201)
            self.assertEqual(
                self.pe1.get('policies/alice/rules').status_code, 200)
            self.assertEqual(
                len(self.pe1.get('policies/alice/rules').
                    json()['results']), 1)
            time.sleep(10)  # wait for sync before reading from PE2
            self.assertEqual(
                len(self.pe2.get('policies/alice/rules').
                    json()['results']), 1)
            # add rule to PE2
            j = {'rule': 'q(1)', 'name': 'rule2'}
            self.assertEqual(self.pe2.post(
                suffix='policies/alice/rules', json=j).status_code, 201)
            # check 2 rule in each pe
            self.assertEqual(len(
                self.pe2.get('policies/alice/rules').json()['results']), 2)
            self.assertEqual(len(
                self.pe1.get('policies/alice/rules').json()['results']), 2)
            # grab rule IDs
            rules = self.pe2.get('policies/alice/rules').json()['results']
            id1 = next(x['id'] for x in rules if x['name'] == 'rule1')
            id2 = next(x['id'] for x in rules if x['name'] == 'rule2')
            # delete from PE1 and check both have 1 rule left
            self.assertEqual(self.pe1.delete(
                suffix='policies/alice/rules/%s' % id1).status_code, 200)
            self.assertEqual(
                len(self.pe1.get('policies/alice/rules').
                    json()['results']), 1)
            self.assertEqual(
                len(self.pe2.get('policies/alice/rules').
                    json()['results']), 1)
            # delete from PE2 and check both have 0 rules left
            self.assertEqual(self.pe2.delete(
                suffix='policies/alice/rules/%s' % id2).status_code, 200)
            self.assertEqual(
                len(self.pe1.get('policies/alice/rules').
                    json()['results']), 0)
            self.assertEqual(
                len(self.pe2.get('policies/alice/rules').
                    json()['results']), 0)
        except Exception:
            self.dump_nodes_logs()
            raise

    def test_policy_rule_create_delete_interference(self):
        """Deleting a rule on one node interferes with re-create on another."""
        try:
            # create policy alice in PE1
            self.assertEqual(self.pe1.post(
                suffix='policies', json={'name': 'alice'}).status_code, 201)
            j = {'rule': 'p(x) :- q(x)', 'name': 'rule1'}

            rule_create_res = self.pe2.post(
                suffix='policies/alice/rules', json=j)
            self.assertEqual(rule_create_res.status_code, 201)
            rule_id = rule_create_res.json()['id']
            self.assertEqual(self.pe1.delete(
                suffix='policies/alice/rules/%s' % rule_id).status_code, 200)

            # BUG: should be 201 right away not 409
            self.assertEqual(self.pe2.post(
                suffix='policies/alice/rules', json=j).status_code, 409)
            # sleep to let the delete propagate before retrying the create
            time.sleep(10)
            self.assertEqual(self.pe2.post(
                suffix='policies/alice/rules', json=j).status_code, 201)

        except Exception:
            self.dump_nodes_logs()
            raise

    def test_policy_rule_duplicate(self):
        """Duplicate rule handling differs by node; documents current behavior."""
        try:
            # create policy alice in PE1
            self.assertEqual(self.pe1.post(
                suffix='policies', json={'name': 'alice'}).status_code, 201)
            j = {'rule': 'p(x) :- q(x)', 'name': 'rule1'}

            # inconsistent behavior depending on whether duplicate request
            # is processed on same or different PE node
            self.assertEqual(self.pe2.post(
                suffix='policies/alice/rules', json=j).status_code, 201)

            self.assertEqual(self.pe2.post(
                suffix='policies/alice/rules', json=j).status_code, 409)

            self.assertEqual(self.pe1.post(
                suffix='policies/alice/rules', json=j).status_code, 201)

            self.assertEqual(
                self.pe1.get('policies/alice/rules').status_code, 200)
            self.assertEqual(
                len(self.pe1.get('policies/alice/rules').
                    json()['results']), 2)
        except Exception:
            self.dump_nodes_logs()
            raise

    def test_policy_rule_evaluation(self):
        """Rule evaluation results converge across both nodes."""
        try:
            # create policy alice in PE1
            self.assertEqual(self.pe1.post(
                suffix='policies', json={'name': 'alice'}).status_code, 201)
            # add rule to PE1
            j = {'rule': 'p(x) :- q(x)', 'name': 'rule0'}
            res = self.pe1.post(
                suffix='policies/alice/rules', json=j)
            self.assertEqual(res.status_code, 201)
            r_id = res.json()['id']

            # add data to PE1 (extra whitespace exercises rule parsing)
            j = {'rule': ' q( 1 ) ', 'name': 'rule1'}
            res = self.pe1.post(
                suffix='policies/alice/rules', json=j)
            self.assertEqual(res.status_code, 201)
            q1_id = res.json()['id']

            # # add data to PE2
            j = {'rule': ' q ( 2 ) ', 'name': 'rule2'}
            self.assertEqual(self.pe2.post(
                suffix='policies/alice/rules', json=j).status_code, 201)

            # time.sleep(6)
            # print(self.pe1.get('policies/alice/tables/p/rows').json())
            # print(self.pe2.get('policies/alice/tables/p/rows').json())
            #
            # time.sleep(6)
            # print(self.pe1.get('policies/alice/tables/p/rows').json())
            # print(self.pe2.get('policies/alice/tables/p/rows').json())
            #
            # self.assertEqual(self.pe1.delete(
            #     suffix='policies/alice/rules/%s' % q1_id).status_code, 200)
            #
            # time.sleep(6)
            # print(self.pe1.get('policies/alice/tables/p/rows').json())
            # print(self.pe2.get('policies/alice/tables/p/rows').json())
            # assert False

            # eval on PE1
            helper.retry_check_function_return_value_table(
                lambda: [x['data'] for x in
                         self.pe1.get('policies/alice/tables/p/rows').json()[
                             'results']],
                [[1], [2]])

            # eval on PE2
            helper.retry_check_function_return_value_table(
                lambda: [x['data'] for x in
                         self.pe2.get('policies/alice/tables/p/rows').json()[
                             'results']],
                [[1], [2]])

            self.assertEqual(self.pe1.delete(
                suffix='policies/alice/rules/%s' % q1_id).status_code, 200)

            # eval on PE1
            helper.retry_check_function_return_value_table(
                lambda: [x['data'] for x in
                         self.pe1.get('policies/alice/tables/p/rows').json()[
                             'results']],
                [[2]])

            # eval on PE2
            helper.retry_check_function_return_value_table(
                lambda: [x['data'] for x in
                         self.pe2.get('policies/alice/tables/p/rows').json()[
                             'results']],
                [[2]])

            self.assertEqual(self.pe2.delete(
                suffix='policies/alice/rules/%s' % r_id).status_code, 200)
            helper.retry_check_function_return_value(lambda: self.pe1.get(
                'policies/alice/tables/p/rows').status_code, 404)
            helper.retry_check_function_return_value(lambda: self.pe2.get(
                'policies/alice/tables/p/rows').status_code, 404)

        except Exception:
            self.dump_nodes_logs()
            raise
|
|
@ -130,10 +130,12 @@ def api_module_path():
|
||||||
return path
|
return path
|
||||||
|
|
||||||
|
|
||||||
def test_path(file=None):
    """Return path to root of top-level tests. Joined with file if provided."""
    tests_dir = os.path.join(source_path(), "tests")
    if file is None:
        return tests_dir
    return os.path.join(tests_dir, file)
|
||||||
|
|
||||||
|
|
||||||
|
@ -420,6 +422,24 @@ def retry_til_exception(expected_exception, f):
|
||||||
raise TestFailureException("Wrong exception thrown: %s" % e)
|
raise TestFailureException("Wrong exception thrown: %s" % e)
|
||||||
|
|
||||||
|
|
||||||
|
@tenacity.retry(stop=tenacity.stop_after_attempt(20),
                wait=tenacity.wait_fixed(1))
def retry_check_function_return_value_table(f, expected_values):
    """Check if function f returns expected table.

    Rows are compared as sets of tuples, so row order and duplicates are
    ignored.  Retried (up to 20 attempts, 1s apart) via tenacity to allow
    for eventual consistency; raises TestFailureException on mismatch.
    """
    result = f()
    actual = {tuple(row) for row in result}
    expected = {tuple(row) for row in expected_values}
    surplus = actual - expected
    absent = expected - actual
    if surplus or absent:
        s = "Actual: %s\nExpected: %s\n" % (result, expected_values)
        if surplus:
            s += "Extra: %s\n" % surplus
        if absent:
            s += "Missing: %s\n" % absent
        raise TestFailureException(s)
|
||||||
|
|
||||||
|
|
||||||
class FakeRequest(object):
|
class FakeRequest(object):
|
||||||
def __init__(self, body):
|
def __init__(self, body):
|
||||||
self.body = json.dumps(body)
|
self.body = json.dumps(body)
|
||||||
|
|
13
tox.ini
13
tox.ini
|
@ -17,7 +17,7 @@ deps = -r{toxinidir}/requirements.txt
|
||||||
-r{toxinidir}/test-requirements.txt
|
-r{toxinidir}/test-requirements.txt
|
||||||
commands =
|
commands =
|
||||||
find . -type f -name "*.py[c|o]" -delete
|
find . -type f -name "*.py[c|o]" -delete
|
||||||
python setup.py testr --slowest --testr-args='{posargs}'
|
python setup.py testr --slowest --testr-args='{posargs} --concurrency=1'
|
||||||
|
|
||||||
[testenv:pep8]
|
[testenv:pep8]
|
||||||
usedevelop = False
|
usedevelop = False
|
||||||
|
@ -60,3 +60,14 @@ commands = python setup.py build_sphinx
|
||||||
|
|
||||||
[testenv:releasenotes]
|
[testenv:releasenotes]
|
||||||
commands = sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html
|
commands = sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html
|
||||||
|
|
||||||
|
[testenv:bindep]
|
||||||
|
# Do not install any requirements. We want this to be fast and work even if
|
||||||
|
# system dependencies are missing, since it's used to tell you what system
|
||||||
|
# dependencies are missing! This also means that bindep must be installed
|
||||||
|
# separately, outside of the requirements files, and develop mode disabled
|
||||||
|
# explicitly to avoid unnecessarily installing the checked-out repo too (this
|
||||||
|
# further relies on "tox.skipsdist = True" above).
|
||||||
|
deps = bindep
|
||||||
|
commands = bindep test
|
||||||
|
usedevelop = False
|
||||||
|
|
Loading…
Reference in New Issue