Add MySQL int-test helper client
Adding a helper client for MySQL will enable int-tests involving actual data operations. The test_helper.py file was also modified to push more boiler-plate code into the base class, as it has become apparent that only one 'add' 'remove' and 'verify' method is probably adequate for most datastores (given the right sane parameters). The Redis helper was also cleaned up to use the new format. Change-Id: Iff34ca0b922adc9eea8b3861b05adf0251109221 Co-Authored-By: Petr Malik <pmalik@tesora.com> Co-Authored-By: Peter Stachowski <peter@tesora.com>
This commit is contained in:
parent
7f799cf5e4
commit
2b98c53862
|
@ -39,11 +39,7 @@ class TestGroup(object):
|
|||
runner_module_name, class_prefix, runner_base_name,
|
||||
self.TEST_RUNNERS_NS)
|
||||
runner = runner_cls(*args, **kwargs)
|
||||
helper_cls = self._load_dynamic_class(
|
||||
self.TEST_HELPER_MODULE_NAME, class_prefix,
|
||||
self.TEST_HELPER_BASE_NAME, self.TEST_HELPERS_NS)
|
||||
runner._test_helper = helper_cls(self._build_class_name(
|
||||
class_prefix, self.TEST_HELPER_BASE_NAME, strip_test=True))
|
||||
runner._test_helper = self.get_helper()
|
||||
return runner
|
||||
|
||||
def get_helper(self):
|
||||
|
|
|
@ -37,7 +37,8 @@ class UserActionsGroup(TestGroup):
|
|||
def create_initialized_instance(self):
|
||||
"""Create an instance with initial users."""
|
||||
self.instance_create_runner.run_initialized_instance_create(
|
||||
with_dbs=False, with_users=True, configuration_id=None)
|
||||
with_dbs=False, with_users=True, configuration_id=None,
|
||||
create_helper_user=False)
|
||||
|
||||
@test(runs_after=[create_initialized_instance])
|
||||
def create_user_databases(self):
|
||||
|
@ -151,7 +152,7 @@ class UserActionsGroup(TestGroup):
|
|||
"""Ensure deleting a system user fails."""
|
||||
self.test_runner.run_system_user_delete()
|
||||
|
||||
@test(runs_after=[delete_system_user])
|
||||
@test(depends_on=[create_user_databases], runs_after=[delete_system_user])
|
||||
def delete_user_databases(self):
|
||||
"""Delete the user databases."""
|
||||
self.database_actions_runner.run_database_delete()
|
||||
|
|
|
@ -22,7 +22,7 @@ class CassandrabHelper(TestHelper):
|
|||
super(CassandrabHelper, self).__init__(expected_override_name)
|
||||
|
||||
def get_valid_database_definitions(self):
|
||||
return [{"name": 'db1'}, {"name": 'db2'}]
|
||||
return [{"name": 'db1'}, {"name": 'db2'}, {"name": 'db3'}]
|
||||
|
||||
def get_valid_user_definitions(self):
|
||||
return [{'name': 'user1', 'password': 'password1',
|
||||
|
|
|
@ -13,18 +13,21 @@
|
|||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from trove.tests.scenario.helpers.test_helper import TestHelper
|
||||
from trove.tests.scenario.helpers.sql_helper import SqlHelper
|
||||
|
||||
|
||||
class MysqlHelper(TestHelper):
|
||||
class MysqlHelper(SqlHelper):
|
||||
|
||||
def __init__(self, expected_override_name):
|
||||
super(MysqlHelper, self).__init__(expected_override_name)
|
||||
super(MysqlHelper, self).__init__(expected_override_name, 'mysql')
|
||||
|
||||
def get_helper_credentials(self):
|
||||
return {'name': 'lite', 'password': 'litepass', 'database': 'firstdb'}
|
||||
|
||||
def get_valid_database_definitions(self):
|
||||
return [{'name': 'db1', 'character_set': 'latin2',
|
||||
'collate': 'latin2_general_ci'},
|
||||
{'name': 'db2'}]
|
||||
{'name': 'db2'}, {"name": 'db3'}]
|
||||
|
||||
def get_valid_user_definitions(self):
|
||||
return [{'name': 'user1', 'password': 'password1', 'databases': [],
|
||||
|
|
|
@ -29,48 +29,14 @@ class RedisHelper(TestHelper):
|
|||
self.value_pattern = 'id:%s'
|
||||
self.label_value = 'value_set'
|
||||
|
||||
self.tiny_data_label = 'tiny'
|
||||
self.tiny_data_start = 1
|
||||
self.tiny_data_size = 100
|
||||
|
||||
self.tiny2_data_label = 'tiny2'
|
||||
self.tiny2_data_start = 500
|
||||
self.tiny2_data_size = 100
|
||||
|
||||
self.small_data_label = 'small'
|
||||
self.small_data_start = 1000
|
||||
self.small_data_size = 1000
|
||||
|
||||
self.large_data_label = 'large'
|
||||
self.large_data_start = 100000
|
||||
self.large_data_size = 100000
|
||||
|
||||
def create_client(self, host, *args, **kwargs):
|
||||
# Redis is set up without a password at the moment.
|
||||
password = None
|
||||
client = redis.StrictRedis(password=password, host=host)
|
||||
user = self.get_helper_credentials()
|
||||
client = redis.StrictRedis(password=user['password'], host=host)
|
||||
return client
|
||||
|
||||
# Add data overrides
|
||||
def add_tiny_data(self, host, *args, **kwargs):
|
||||
self._add_data(self.tiny_data_label, self.tiny_data_start,
|
||||
self.tiny_data_size, host, *args, **kwargs)
|
||||
|
||||
def add_tiny2_data(self, host, *args, **kwargs):
|
||||
self._add_data(self.tiny2_data_label, self.tiny2_data_start,
|
||||
self.tiny2_data_size, host, *args, **kwargs)
|
||||
|
||||
def add_small_data(self, host, *args, **kwargs):
|
||||
self._add_data(self.small_data_label, self.small_data_start,
|
||||
self.small_data_size, host, *args, **kwargs)
|
||||
|
||||
def add_large_data(self, host, *args, **kwargs):
|
||||
self._add_data(self.large_data_label, self.large_data_start,
|
||||
self.large_data_size, host, *args, **kwargs)
|
||||
|
||||
def _add_data(self, data_label, data_start, data_size, host,
|
||||
*args, **kwargs):
|
||||
"""Add the actual data here."""
|
||||
def add_actual_data(self, data_label, data_start, data_size, host,
|
||||
*args, **kwargs):
|
||||
client = self.get_client(host, *args, **kwargs)
|
||||
test_set = client.get(data_label)
|
||||
if not test_set:
|
||||
|
@ -81,25 +47,8 @@ class RedisHelper(TestHelper):
|
|||
client.set(data_label, self.label_value)
|
||||
|
||||
# Remove data overrides
|
||||
def remove_tiny_data(self, host, *args, **kwargs):
|
||||
self._remove_data(self.tiny_data_label, self.tiny_data_start,
|
||||
self.tiny_data_size, host, *args, **kwargs)
|
||||
|
||||
def remove_tiny2_data(self, host, *args, **kwargs):
|
||||
self._remove_data(self.tiny2_data_label, self.tiny2_data_start,
|
||||
self.tiny2_data_size, host, *args, **kwargs)
|
||||
|
||||
def remove_small_data(self, host, *args, **kwargs):
|
||||
self._remove_data(self.small_data_label, self.small_data_start,
|
||||
self.small_data_size, host, *args, **kwargs)
|
||||
|
||||
def remove_large_data(self, host, *args, **kwargs):
|
||||
self._remove_data(self.large_data_label, self.large_data_start,
|
||||
self.large_data_size, host, *args, **kwargs)
|
||||
|
||||
def _remove_data(self, data_label, data_start, data_size, host,
|
||||
*args, **kwargs):
|
||||
"""Remove the actual data here."""
|
||||
def remove_actual_data(self, data_label, data_start, data_size, host,
|
||||
*args, **kwargs):
|
||||
client = self.get_client(host, *args, **kwargs)
|
||||
test_set = client.get(data_label)
|
||||
if test_set:
|
||||
|
@ -109,25 +58,8 @@ class RedisHelper(TestHelper):
|
|||
client.expire(data_label, 0)
|
||||
|
||||
# Verify data overrides
|
||||
def verify_tiny_data(self, host, *args, **kwargs):
|
||||
self._verify_data(self.tiny_data_label, self.tiny_data_start,
|
||||
self.tiny_data_size, host, *args, **kwargs)
|
||||
|
||||
def verify_tiny2_data(self, host, *args, **kwargs):
|
||||
self._verify_data(self.tiny2_data_label, self.tiny2_data_start,
|
||||
self.tiny2_data_size, host, *args, **kwargs)
|
||||
|
||||
def verify_small_data(self, host, *args, **kwargs):
|
||||
self._verify_data(self.small_data_label, self.small_data_start,
|
||||
self.small_data_size, host, *args, **kwargs)
|
||||
|
||||
def verify_large_data(self, host, *args, **kwargs):
|
||||
self._verify_data(self.large_data_label, self.large_data_start,
|
||||
self.large_data_size, host, *args, **kwargs)
|
||||
|
||||
def _verify_data(self, data_label, data_start, data_size, host,
|
||||
*args, **kwargs):
|
||||
"""Verify the actual data here."""
|
||||
def verify_actual_data(self, data_label, data_start, data_size, host,
|
||||
*args, **kwargs):
|
||||
client = self.get_client(host, *args, **kwargs)
|
||||
# make sure the data is there - tests edge cases and a random one
|
||||
self._verify_data_point(client, data_label, self.label_value)
|
||||
|
|
|
@ -0,0 +1,125 @@
|
|||
# Copyright 2015 Tesora Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import sqlalchemy
|
||||
from sqlalchemy import MetaData, Table, Column, Integer
|
||||
|
||||
from trove.tests.scenario.helpers.test_helper import TestHelper
|
||||
from trove.tests.scenario.runners.test_runners import TestRunner
|
||||
|
||||
|
||||
class SqlHelper(TestHelper):
|
||||
|
||||
"""This mixin provides data handling helper functions for SQL datastores.
|
||||
"""
|
||||
|
||||
DATA_COLUMN_NAME = 'value'
|
||||
|
||||
def __init__(self, expected_override_name, protocol, port=None):
|
||||
super(SqlHelper, self).__init__(expected_override_name)
|
||||
|
||||
self.protocol = protocol
|
||||
self.port = port
|
||||
self.credentials = self.get_helper_credentials()
|
||||
self.test_schema = self.credentials['database']
|
||||
|
||||
self._schema_metadata = MetaData()
|
||||
self._data_cache = dict()
|
||||
|
||||
def create_client(self, host, *args, **kwargs):
|
||||
return sqlalchemy.create_engine(self._get_connection_string(host))
|
||||
|
||||
def _get_connection_string(self, host):
|
||||
if self.port:
|
||||
host = "%s:%d" % (host, self.port)
|
||||
|
||||
credentials = {'protocol': self.protocol,
|
||||
'host': host,
|
||||
'user': self.credentials.get('name', ''),
|
||||
'password': self.credentials.get('password', ''),
|
||||
'database': self.credentials.get('database', '')}
|
||||
return ('%(protocol)s://%(user)s:%(password)s@%(host)s/%(database)s'
|
||||
% credentials)
|
||||
|
||||
# Add data overrides
|
||||
def add_actual_data(self, data_label, data_start, data_size, host,
|
||||
*args, **kwargs):
|
||||
client = self.get_client(host, *args, **kwargs)
|
||||
self._create_data_table(client, self.test_schema, data_label)
|
||||
count = self._count_data_rows(client, self.test_schema, data_label)
|
||||
if count == 0:
|
||||
self._insert_data_rows(client, self.test_schema, data_label,
|
||||
data_size)
|
||||
|
||||
def _create_data_table(self, client, schema_name, table_name):
|
||||
Table(
|
||||
table_name, self._schema_metadata,
|
||||
Column(self.DATA_COLUMN_NAME, Integer(),
|
||||
nullable=False, default=0),
|
||||
keep_existing=True, schema=schema_name
|
||||
).create(client, checkfirst=True)
|
||||
|
||||
def _count_data_rows(self, client, schema_name, table_name):
|
||||
data_table = self._get_schema_table(schema_name, table_name)
|
||||
return client.execute(data_table.count()).scalar()
|
||||
|
||||
def _insert_data_rows(self, client, schema_name, table_name, data_size):
|
||||
data_table = self._get_schema_table(schema_name, table_name)
|
||||
client.execute(data_table.insert(), self._get_dataset(data_size))
|
||||
|
||||
def _get_schema_table(self, schema_name, table_name):
|
||||
qualified_table_name = '%s.%s' % (schema_name, table_name)
|
||||
return self._schema_metadata.tables.get(qualified_table_name)
|
||||
|
||||
def _get_dataset(self, data_size):
|
||||
cache_key = str(data_size)
|
||||
if cache_key in self._data_cache:
|
||||
return self._data_cache.get(cache_key)
|
||||
|
||||
data = self._generate_dataset(data_size)
|
||||
self._data_cache[cache_key] = data
|
||||
return data
|
||||
|
||||
def _generate_dataset(self, data_size):
|
||||
return [{self.DATA_COLUMN_NAME: value}
|
||||
for value in range(1, data_size + 1)]
|
||||
|
||||
# Remove data overrides
|
||||
def remove_actual_data(self, data_label, data_start, data_size, host,
|
||||
*args, **kwargs):
|
||||
client = self.get_client(host)
|
||||
self._drop_table(client, self.test_schema, data_label)
|
||||
|
||||
def _drop_table(self, client, schema_name, table_name):
|
||||
data_table = self._get_schema_table(schema_name, table_name)
|
||||
data_table.drop(client, checkfirst=True)
|
||||
|
||||
# Verify data overrides
|
||||
def verify_actual_data(self, data_label, data_Start, data_size, host,
|
||||
*args, **kwargs):
|
||||
expected_data = [(item[self.DATA_COLUMN_NAME],)
|
||||
for item in self._get_dataset(data_size)]
|
||||
client = self.get_client(host, *args, **kwargs)
|
||||
actual_data = self._select_data_rows(client, self.test_schema,
|
||||
data_label)
|
||||
|
||||
TestRunner.assert_equal(len(expected_data), len(actual_data),
|
||||
"Unexpected number of result rows.")
|
||||
TestRunner.assert_list_elements_equal(
|
||||
expected_data, actual_data, "Unexpected rows in the result set.")
|
||||
|
||||
def _select_data_rows(self, client, schema_name, table_name):
|
||||
data_table = self._get_schema_table(schema_name, table_name)
|
||||
return client.execute(data_table.select()).fetchall()
|
|
@ -20,10 +20,13 @@ from time import sleep
|
|||
|
||||
|
||||
class DataType(Enum):
|
||||
|
||||
"""
|
||||
Represent the type of data to add to a datastore. This allows for
|
||||
multiple 'states' of data that can be verified after actions are
|
||||
performed by Trove.
|
||||
If new entries are added here, sane values should be added to the
|
||||
_fn_data dictionary defined in TestHelper.
|
||||
"""
|
||||
|
||||
# very tiny amount of data, useful for testing replication
|
||||
|
@ -39,6 +42,7 @@ class DataType(Enum):
|
|||
|
||||
|
||||
class TestHelper(object):
|
||||
|
||||
"""
|
||||
Base class for all 'Helper' classes.
|
||||
|
||||
|
@ -48,17 +52,22 @@ class TestHelper(object):
|
|||
etc. should be handled by these classes.
|
||||
"""
|
||||
|
||||
# Define the actions that can be done on each DataType
|
||||
FN_ACTION_ADD = 'add'
|
||||
FN_ACTION_REMOVE = 'remove'
|
||||
FN_ACTION_VERIFY = 'verify'
|
||||
FN_ACTIONS = [FN_ACTION_ADD, FN_ACTION_REMOVE, FN_ACTION_VERIFY]
|
||||
# Define the actions that can be done on each DataType. When adding
|
||||
# a new action, remember to modify _data_fns
|
||||
FN_ADD = 'add'
|
||||
FN_REMOVE = 'remove'
|
||||
FN_VERIFY = 'verify'
|
||||
FN_TYPES = [FN_ADD, FN_REMOVE, FN_VERIFY]
|
||||
|
||||
# Artificial 'DataType' name to use for the methods that do the
|
||||
# actual data manipulation work.
|
||||
DT_ACTUAL = 'actual'
|
||||
|
||||
def __init__(self, expected_override_name):
|
||||
"""Initialize the helper class by creating a number of stub
|
||||
functions that each datastore specific class can chose to
|
||||
override. Basically, the functions are of the form:
|
||||
{FN_ACTION_*}_{DataType.name}_data
|
||||
{FN_TYPE}_{DataType.name}_data
|
||||
For example:
|
||||
add_tiny_data
|
||||
add_small_data
|
||||
|
@ -66,28 +75,51 @@ class TestHelper(object):
|
|||
verify_large_data
|
||||
and so on. Add and remove actions throw a SkipTest if not
|
||||
implemented, and verify actions by default do nothing.
|
||||
These methods, by default, call the corresponding *_actual_data()
|
||||
passing in 'data_label', 'data_start' and 'data_size' as defined
|
||||
for each DataType in the dictionary below.
|
||||
"""
|
||||
super(TestHelper, self).__init__()
|
||||
|
||||
self._expected_override_name = expected_override_name
|
||||
|
||||
self._ds_client = None
|
||||
self._current_host = None
|
||||
|
||||
self._expected_override_name = expected_override_name
|
||||
|
||||
# For building data access functions
|
||||
# name/fn pairs for each action
|
||||
self._data_fns = {self.FN_ACTION_ADD: {},
|
||||
self.FN_ACTION_REMOVE: {},
|
||||
self.FN_ACTION_VERIFY: {}}
|
||||
# Types of data functions to create.
|
||||
self._data_fns = {self.FN_ADD: {},
|
||||
self.FN_REMOVE: {},
|
||||
self.FN_VERIFY: {}}
|
||||
# Pattern used to create the data functions. The first parameter
|
||||
# is the function type (FN_ACTION_*), the second is the DataType
|
||||
# is the function type (FN_TYPE), the second is the DataType
|
||||
# or DT_ACTUAL.
|
||||
self.data_fn_pattern = '%s_%s_data'
|
||||
# Values to distinguish between the different DataTypes. If these
|
||||
# values don't work for a datastore, it will need to override
|
||||
# the auto-generated {FN_TYPE}_{DataType.name}_data method.
|
||||
self.DATA_START = 'start'
|
||||
self.DATA_SIZE = 'size'
|
||||
self._fn_data = {
|
||||
DataType.tiny.name: {
|
||||
self.DATA_START: 1,
|
||||
self.DATA_SIZE: 100},
|
||||
DataType.tiny2.name: {
|
||||
self.DATA_START: 500,
|
||||
self.DATA_SIZE: 100},
|
||||
DataType.small.name: {
|
||||
self.DATA_START: 1000,
|
||||
self.DATA_SIZE: 1000},
|
||||
DataType.large.name: {
|
||||
self.DATA_START: 100000,
|
||||
self.DATA_SIZE: 100000},
|
||||
}
|
||||
|
||||
self._build_data_fns()
|
||||
|
||||
########################
|
||||
# Client related methods
|
||||
########################
|
||||
################
|
||||
# Client related
|
||||
################
|
||||
def get_client(self, host, *args, **kwargs):
|
||||
"""Gets the datastore client."""
|
||||
if not self._ds_client or self._current_host != host:
|
||||
|
@ -99,9 +131,15 @@ class TestHelper(object):
|
|||
"""Create a datastore client."""
|
||||
raise SkipTest('No client defined')
|
||||
|
||||
######################
|
||||
# Data related methods
|
||||
######################
|
||||
def get_helper_credentials(self):
|
||||
"""Return the credentials that the client will be using to
|
||||
access the database.
|
||||
"""
|
||||
return {'name': None, 'password': None, 'database': None}
|
||||
|
||||
##############
|
||||
# Data related
|
||||
##############
|
||||
def add_data(self, data_type, host, *args, **kwargs):
|
||||
"""Adds data of type 'data_type' to the database. Descendant
|
||||
classes should implement a function for each DataType value
|
||||
|
@ -112,14 +150,14 @@ class TestHelper(object):
|
|||
Since this method may be called multiple times, the implemented
|
||||
'add_*_data' functions should be idempotent.
|
||||
"""
|
||||
self._perform_data_action(self.FN_ACTION_ADD, data_type, host,
|
||||
self._perform_data_action(self.FN_ADD, data_type.name, host,
|
||||
*args, **kwargs)
|
||||
|
||||
def remove_data(self, data_type, host, *args, **kwargs):
|
||||
"""Removes all data associated with 'data_type'. See
|
||||
instructions for 'add_data' for implementation guidance.
|
||||
"""
|
||||
self._perform_data_action(self.FN_ACTION_REMOVE, data_type, host,
|
||||
self._perform_data_action(self.FN_REMOVE, data_type.name, host,
|
||||
*args, **kwargs)
|
||||
|
||||
def verify_data(self, data_type, host, *args, **kwargs):
|
||||
|
@ -128,13 +166,12 @@ class TestHelper(object):
|
|||
some random elements within the set. See
|
||||
instructions for 'add_data' for implementation guidance.
|
||||
"""
|
||||
self._perform_data_action(self.FN_ACTION_VERIFY, data_type, host,
|
||||
self._perform_data_action(self.FN_VERIFY, data_type.name, host,
|
||||
*args, **kwargs)
|
||||
|
||||
def _perform_data_action(self, action_type, data_type, host,
|
||||
*args, **kwargs):
|
||||
fns = self._data_fns[action_type]
|
||||
data_fn_name = self.data_fn_pattern % (action_type, data_type.name)
|
||||
def _perform_data_action(self, fn_type, fn_name, host, *args, **kwargs):
|
||||
fns = self._data_fns[fn_type]
|
||||
data_fn_name = self.data_fn_pattern % (fn_type, fn_name)
|
||||
try:
|
||||
fns[data_fn_name](self, host, *args, **kwargs)
|
||||
except SkipTest:
|
||||
|
@ -144,55 +181,83 @@ class TestHelper(object):
|
|||
(data_fn_name, self.__class__.__name__, ex))
|
||||
|
||||
def _build_data_fns(self):
|
||||
"""Build the base data functions specified by FN_ACTION_*
|
||||
"""Build the base data functions specified by FN_TYPE_*
|
||||
for each of the types defined in the DataType class. For example,
|
||||
'add_small_data' and 'verify_large_data'. These
|
||||
functions can be overwritten by a descendant class and
|
||||
those overwritten functions will be bound before calling
|
||||
any data functions such as 'add_data' or 'remove_data'.
|
||||
functions are set to call '*_actual_data' and will pass in
|
||||
sane values for label, start and size. The '*_actual_data'
|
||||
methods should be overwritten by a descendant class, and are the
|
||||
ones that do the actual work.
|
||||
The original 'add_small_data', etc. methods can also be overridden
|
||||
if needed, and those overwritten functions will be bound before
|
||||
calling any data functions such as 'add_data' or 'remove_data'.
|
||||
"""
|
||||
for fn_type in self.FN_ACTIONS:
|
||||
for fn_type in self.FN_TYPES:
|
||||
fn_dict = self._data_fns[fn_type]
|
||||
for data_type in DataType:
|
||||
self._data_fn_builder(fn_type, data_type, fn_dict)
|
||||
self._data_fn_builder(fn_type, data_type.name, fn_dict)
|
||||
self._data_fn_builder(fn_type, self.DT_ACTUAL, fn_dict)
|
||||
self._override_data_fns()
|
||||
|
||||
def _data_fn_builder(self, fn_type, data_type, fn_dict):
|
||||
def _data_fn_builder(self, fn_type, fn_name, fn_dict):
|
||||
"""Builds the actual function with a SkipTest exception,
|
||||
and changes the name to reflect the pattern.
|
||||
"""
|
||||
name = self.data_fn_pattern % (fn_type, data_type.name)
|
||||
data_fn_name = self.data_fn_pattern % (fn_type, fn_name)
|
||||
|
||||
def data_fn(self, host, *args, **kwargs):
|
||||
# default action is to skip the test
|
||||
using_str = ''
|
||||
if self._expected_override_name != self.__class__.__name__:
|
||||
using_str = ' (using %s)' % self.__class__.__name__
|
||||
raise SkipTest("Data function '%s' not found in '%s'%s" %
|
||||
(name, self._expected_override_name, using_str))
|
||||
# Build the overridable 'actual' Data Manipulation methods
|
||||
if fn_name == self.DT_ACTUAL:
|
||||
def data_fn(self, data_label, data_start, data_size, host,
|
||||
*args, **kwargs):
|
||||
# default action is to skip the test
|
||||
using_str = ''
|
||||
if self._expected_override_name != self.__class__.__name__:
|
||||
using_str = ' (using %s)' % self.__class__.__name__
|
||||
raise SkipTest("Data function '%s' not found in '%s'%s" % (
|
||||
data_fn_name, self._expected_override_name, using_str))
|
||||
else:
|
||||
def data_fn(self, host, *args, **kwargs):
|
||||
# call the corresponding 'actual' method
|
||||
fns = self._data_fns[fn_type]
|
||||
var_dict = self._fn_data[fn_name]
|
||||
data_start = var_dict[self.DATA_START]
|
||||
data_size = var_dict[self.DATA_SIZE]
|
||||
actual_fn_name = self.data_fn_pattern % (
|
||||
fn_type, self.DT_ACTUAL)
|
||||
try:
|
||||
fns[actual_fn_name](self, fn_name, data_start, data_size,
|
||||
host, *args, **kwargs)
|
||||
except SkipTest:
|
||||
raise
|
||||
except Exception as ex:
|
||||
raise RuntimeError("Error calling %s from class %s: %s" % (
|
||||
data_fn_name, self.__class__.__name__, ex))
|
||||
|
||||
data_fn.__name__ = data_fn.func_name = name
|
||||
fn_dict[name] = data_fn
|
||||
data_fn.__name__ = data_fn.func_name = data_fn_name
|
||||
fn_dict[data_fn_name] = data_fn
|
||||
|
||||
def _override_data_fns(self):
|
||||
"""Bind the override methods to the dict."""
|
||||
members = inspect.getmembers(self.__class__,
|
||||
predicate=inspect.ismethod)
|
||||
for fn_action in self.FN_ACTIONS:
|
||||
fns = self._data_fns[fn_action]
|
||||
for fn_type in self.FN_TYPES:
|
||||
fns = self._data_fns[fn_type]
|
||||
for name, fn in members:
|
||||
if name in fns:
|
||||
fns[name] = fn
|
||||
|
||||
#############################
|
||||
# Replication related methods
|
||||
#############################
|
||||
#####################
|
||||
# Replication related
|
||||
#####################
|
||||
def wait_for_replicas(self):
|
||||
"""Wait for data to propagate to all the replicas. Datastore
|
||||
specific overrides could increase (or decrease) this delay.
|
||||
"""
|
||||
sleep(30)
|
||||
|
||||
#######################
|
||||
# Database/User related
|
||||
#######################
|
||||
def get_valid_database_definitions(self):
|
||||
"""Return a list of valid database JSON definitions.
|
||||
These definitions will be used by tests that create databases.
|
||||
|
@ -207,6 +272,44 @@ class TestHelper(object):
|
|||
"""
|
||||
return list()
|
||||
|
||||
def get_non_existing_database_definition(self):
|
||||
"""Return a valid JSON definition for a non-existing database.
|
||||
This definition will be used by negative database tests.
|
||||
The database will not be created by any of the tests.
|
||||
Return None if the datastore does not support databases.
|
||||
"""
|
||||
valid_defs = self.get_valid_database_definitions()
|
||||
return self._get_non_existing_definition(valid_defs)
|
||||
|
||||
def get_non_existing_user_definition(self):
|
||||
"""Return a valid JSON definition for a non-existing user.
|
||||
This definition will be used by negative user tests.
|
||||
The user will not be created by any of the tests.
|
||||
Return None if the datastore does not support users.
|
||||
"""
|
||||
valid_defs = self.get_valid_user_definitions()
|
||||
return self._get_non_existing_definition(valid_defs)
|
||||
|
||||
def _get_non_existing_definition(self, existing_defs):
|
||||
"""This will create a unique definition for a non-existing object
|
||||
by randomizing one of an existing object.
|
||||
"""
|
||||
if existing_defs:
|
||||
non_existing_def = dict(existing_defs[0])
|
||||
while non_existing_def in existing_defs:
|
||||
non_existing_def = self._randomize_on_name(non_existing_def)
|
||||
return non_existing_def
|
||||
|
||||
return None
|
||||
|
||||
def _randomize_on_name(self, definition):
|
||||
def_copy = dict(definition)
|
||||
def_copy['name'] = ''.join([def_copy['name'], 'rnd'])
|
||||
return def_copy
|
||||
|
||||
#############################
|
||||
# Configuration Group related
|
||||
#############################
|
||||
def get_dynamic_group(self):
|
||||
"""Return a definition of a dynamic configuration group.
|
||||
A dynamic group should contain only properties that do not require
|
||||
|
|
|
@ -76,7 +76,9 @@ class DatabaseActionsRunner(TestRunner):
|
|||
|
||||
self.assert_pagination_match(list_page, full_list, 0, limit)
|
||||
if marker:
|
||||
self.assert_equal(list_page[-1], marker.name,
|
||||
last_database = list_page[-1]
|
||||
expected_marker = last_database.name
|
||||
self.assert_equal(expected_marker, marker,
|
||||
"Pagination marker should be the last element "
|
||||
"in the page.")
|
||||
list_page = self.auth_client.databases.list(
|
||||
|
@ -151,8 +153,9 @@ class DatabaseActionsRunner(TestRunner):
|
|||
def run_nonexisting_database_delete(self, expected_http_code=202):
|
||||
# Deleting a non-existing database is expected to succeed as if the
|
||||
# database was deleted.
|
||||
db_def = self.test_helper.get_non_existing_database_definition()
|
||||
self.assert_database_delete(
|
||||
self.instance_info.id, 'justashadow', expected_http_code)
|
||||
self.instance_info.id, db_def['name'], expected_http_code)
|
||||
|
||||
def run_system_database_delete(
|
||||
self, expected_exception=exceptions.BadRequest,
|
||||
|
|
|
@ -45,7 +45,7 @@ class InstanceCreateRunner(TestRunner):
|
|||
info = self.assert_instance_create(
|
||||
name, flavor, trove_volume_size, [], [], None, None,
|
||||
CONFIG.dbaas_datastore, CONFIG.dbaas_datastore_version,
|
||||
expected_states, expected_http_code)
|
||||
expected_states, expected_http_code, create_helper_user=True)
|
||||
|
||||
# Update the shared instance info.
|
||||
self.instance_info.databases = info.databases
|
||||
|
@ -77,7 +77,8 @@ class InstanceCreateRunner(TestRunner):
|
|||
|
||||
def run_initialized_instance_create(
|
||||
self, with_dbs=True, with_users=True, configuration_id=None,
|
||||
expected_states=['BUILD', 'ACTIVE'], expected_http_code=200):
|
||||
expected_states=['BUILD', 'ACTIVE'], expected_http_code=200,
|
||||
create_helper_user=True):
|
||||
# TODO(pmalik): Instance create should return 202 Accepted (cast)
|
||||
# rather than 200 OK (call).
|
||||
name = self.instance_info.name
|
||||
|
@ -97,7 +98,8 @@ class InstanceCreateRunner(TestRunner):
|
|||
self.init_inst_dbs, self.init_inst_users,
|
||||
self.init_config_group_id, None,
|
||||
CONFIG.dbaas_datastore, CONFIG.dbaas_datastore_version,
|
||||
expected_states, expected_http_code)
|
||||
expected_states, expected_http_code,
|
||||
create_helper_user=create_helper_user)
|
||||
|
||||
self.init_inst_id = info.id
|
||||
else:
|
||||
|
@ -121,7 +123,7 @@ class InstanceCreateRunner(TestRunner):
|
|||
self, name, flavor, trove_volume_size,
|
||||
database_definitions, user_definitions,
|
||||
configuration_id, root_password, datastore, datastore_version,
|
||||
expected_states, expected_http_code):
|
||||
expected_states, expected_http_code, create_helper_user=False):
|
||||
"""This assert method executes a 'create' call and verifies the server
|
||||
response. It neither waits for the instance to become available
|
||||
nor it performs any other validations itself.
|
||||
|
@ -134,6 +136,21 @@ class InstanceCreateRunner(TestRunner):
|
|||
users = [{'name': item['name'], 'password': item['password']}
|
||||
for item in user_definitions]
|
||||
|
||||
# Here we add helper user/database if any.
|
||||
if create_helper_user:
|
||||
helper_db_def, helper_user_def = self.build_helper_defs()
|
||||
if helper_db_def:
|
||||
self.report.log(
|
||||
"Appending a helper database '%s' to the instance "
|
||||
"definition." % helper_db_def['name'])
|
||||
databases.append(helper_db_def)
|
||||
if helper_user_def:
|
||||
self.report.log(
|
||||
"Appending a helper user '%s:%s' to the instance "
|
||||
"definition."
|
||||
% (helper_user_def['name'], helper_user_def['password']))
|
||||
users.append(helper_user_def)
|
||||
|
||||
instance_info = InstanceTestInfo()
|
||||
instance_info.name = name
|
||||
instance_info.databases = databases
|
||||
|
@ -259,8 +276,8 @@ class InstanceCreateRunner(TestRunner):
|
|||
self.assert_is_none(full_list.next,
|
||||
"Unexpected pagination in the database list.")
|
||||
listed_names = [database.name for database in full_list]
|
||||
self.assert_list_elements_equal(expected_names, listed_names,
|
||||
"Mismatch in instance databases.")
|
||||
self.assert_is_sublist(expected_names, listed_names,
|
||||
"Mismatch in instance databases.")
|
||||
|
||||
def _get_names(self, definitions):
|
||||
return [item['name'] for item in definitions]
|
||||
|
@ -271,8 +288,8 @@ class InstanceCreateRunner(TestRunner):
|
|||
self.assert_is_none(full_list.next,
|
||||
"Unexpected pagination in the user list.")
|
||||
listed_names = [user.name for user in full_list]
|
||||
self.assert_list_elements_equal(expected_names, listed_names,
|
||||
"Mismatch in instance users.")
|
||||
self.assert_is_sublist(expected_names, listed_names,
|
||||
"Mismatch in instance users.")
|
||||
|
||||
# Verify that user definitions include only created databases.
|
||||
all_databases = self._get_names(
|
||||
|
|
|
@ -33,6 +33,7 @@ CONF = cfg.CONF
|
|||
|
||||
|
||||
class TestRunner(object):
|
||||
|
||||
"""
|
||||
Base class for all 'Runner' classes.
|
||||
|
||||
|
@ -165,8 +166,8 @@ class TestRunner(object):
|
|||
Use the current instance's datastore if None.
|
||||
"""
|
||||
try:
|
||||
return CONF.get(
|
||||
datastore or self.instance_info.dbaas_datastore).get(name)
|
||||
datastore = datastore or self.instance_info.dbaas_datastore
|
||||
return CONF.get(datastore).get(name)
|
||||
except NoSuchOptError:
|
||||
return CONF.get(name)
|
||||
|
||||
|
@ -177,7 +178,7 @@ class TestRunner(object):
|
|||
def get_existing_instance(self):
|
||||
if self.is_using_existing_instance:
|
||||
instance_id = os.environ.get(self.USE_INSTANCE_ID_FLAG)
|
||||
return self._get_instance_info(instance_id)
|
||||
return self.get_instance(instance_id)
|
||||
|
||||
return None
|
||||
|
||||
|
@ -217,23 +218,43 @@ class TestRunner(object):
|
|||
self.fail(str(task.poll_exception()))
|
||||
|
||||
def _assert_instance_states(self, instance_id, expected_states,
|
||||
fast_fail_status='ERROR'):
|
||||
for status in expected_states:
|
||||
start_time = timer.time()
|
||||
try:
|
||||
poll_until(lambda: self._has_status(
|
||||
instance_id, status, fast_fail_status=fast_fail_status),
|
||||
sleep_time=self.def_sleep_time,
|
||||
time_out=self.def_timeout)
|
||||
self.report.log("Instance has gone '%s' in %s." %
|
||||
(status, self._time_since(start_time)))
|
||||
except exception.PollTimeOut:
|
||||
self.report.log(
|
||||
"Status of instance '%s' did not change to '%s' after %s."
|
||||
% (instance_id, status, self._time_since(start_time)))
|
||||
return False
|
||||
fast_fail_status='ERROR',
|
||||
require_all_states=False):
|
||||
"""Keep polling for the expected instance states until the instance
|
||||
acquires either the last or fast-fail state.
|
||||
|
||||
return True
|
||||
If the instance state does not match the state expected at the time of
|
||||
polling (and 'require_all_states' is not set) the code assumes the
|
||||
instance had already acquired before and moves to the next expected
|
||||
state.
|
||||
"""
|
||||
|
||||
found = False
|
||||
for status in expected_states:
|
||||
if require_all_states or found or self._has_status(
|
||||
instance_id, status, fast_fail_status=fast_fail_status):
|
||||
found = True
|
||||
start_time = timer.time()
|
||||
try:
|
||||
poll_until(lambda: self._has_status(
|
||||
instance_id, status,
|
||||
fast_fail_status=fast_fail_status),
|
||||
sleep_time=self.def_sleep_time,
|
||||
time_out=self.def_timeout)
|
||||
self.report.log("Instance has gone '%s' in %s." %
|
||||
(status, self._time_since(start_time)))
|
||||
except exception.PollTimeOut:
|
||||
self.report.log(
|
||||
"Status of instance '%s' did not change to '%s' "
|
||||
"after %s."
|
||||
% (instance_id, status, self._time_since(start_time)))
|
||||
return False
|
||||
else:
|
||||
self.report.log(
|
||||
"Instance state was not '%s', moving to the next expected "
|
||||
"state." % status)
|
||||
|
||||
return found
|
||||
|
||||
def _time_since(self, start_time):
|
||||
return '%.1fs' % (timer.time() - start_time)
|
||||
|
@ -290,11 +311,11 @@ class TestRunner(object):
|
|||
|
||||
def _has_status(self, instance_id, status, fast_fail_status=None):
|
||||
instance = self.get_instance(instance_id)
|
||||
self.report.log("Waiting for instance '%s' to become '%s': %s"
|
||||
self.report.log("Polling instance '%s' for state '%s', was '%s'."
|
||||
% (instance_id, status, instance.status))
|
||||
if fast_fail_status and instance.status == fast_fail_status:
|
||||
raise RuntimeError("Instance '%s' acquired a fast-fail status: %s"
|
||||
% (instance_id, status))
|
||||
% (instance_id, instance.status))
|
||||
return instance.status == status
|
||||
|
||||
def get_instance(self, instance_id):
|
||||
|
@ -319,3 +340,46 @@ class TestRunner(object):
|
|||
self.assert_is_not_none(flavor, "Flavor '%s' not found." % flavor_name)
|
||||
|
||||
return flavor
|
||||
|
||||
def copy_dict(self, d, ignored_keys=None):
|
||||
return {k: v for k, v in d.items()
|
||||
if not ignored_keys or k not in ignored_keys}
|
||||
|
||||
def create_test_helper_on_instance(self, instance_id):
|
||||
"""Here we add a helper user/database, if any, to a given instance
|
||||
via the Trove API.
|
||||
These are for internal use by the test framework and should
|
||||
not be changed by individual test-cases.
|
||||
"""
|
||||
database_def, user_def = self.build_helper_defs()
|
||||
if database_def:
|
||||
self.report.log(
|
||||
"Creating a helper database '%s' on instance: %s"
|
||||
% (database_def['name'], instance_id))
|
||||
self.auth_client.databases.create(instance_id, [database_def])
|
||||
|
||||
if user_def:
|
||||
self.report.log(
|
||||
"Creating a helper user '%s:%s' on instance: %s"
|
||||
% (user_def['name'], user_def['password'], instance_id))
|
||||
self.auth_client.users.create(instance_id, [user_def])
|
||||
|
||||
def build_helper_defs(self):
|
||||
"""Build helper database and user JSON definitions if credentials
|
||||
are defined by the helper.
|
||||
"""
|
||||
database_def = None
|
||||
user_def = None
|
||||
credentials = self.test_helper.get_helper_credentials()
|
||||
if credentials:
|
||||
database = credentials.get('database')
|
||||
if database:
|
||||
database_def = {'name': database}
|
||||
|
||||
username = credentials.get('name')
|
||||
if username:
|
||||
password = credentials.get('password', '')
|
||||
user_def = {'name': username, 'password': password,
|
||||
'databases': [{'name': database}]}
|
||||
|
||||
return database_def, user_def
|
||||
|
|
|
@ -120,31 +120,34 @@ class UserActionsRunner(TestRunner):
|
|||
def run_user_create_with_blank_name(
|
||||
self, expected_exception=exceptions.BadRequest,
|
||||
expected_http_code=400):
|
||||
usr_def = self.test_helper.get_non_existing_user_definition()
|
||||
# Test with missing user name attribute.
|
||||
no_name_usr_def = self.copy_dict(usr_def, ignored_keys=['name'])
|
||||
self.assert_users_create_failure(
|
||||
self.instance_info.id,
|
||||
{'password': 'password1', 'databases': []},
|
||||
self.instance_info.id, no_name_usr_def,
|
||||
expected_exception, expected_http_code)
|
||||
|
||||
# Test with empty user name attribute.
|
||||
blank_name_usr_def = self.copy_dict(usr_def)
|
||||
blank_name_usr_def.update({'name': ''})
|
||||
self.assert_users_create_failure(
|
||||
self.instance_info.id,
|
||||
{'name': '', 'password': 'password1', 'databases': []},
|
||||
self.instance_info.id, blank_name_usr_def,
|
||||
expected_exception, expected_http_code)
|
||||
|
||||
def run_user_create_with_blank_password(
|
||||
self, expected_exception=exceptions.BadRequest,
|
||||
expected_http_code=400):
|
||||
usr_def = self.test_helper.get_non_existing_user_definition()
|
||||
# Test with missing password attribute.
|
||||
no_pass_usr_def = self.copy_dict(usr_def, ignored_keys=['password'])
|
||||
self.assert_users_create_failure(
|
||||
self.instance_info.id,
|
||||
{'name': 'nopassguy', 'databases': []},
|
||||
self.instance_info.id, no_pass_usr_def,
|
||||
expected_exception, expected_http_code)
|
||||
|
||||
# Test with missing databases attribute.
|
||||
no_db_usr_def = self.copy_dict(usr_def, ignored_keys=['databases'])
|
||||
self.assert_users_create_failure(
|
||||
self.instance_info.id,
|
||||
{'name': 'nodbguy', 'password': 'password1'},
|
||||
self.instance_info.id, no_db_usr_def,
|
||||
expected_exception, expected_http_code)
|
||||
|
||||
def run_existing_user_create(
|
||||
|
@ -224,9 +227,13 @@ class UserActionsRunner(TestRunner):
|
|||
expected_exception, expected_http_code)
|
||||
|
||||
def run_user_attribute_update(self, expected_http_code=202):
|
||||
update_attribites = {'name': 'dblessguy', 'password': 'password2'}
|
||||
updated_def = self.user_defs[0]
|
||||
# Update the name by appending a random string to it.
|
||||
updated_name = ''.join([updated_def['name'], 'upd'])
|
||||
update_attribites = {'name': updated_name,
|
||||
'password': 'password2'}
|
||||
self.assert_user_attribute_update(
|
||||
self.instance_info.id, self.user_defs[0],
|
||||
self.instance_info.id, updated_def,
|
||||
update_attribites, expected_http_code)
|
||||
|
||||
def assert_user_attribute_update(self, instance_id, user_def,
|
||||
|
@ -270,8 +277,9 @@ class UserActionsRunner(TestRunner):
|
|||
def run_nonexisting_user_show(
|
||||
self, expected_exception=exceptions.NotFound,
|
||||
expected_http_code=404):
|
||||
usr_def = self.test_helper.get_non_existing_user_definition()
|
||||
self.assert_user_show_failure(
|
||||
self.instance_info.id, {'name': 'nonexistingusr'},
|
||||
self.instance_info.id, {'name': usr_def['name']},
|
||||
expected_exception, expected_http_code)
|
||||
|
||||
def assert_user_show_failure(self, instance_id, user_def,
|
||||
|
@ -297,16 +305,18 @@ class UserActionsRunner(TestRunner):
|
|||
|
||||
def run_nonexisting_user_update(self, expected_http_code=404):
|
||||
# Test valid update on a non-existing user.
|
||||
user_def = {'name': 'justashadow'}
|
||||
usr_def = self.test_helper.get_non_existing_user_definition()
|
||||
update_def = {'name': usr_def['name']}
|
||||
self.assert_user_attribute_update_failure(
|
||||
self.instance_info.id, user_def, user_def,
|
||||
self.instance_info.id, update_def, update_def,
|
||||
exceptions.NotFound, expected_http_code)
|
||||
|
||||
def run_nonexisting_user_delete(
|
||||
self, expected_exception=exceptions.NotFound,
|
||||
expected_http_code=404):
|
||||
usr_def = self.test_helper.get_non_existing_user_definition()
|
||||
self.assert_user_delete_failure(
|
||||
self.instance_info.id, {'name': 'justashadow'},
|
||||
self.instance_info.id, {'name': usr_def['name']},
|
||||
expected_exception, expected_http_code)
|
||||
|
||||
def assert_user_delete_failure(
|
||||
|
|
Loading…
Reference in New Issue