Fixes PEP8 issues as reported by tox and unittests
* Changes docstrings from single to double quotes. * Reformats multiline declarations with non-visual indentation. * Turns rsyslog resource into a plugin * Moves plugin-specific unittests into their respective plugins * Add tox support to plugins with tests Change-Id: Ife172b165004c366ad64faa7c00afceb8e99c76d
This commit is contained in:
@@ -27,7 +27,7 @@ def logsafe_str(data):
|
||||
|
||||
|
||||
def get_object_namespace(obj):
|
||||
'''Attempts to return a dotted string name representation of the general
|
||||
"""Attempts to return a dotted string name representation of the general
|
||||
form 'package.module.class.obj' for an object that has an __mro__ attribute
|
||||
|
||||
Designed to let you to name loggers inside objects in such a way
|
||||
@@ -40,7 +40,7 @@ def get_object_namespace(obj):
|
||||
and is then further improved by a series of functions until
|
||||
one of them fails.
|
||||
The value of the last successful name-setting method is returned.
|
||||
'''
|
||||
"""
|
||||
|
||||
try:
|
||||
return parse_class_namespace_string(str(obj.__mro__[0]))
|
||||
@@ -61,9 +61,10 @@ def get_object_namespace(obj):
|
||||
|
||||
|
||||
def parse_class_namespace_string(class_string):
|
||||
'''Parses the dotted namespace out of an object's __mro__.
|
||||
Returns a string
|
||||
'''
|
||||
"""
|
||||
Parses the dotted namespace out of an object's __mro__. Returns a string
|
||||
"""
|
||||
|
||||
class_string = str(class_string)
|
||||
class_string = class_string.replace("'>", "")
|
||||
class_string = class_string.replace("<class '", "")
|
||||
@@ -71,10 +72,9 @@ def parse_class_namespace_string(class_string):
|
||||
|
||||
|
||||
def getLogger(log_name, log_level=None):
|
||||
'''Convenience function to create a logger and set it's log level at the
|
||||
same time.
|
||||
Log level defaults to logging.DEBUG
|
||||
'''
|
||||
"""Convenience function to create a logger and set it's log level at the
|
||||
same time. Log level defaults to logging.DEBUG
|
||||
"""
|
||||
|
||||
# Create new log
|
||||
new_log = logging.getLogger(name=log_name)
|
||||
@@ -97,11 +97,11 @@ def getLogger(log_name, log_level=None):
|
||||
|
||||
def setup_new_cchandler(
|
||||
log_file_name, log_dir=None, encoding=None, msg_format=None):
|
||||
'''Creates a log handler named <log_file_name> configured to save the log
|
||||
"""Creates a log handler named <log_file_name> configured to save the log
|
||||
in <log_dir> or <os environment variable 'CAFE_TEST_LOG_PATH'>,
|
||||
in that order or precedent.
|
||||
File handler defaults: 'a+', encoding=encoding or "UTF-8", delay=True
|
||||
'''
|
||||
"""
|
||||
|
||||
log_dir = log_dir or os.getenv('CAFE_TEST_LOG_PATH')
|
||||
|
||||
@@ -131,10 +131,10 @@ def setup_new_cchandler(
|
||||
|
||||
|
||||
def log_results(result, test_id=None, verbosity=0):
|
||||
"""Replicates the printing functionality of unittest's runner.run() but
|
||||
log's instead of prints
|
||||
"""
|
||||
@summary: Replicates the printing functionality of unittest's
|
||||
runner.run() but log's instead of prints
|
||||
"""
|
||||
|
||||
infos = []
|
||||
expected_fails = unexpected_successes = skipped = 0
|
||||
|
||||
@@ -198,7 +198,8 @@ def log_errors(label, result, errors):
|
||||
|
||||
|
||||
def init_root_log_handler():
|
||||
# Setup root log handler if the root logger doesn't already have one
|
||||
"""Setup root log handler if the root logger doesn't already have one"""
|
||||
|
||||
if not getLogger('').handlers:
|
||||
master_log_file_name = os.getenv('CAFE_MASTER_LOG_FILE_NAME')
|
||||
getLogger('').addHandler(
|
||||
@@ -224,6 +225,7 @@ def log_info_block(
|
||||
using newlines. Otherwise, each line of the info block will be logged
|
||||
as seperate log lines (with seperate timestamps, etc.)
|
||||
"""
|
||||
|
||||
output = []
|
||||
try:
|
||||
info = info if isinstance(info, OrderedDict) else OrderedDict(info)
|
||||
@@ -233,7 +235,8 @@ def log_info_block(
|
||||
return
|
||||
|
||||
separator = str(separator or "{0}".format('=' * 56))
|
||||
max_length = len(max([k for k in info.keys() if info.get(k)], key=len)) + 3
|
||||
max_length = \
|
||||
len(max([k for k in info.keys() if info.get(k)], key=len)) + 3
|
||||
|
||||
output.append(separator)
|
||||
if heading:
|
||||
|
||||
@@ -14,9 +14,9 @@ See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
'''
|
||||
"""
|
||||
@summary: Generic Classes for test statistics
|
||||
'''
|
||||
"""
|
||||
from datetime import datetime
|
||||
import os
|
||||
import csv
|
||||
@@ -24,7 +24,7 @@ import sys
|
||||
|
||||
|
||||
class TestRunMetrics(object):
|
||||
'''
|
||||
"""
|
||||
@summary: Generic Timer used to track any time span
|
||||
@ivar start_time: Timestamp from the start of the timer
|
||||
@type start_time: C{datetime}
|
||||
@@ -34,7 +34,7 @@ class TestRunMetrics(object):
|
||||
of the runner and the default unittest.TestCase architecture to make
|
||||
this auto-magically work with unittest properly.
|
||||
This should be a child of unittest.TestResult
|
||||
'''
|
||||
"""
|
||||
def __init__(self):
|
||||
self.total_tests = 0
|
||||
self.total_passed = 0
|
||||
@@ -45,7 +45,7 @@ class TestRunMetrics(object):
|
||||
|
||||
|
||||
class TestResultTypes(object):
|
||||
'''
|
||||
"""
|
||||
@summary: Types dictating an individual Test Case result
|
||||
@cvar PASSED: Test has passed
|
||||
@type PASSED: C{str}
|
||||
@@ -58,7 +58,8 @@ class TestResultTypes(object):
|
||||
@cvar ERRORED: Test has errored
|
||||
@type ERRORED: C{str}
|
||||
@note: This is essentially an Enumerated Type
|
||||
'''
|
||||
"""
|
||||
|
||||
PASSED = "Passed"
|
||||
FAILED = "Failed"
|
||||
SKIPPED = "Skipped" # Not Supported Yet
|
||||
@@ -68,39 +69,43 @@ class TestResultTypes(object):
|
||||
|
||||
|
||||
class TestTimer(object):
|
||||
'''
|
||||
"""
|
||||
@summary: Generic Timer used to track any time span
|
||||
@ivar start_time: Timestamp from the start of the timer
|
||||
@type start_time: C{datetime}
|
||||
@ivar stop_time: Timestamp of the end of the timer
|
||||
@type stop_time: C{datetime}
|
||||
'''
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.start_time = None
|
||||
self.stop_time = None
|
||||
|
||||
def start(self):
|
||||
'''
|
||||
"""
|
||||
@summary: Starts this timer
|
||||
@return: None
|
||||
@rtype: None
|
||||
'''
|
||||
"""
|
||||
|
||||
self.start_time = datetime.now()
|
||||
|
||||
def stop(self):
|
||||
'''
|
||||
"""
|
||||
@summary: Stops this timer
|
||||
@return: None
|
||||
@rtype: None
|
||||
'''
|
||||
"""
|
||||
|
||||
self.stop_time = datetime.now()
|
||||
|
||||
def get_elapsed_time(self):
|
||||
'''
|
||||
"""
|
||||
@summary: Convenience method for total elapsed time
|
||||
@rtype: C{datetime}
|
||||
@return: Elapsed time for this timer. C{None} if timer has not started
|
||||
'''
|
||||
"""
|
||||
|
||||
elapsedTime = None
|
||||
if self.start_time is not None:
|
||||
if self.stop_time is not None:
|
||||
@@ -108,36 +113,36 @@ class TestTimer(object):
|
||||
else:
|
||||
elapsedTime = (datetime.now() - self.start_time)
|
||||
else:
|
||||
''' Timer hasn't started, error on the side of caution '''
|
||||
# Timer hasn't started, error on the side of caution
|
||||
rightNow = datetime.now()
|
||||
elapsedTime = (rightNow - rightNow)
|
||||
return(elapsedTime)
|
||||
|
||||
|
||||
class CSVWriter(object):
|
||||
'''
|
||||
'''
|
||||
"""CSVWriter"""
|
||||
|
||||
def __init__(self, headers, file_name, log_dir='.', start_clean=False):
|
||||
self.file_mode = 'a'
|
||||
self.headers = headers
|
||||
|
||||
#create the dir if it does not exist
|
||||
# create the dir if it does not exist
|
||||
if not os.path.exists(log_dir):
|
||||
os.makedirs(log_dir)
|
||||
|
||||
#get full path
|
||||
# get full path
|
||||
self.full_path = os.path.normpath(os.path.join(log_dir, file_name))
|
||||
|
||||
#remove file if you want a clean log file
|
||||
# remove file if you want a clean log file
|
||||
if start_clean:
|
||||
''' Force the file to be overwritten before any writing '''
|
||||
# Force the file to be overwritten before any writing
|
||||
try:
|
||||
os.remove(self.full_path)
|
||||
except OSError:
|
||||
sys.stderr.write('File not writable\n')
|
||||
|
||||
if os.path.exists(self.full_path) is False:
|
||||
''' Write out the header to the stats log '''
|
||||
# Write out the header to the stats log
|
||||
self.writerow(self.headers)
|
||||
|
||||
def writerow(self, row_list):
|
||||
|
||||
@@ -61,7 +61,7 @@ class ConfiguratorCLI(object):
|
||||
parser = argparse.ArgumentParser()
|
||||
subparsers = parser.add_subparsers(dest="subcommand")
|
||||
|
||||
#Engine configuration subparser
|
||||
# Engine configuration subparser
|
||||
subparser_engine_config = subparsers.add_parser('engine')
|
||||
subparser_engine_config.add_argument(
|
||||
'--init-install', action=EngineActions.InitInstall, nargs=0)
|
||||
@@ -72,8 +72,7 @@ class ConfiguratorCLI(object):
|
||||
|
||||
plugins_add_parser = plugin_args.add_parser('add')
|
||||
plugins_add_parser.add_argument(
|
||||
'plugin_dir', action=PluginActions.AddPluginCache,
|
||||
type=str)
|
||||
'plugin_dir', action=PluginActions.AddPluginCache, type=str)
|
||||
|
||||
plugins_add_parser = plugin_args.add_parser('list')
|
||||
plugins_add_parser.add_argument(
|
||||
@@ -81,8 +80,8 @@ class ConfiguratorCLI(object):
|
||||
|
||||
plugins_install_parser = plugin_args.add_parser('install')
|
||||
plugins_install_parser.add_argument(
|
||||
'plugin-name', action=PluginActions.InstallPlugin,
|
||||
type=str, nargs='*')
|
||||
'plugin-name', action=PluginActions.InstallPlugin, type=str,
|
||||
nargs='*')
|
||||
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
@@ -186,6 +186,7 @@ class TestEnvManager(object):
|
||||
reason, it sets the CAFE_TEST_REPO_PATH directly as well as
|
||||
CAFE_TEST_REPO_PACKAGE
|
||||
"""
|
||||
|
||||
return os.path.expanduser(
|
||||
self.engine_config_interface.default_test_repo)
|
||||
|
||||
@@ -218,8 +219,8 @@ class TestEnvManager(object):
|
||||
def test_logging_verbosity(self):
|
||||
"""Currently supports STANDARD and VERBOSE.
|
||||
TODO: Implement 'OFF' option that adds null handlers to all loggers
|
||||
|
||||
"""
|
||||
|
||||
return self.engine_config_interface.logging_verbosity
|
||||
|
||||
@_lazy_property
|
||||
@@ -234,6 +235,7 @@ class EngineDirectoryManager(object):
|
||||
Converts the top-level keys of this dictionary into a namespace.
|
||||
Raises exception if any self.keys() collide with internal attributes.
|
||||
"""
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
dict.__init__(self, **kwargs)
|
||||
collisions = set(kwargs) & set(dir(self))
|
||||
@@ -279,7 +281,8 @@ class EngineDirectoryManager(object):
|
||||
all changes made to the default .opencafe directory structure since
|
||||
opencafe's release.
|
||||
"""
|
||||
#Rename .cloudcafe to .opencafe
|
||||
|
||||
# Rename .cloudcafe to .opencafe
|
||||
if os.path.exists(cls._OLD_ROOT_DIR):
|
||||
if os.path.exists(cls.OPENCAFE_ROOT_DIR):
|
||||
print cls.wrapper.fill("* * ERROR * * *")
|
||||
@@ -312,7 +315,8 @@ class EngineDirectoryManager(object):
|
||||
@classmethod
|
||||
def set_engine_directory_permissions(cls):
|
||||
"""Recursively changes permissions default engine directory so that
|
||||
everything is user-owned"""
|
||||
everything is user-owned
|
||||
"""
|
||||
|
||||
PlatformManager.safe_chown(cls.OPENCAFE_ROOT_DIR)
|
||||
for root, dirs, files in os.walk(cls.OPENCAFE_ROOT_DIR):
|
||||
@@ -336,11 +340,11 @@ class EngineConfigManager(object):
|
||||
wrapper = textwrap.TextWrapper(
|
||||
initial_indent="* ", subsequent_indent=" ", break_long_words=False)
|
||||
|
||||
#Old Config Stuff for backwards compatability testing only
|
||||
# Old Config Stuff for backwards compatability testing only
|
||||
_OLD_ENGINE_CONFIG_PATH = os.path.join(
|
||||
EngineDirectoryManager.OPENCAFE_ROOT_DIR, 'configs', 'engine.config')
|
||||
|
||||
#Openafe config defaults
|
||||
# Openafe config defaults
|
||||
ENGINE_CONFIG_PATH = os.path.join(
|
||||
EngineDirectoryManager.OPENCAFE_ROOT_DIR, 'engine.config')
|
||||
|
||||
@@ -419,7 +423,7 @@ class EngineConfigManager(object):
|
||||
"Moving engine.config file from {0} to {1}".format(
|
||||
cls._OLD_ENGINE_CONFIG_PATH, cls.ENGINE_CONFIG_PATH))
|
||||
config = cls.read_config_file(cls._OLD_ENGINE_CONFIG_PATH)
|
||||
#Move to new location
|
||||
# Move to new location
|
||||
os.rename(cls._OLD_ENGINE_CONFIG_PATH, cls.ENGINE_CONFIG_PATH)
|
||||
|
||||
# Read config from current default location ('.opencafe/engine.config)
|
||||
@@ -597,6 +601,7 @@ class EnginePluginManager(object):
|
||||
""" Handles moving all plugin src data from package into the user's
|
||||
.opencafe folder for installation by the cafe-config tool.
|
||||
"""
|
||||
|
||||
default_dest = EngineDirectoryManager.OPENCAFE_SUB_DIRS.PLUGIN_CACHE
|
||||
plugins = os.walk(plugins_src_dir).next()[1]
|
||||
|
||||
@@ -607,6 +612,7 @@ class EnginePluginManager(object):
|
||||
@classmethod
|
||||
def list_plugins(cls):
|
||||
""" Lists all plugins currently available in user's .opencafe cache"""
|
||||
|
||||
plugin_cache = EngineDirectoryManager.OPENCAFE_SUB_DIRS.PLUGIN_CACHE
|
||||
plugin_folders = os.walk(plugin_cache).next()[1]
|
||||
wrap = textwrap.TextWrapper(initial_indent=" ",
|
||||
@@ -619,12 +625,14 @@ class EnginePluginManager(object):
|
||||
@classmethod
|
||||
def install_plugins(cls, plugin_names):
|
||||
""" Installs a list of plugins into the current environment"""
|
||||
|
||||
for plugin_name in plugin_names:
|
||||
cls.install_plugin(plugin_name)
|
||||
|
||||
@classmethod
|
||||
def install_plugin(cls, plugin_name):
|
||||
""" Install a single plugin by name into the current environment"""
|
||||
|
||||
plugin_cache = EngineDirectoryManager.OPENCAFE_SUB_DIRS.PLUGIN_CACHE
|
||||
plugin_dir = os.path.join(plugin_cache, plugin_name)
|
||||
wrap = textwrap.TextWrapper(initial_indent=" ",
|
||||
|
||||
@@ -43,7 +43,7 @@ class DatasetList(list):
|
||||
|
||||
super(DatasetList, self).append(dataset)
|
||||
|
||||
def append_new_dataset(self, name, data_dict):
|
||||
def append_new_dataset(self, name, data_dict):
|
||||
"""Creates and appends a new Dataset"""
|
||||
self.append(_Dataset(name, data_dict))
|
||||
|
||||
|
||||
@@ -19,7 +19,7 @@ import inspect
|
||||
import itertools
|
||||
|
||||
from types import FunctionType
|
||||
from unittest2 import TestCase
|
||||
from unittest import TestCase
|
||||
from warnings import warn, simplefilter
|
||||
|
||||
from cafe.common.reporting import cclogging
|
||||
|
||||
@@ -21,7 +21,7 @@ limitations under the License.
|
||||
"""
|
||||
import os
|
||||
import re
|
||||
import unittest2 as unittest
|
||||
import unittest
|
||||
|
||||
from cafe.drivers.base import FixtureReporter
|
||||
from cafe.common.reporting.cclogging import init_root_log_handler
|
||||
@@ -75,7 +75,7 @@ class BaseTestFixture(unittest.TestCase):
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
super(BaseTestFixture, cls).setUpClass()
|
||||
#Move root log handler initialization to the runner!
|
||||
# TODO: Move root log handler initialization to the runner!
|
||||
init_root_log_handler()
|
||||
cls._reporter = FixtureReporter(cls)
|
||||
cls.fixture_log = cls._reporter.logger.log
|
||||
@@ -140,8 +140,8 @@ class BaseTestFixture(unittest.TestCase):
|
||||
try:
|
||||
func(*args, **kwargs)
|
||||
except Exception as exception:
|
||||
#Pretty prints method signature in the following format:
|
||||
#"classTearDown failure: Unable to execute FnName(a, b, c=42)"
|
||||
# Pretty prints method signature in the following format:
|
||||
# "classTearDown failure: Unable to execute FnName(a, b, c=42)"
|
||||
cls.fixture_log.exception(exception)
|
||||
cls.fixture_log.error(
|
||||
"classTearDown failure: Exception occured while trying to"
|
||||
|
||||
@@ -26,7 +26,6 @@ class SummarizeResults(object):
|
||||
|
||||
def get_passed_tests(self):
|
||||
all_tests = []
|
||||
actual_number_of_tests_run = []
|
||||
failed_tests = []
|
||||
skipped_tests = []
|
||||
errored_tests = []
|
||||
@@ -50,8 +49,6 @@ class SummarizeResults(object):
|
||||
for item_2 in setup_errored_classes:
|
||||
if item_2 == item_1.__class__.__name__:
|
||||
setup_errored_tests.append(item_1)
|
||||
else:
|
||||
actual_number_of_tests_run = all_tests
|
||||
|
||||
passed_tests = list(set(all_tests) - set(failed_tests) -
|
||||
set(skipped_tests) - set(errored_tests) -
|
||||
|
||||
@@ -25,7 +25,7 @@ from inspect import getmembers, isclass
|
||||
from multiprocessing import Process, Manager
|
||||
from re import search
|
||||
from traceback import extract_tb
|
||||
import unittest2 as unittest
|
||||
import unittest
|
||||
import uuid
|
||||
|
||||
from result import TaggedTextTestResult
|
||||
@@ -88,8 +88,7 @@ def print_traceback():
|
||||
"""
|
||||
info = sys.exc_info()
|
||||
excp_type, excp_value = info[:2]
|
||||
err_msg = error_msg(excp_type.__name__,
|
||||
excp_value)
|
||||
err_msg = error_msg(excp_type.__name__, excp_value)
|
||||
print err_msg
|
||||
for file_name, lineno, function, text in extract_tb(info[2]):
|
||||
print ">>>", file_name
|
||||
@@ -129,12 +128,8 @@ class OpenCafeParallelTextTestRunner(unittest.TextTestRunner):
|
||||
def run(self, test):
|
||||
"""Run the given test case or test suite."""
|
||||
result = self._makeResult()
|
||||
startTime = time.time()
|
||||
test(result)
|
||||
stopTime = time.time()
|
||||
timeTaken = stopTime - startTime
|
||||
result.printErrors()
|
||||
run = result.testsRun
|
||||
return result
|
||||
|
||||
|
||||
@@ -905,18 +900,10 @@ class UnittestRunner(object):
|
||||
"""Inject tag mapping into the result __dict__ object if available"""
|
||||
if hasattr(result, 'mapping'):
|
||||
mapping = result.mapping.test_to_tag_mapping
|
||||
|
||||
if not mapping is None and len(mapping) > 0:
|
||||
setattr(result, 'tags', mapping)
|
||||
else:
|
||||
setattr(result, 'tags', [])
|
||||
setattr(result, 'tags', mapping or [])
|
||||
|
||||
attributes = result.mapping.test_to_attribute_mapping
|
||||
|
||||
if not attributes is None and len(attributes) > 0:
|
||||
setattr(result, 'attributes', attributes)
|
||||
else:
|
||||
setattr(result, 'attributes', [])
|
||||
setattr(result, 'attributes', attributes or [])
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
@@ -927,14 +914,11 @@ class UnittestRunner(object):
|
||||
parallel_test_list = []
|
||||
|
||||
builder = SuiteBuilder(
|
||||
self.cl_args.module_regex,
|
||||
self.cl_args.method_regex,
|
||||
self.cl_args.tags,
|
||||
self.cl_args.supress_flag)
|
||||
self.cl_args.module_regex, self.cl_args.method_regex,
|
||||
self.cl_args.tags, self.cl_args.supress_flag)
|
||||
|
||||
test_runner = self.get_runner(
|
||||
self.cl_args.parallel,
|
||||
self.cl_args.fail_fast,
|
||||
self.cl_args.parallel, self.cl_args.fail_fast,
|
||||
self.cl_args.verbose)
|
||||
|
||||
# Build master test suite
|
||||
@@ -950,27 +934,24 @@ class UnittestRunner(object):
|
||||
parallel_test_list = builder.generate_suite_list(
|
||||
path, parallel_test_list)
|
||||
else:
|
||||
master_suite = builder.generate_suite(
|
||||
self.product_repo_path)
|
||||
master_suite = builder.generate_suite(self.product_repo_path)
|
||||
if self.cl_args.parallel:
|
||||
parallel_test_list = builder.generate_suite_list(
|
||||
self.product_repo_path)
|
||||
|
||||
if self.cl_args.parallel:
|
||||
exit_code = self.run_parallel(
|
||||
parallel_test_list,
|
||||
test_runner,
|
||||
parallel_test_list, test_runner,
|
||||
result_type=self.cl_args.result,
|
||||
results_path=self.cl_args.result_directory,
|
||||
verbosity=self.cl_args.verbose)
|
||||
exit(exit_code)
|
||||
else:
|
||||
exit_code = self.run_serialized(
|
||||
master_suite,
|
||||
test_runner,
|
||||
result_type=self.cl_args.result,
|
||||
master_suite, test_runner, result_type=self.cl_args.result,
|
||||
results_path=self.cl_args.result_directory,
|
||||
verbosity=self.cl_args.verbose)
|
||||
|
||||
exit(exit_code)
|
||||
|
||||
@staticmethod
|
||||
@@ -1040,13 +1021,13 @@ class UnittestRunner(object):
|
||||
UnittestRunner._inject_tag_mapping(result)
|
||||
|
||||
if result_type is not None:
|
||||
result_parser = SummarizeResults(vars(result), master_suite,
|
||||
total_execution_time)
|
||||
result_parser = SummarizeResults(
|
||||
vars(result), master_suite, total_execution_time)
|
||||
all_results = result_parser.gather_results()
|
||||
reporter = Reporter(result_parser=result_parser,
|
||||
all_results=all_results)
|
||||
reporter.generate_report(result_type=result_type,
|
||||
path=results_path)
|
||||
reporter = Reporter(
|
||||
result_parser=result_parser, all_results=all_results)
|
||||
reporter.generate_report(
|
||||
result_type=result_type, path=results_path)
|
||||
|
||||
log_results(result, verbosity=verbosity)
|
||||
if not result.wasSuccessful():
|
||||
|
||||
@@ -72,5 +72,5 @@ class OpenCafeUnittestTestSuite(TestSuite):
|
||||
className = util.strclass(currentClass)
|
||||
errorName = 'setUpClass (%s)' % className
|
||||
self._addClassOrModuleLevelException(result, e, errorName)
|
||||
#Monkeypatch: Run class cleanup if setUpClass fails
|
||||
# Monkeypatch: Run class cleanup if setUpClass fails
|
||||
currentClass._do_class_cleanup_tasks()
|
||||
|
||||
@@ -23,14 +23,14 @@ class RequiredClientNotDefinedError(Exception):
|
||||
|
||||
|
||||
def behavior(*required_clients):
|
||||
'''Decorator that tags method as a behavior, and optionally adds
|
||||
"""Decorator that tags method as a behavior, and optionally adds
|
||||
required client objects to an internal attribute. Causes calls to this
|
||||
method to throw RequiredClientNotDefinedError exception if the containing
|
||||
class does not have the proper client instances defined.
|
||||
'''
|
||||
#@decorator.decorator
|
||||
"""
|
||||
|
||||
def _decorator(func):
|
||||
#Unused for now
|
||||
# Unused for now
|
||||
setattr(func, '__is_behavior__', True)
|
||||
setattr(func, '__required_clients__', [])
|
||||
for client in required_clients:
|
||||
|
||||
@@ -14,10 +14,10 @@ See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
'''Provides low level connectivity to the commandline via popen()
|
||||
"""Provides low level connectivity to the commandline via popen()
|
||||
@note: Primarily intended to serve as base classes for a specific
|
||||
command line client Class
|
||||
'''
|
||||
"""
|
||||
import os
|
||||
import sys
|
||||
from subprocess import Popen, PIPE, CalledProcessError
|
||||
@@ -29,49 +29,54 @@ from cafe.engine.models.commandline_response import CommandLineResponse
|
||||
|
||||
|
||||
class BaseCommandLineClient(BaseClient):
|
||||
'''Wrapper for driving/parsing a command line program
|
||||
"""Wrapper for driving/parsing a command line program
|
||||
@ivar base_command: This processes base command string. (I.E. 'ls', 'pwd')
|
||||
@type base_command: C{str}
|
||||
@note: This class is dependent on a local installation of the wrapped
|
||||
client process. The thing you run has to be there!
|
||||
'''
|
||||
"""
|
||||
|
||||
def __init__(self, base_command=None, env_var_dict=None):
|
||||
'''
|
||||
"""
|
||||
@param base_command: This processes base command string.
|
||||
(I.E. 'ls', 'pwd')
|
||||
@type base_command: C{str}
|
||||
'''
|
||||
"""
|
||||
|
||||
super(BaseCommandLineClient, self).__init__()
|
||||
self.base_command = base_command
|
||||
self.env_var_dict = env_var_dict or {}
|
||||
self.set_environment_variables(self.env_var_dict)
|
||||
|
||||
def set_environment_variables(self, env_var_dict=None):
|
||||
'''Sets all os environment variables provided in env_var_dict'''
|
||||
"""Sets all os environment variables provided in env_var_dict"""
|
||||
|
||||
self.env_var_dict = env_var_dict
|
||||
for key, value in self.env_var_dict.items():
|
||||
self._log.debug('setting {0}={1}'.format(key, value))
|
||||
os.environ[str(key)] = str(value)
|
||||
|
||||
def update_environment_variables(self, env_var_dict=None):
|
||||
'''Sets all os environment variables provided in env_var_dict'''
|
||||
"""Sets all os environment variables provided in env_var_dict"""
|
||||
|
||||
self.env_var_dict = self.env_var_dict.update(env_var_dict or {})
|
||||
for key, value in self.env_var_dict.items():
|
||||
self._log.debug('setting {0}={1}'.format(key, value))
|
||||
os.environ[str(key)] = str(value)
|
||||
|
||||
def unset_environment_variables(self, env_var_list=None):
|
||||
'''Unsets all os environment variables provided in env_var_dict
|
||||
"""Unsets all os environment variables provided in env_var_dict
|
||||
by default.
|
||||
If env_var_list is passed, attempts to unset all environment vars in
|
||||
list'''
|
||||
list"""
|
||||
|
||||
env_var_list = env_var_list or self.env_var_dict.keys() or []
|
||||
for key, _ in env_var_list:
|
||||
self._log.debug('unsetting {0}'.format(key))
|
||||
os.unsetenv(str(key))
|
||||
|
||||
def _build_command(self, cmd, *args):
|
||||
#Process command we received
|
||||
# Process command we received
|
||||
command = "{0} {1}".format(
|
||||
self.base_command, cmd) if self.base_command else cmd
|
||||
if args and args[0]:
|
||||
@@ -93,7 +98,7 @@ class BaseCommandLineClient(BaseClient):
|
||||
return command
|
||||
|
||||
def _execute_command(self, command):
|
||||
#Run the command
|
||||
# Run the command
|
||||
process = None
|
||||
try:
|
||||
process = Popen(command, stdout=PIPE, stderr=PIPE, shell=True)
|
||||
@@ -107,13 +112,14 @@ class BaseCommandLineClient(BaseClient):
|
||||
"""Running a command asynchronously returns a CommandLineResponse
|
||||
objecct with a running subprocess.Process object in it. This process
|
||||
needs to be closed or killed manually after execution."""
|
||||
|
||||
os_response = CommandLineResponse()
|
||||
os_response.command = self._build_command(cmd, *args)
|
||||
os_response.process = self._execute_command(os_response.command)
|
||||
return os_response
|
||||
|
||||
def run_command(self, cmd, *args):
|
||||
'''Sends a command directly to this instance's command line
|
||||
"""Sends a command directly to this instance's command line
|
||||
@param cmd: Command to sent to command line
|
||||
@type cmd: C{str}
|
||||
@param args: Optional list of args to be passed with the command
|
||||
@@ -122,8 +128,9 @@ class BaseCommandLineClient(BaseClient):
|
||||
@return: The full response details from the command line
|
||||
@rtype: L{CommandLineResponse}
|
||||
@note: PRIVATE. Can be over-ridden in a child class
|
||||
'''
|
||||
#Wait for the process to complete and then read the output
|
||||
"""
|
||||
|
||||
# Wait for the process to complete and then read the output
|
||||
os_response = self.run_command_async(cmd, *args)
|
||||
std_out, std_err = os_response.process.communicate()
|
||||
os_response.standard_out = str(std_out).splitlines()
|
||||
@@ -140,12 +147,13 @@ class BaseCommandLineClient(BaseClient):
|
||||
self._log, info, heading='COMMAND LINE RESPONSE',
|
||||
log_level=DEBUG, one_line=True)
|
||||
|
||||
#Clean up the process to avoid any leakage/wonkiness with stdout/stderr
|
||||
# Clean up the process to avoid any leakage/wonkiness with
|
||||
# stdout/stderr
|
||||
try:
|
||||
os_response.process.kill()
|
||||
except OSError:
|
||||
#An OS Error is valid if the process has exited. We only
|
||||
#need to be concerned about other exceptions
|
||||
# An OS Error is valid if the process has exited. We only
|
||||
# need to be concerned about other exceptions
|
||||
sys.exc_clear()
|
||||
|
||||
os_response.process = None
|
||||
|
||||
@@ -19,30 +19,35 @@ from cafe.common.reporting import cclogging
|
||||
|
||||
class CommonToolsMixin(object):
|
||||
"""Methods used to make building data models easier, common to all types"""
|
||||
|
||||
@staticmethod
|
||||
def _bool_to_string(value, true_string='true', false_string='false'):
|
||||
"""Returns a string representation of a boolean value, or the value
|
||||
provided if the value is not an instance of bool
|
||||
"""
|
||||
|
||||
if isinstance(value, bool):
|
||||
return true_string if value is True else false_string
|
||||
return value
|
||||
|
||||
@staticmethod
|
||||
def _remove_empty_values(dictionary):
|
||||
'''Returns a new dictionary based on 'dictionary', minus any keys with
|
||||
"""Returns a new dictionary based on 'dictionary', minus any keys with
|
||||
values that are None
|
||||
'''
|
||||
"""
|
||||
|
||||
return dict((k, v) for k, v in dictionary.iteritems() if v is not None)
|
||||
|
||||
|
||||
class JSON_ToolsMixin(object):
|
||||
"""Methods used to make building json data models easier"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class XML_ToolsMixin(object):
|
||||
"""Methods used to make building xml data models easier"""
|
||||
|
||||
_XML_VERSION = '1.0'
|
||||
_ENCODING = 'UTF-8'
|
||||
|
||||
@@ -69,6 +74,7 @@ class XML_ToolsMixin(object):
|
||||
@staticmethod
|
||||
def _remove_xml_etree_namespace(doc, namespace):
|
||||
"""Remove namespace in the passed document in place."""
|
||||
|
||||
ns = u'{%s}' % namespace
|
||||
nsl = len(ns)
|
||||
for elem in doc.getiterator():
|
||||
@@ -126,10 +132,10 @@ class BaseModel(object):
|
||||
return self.__str__()
|
||||
|
||||
|
||||
#Splitting the xml and json stuff into mixins cleans up the code but still
|
||||
#muddies the AutoMarshallingModel namespace. We could create
|
||||
#tool objects in the AutoMarshallingModel, which would just act as
|
||||
#sub-namespaces, to keep it clean. --Jose
|
||||
# Splitting the xml and json stuff into mixins cleans up the code but still
|
||||
# muddies the AutoMarshallingModel namespace. We could create
|
||||
# tool objects in the AutoMarshallingModel, which would just act as
|
||||
# sub-namespaces, to keep it clean. --Jose
|
||||
class AutoMarshallingModel(
|
||||
BaseModel, CommonToolsMixin, JSON_ToolsMixin, XML_ToolsMixin):
|
||||
"""
|
||||
@@ -137,6 +143,7 @@ class AutoMarshallingModel(
|
||||
to automatically create serialized requests and automatically
|
||||
deserialize responses in a format-agnostic way.
|
||||
"""
|
||||
|
||||
_log = cclogging.getLogger(__name__)
|
||||
|
||||
def __init__(self):
|
||||
@@ -181,7 +188,7 @@ class AutoMarshallingModel(
|
||||
except Exception as deserialization_exception:
|
||||
cls._log.exception(deserialization_exception)
|
||||
|
||||
#Try to log string and format_type if deserialization broke
|
||||
# Try to log string and format_type if deserialization broke
|
||||
if deserialization_exception is not None:
|
||||
try:
|
||||
cls._log.debug(
|
||||
@@ -200,14 +207,14 @@ class AutoMarshallingModel(
|
||||
|
||||
return model_object
|
||||
|
||||
#Serialization Functions
|
||||
# Serialization Functions
|
||||
def _obj_to_json(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def _obj_to_xml(self):
|
||||
raise NotImplementedError
|
||||
|
||||
#Deserialization Functions
|
||||
# Deserialization Functions
|
||||
@classmethod
|
||||
def _xml_to_obj(cls, serialized_str):
|
||||
raise NotImplementedError
|
||||
@@ -219,11 +226,13 @@ class AutoMarshallingModel(
|
||||
|
||||
class AutoMarshallingListModel(list, AutoMarshallingModel):
|
||||
"""List-like AutoMarshallingModel used for some special cases"""
|
||||
|
||||
def __str__(self):
|
||||
return list.__str__(self)
|
||||
|
||||
|
||||
class AutoMarshallingDictModel(dict, AutoMarshallingModel):
|
||||
"""Dict-like AutoMarshallingModel used for some special cases"""
|
||||
|
||||
def __str__(self):
|
||||
return dict.__str__(self)
|
||||
|
||||
@@ -27,6 +27,7 @@ except:
|
||||
The mongo data-source is currently not being used. and needs to be
|
||||
abstracted out into a data-source plugin.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
@@ -42,7 +43,7 @@ class ConfigEnvironmentVariableError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
#Decorator
|
||||
# This is a decorator
|
||||
def expected_values(*values):
|
||||
def decorator(fn):
|
||||
def wrapped():
|
||||
@@ -242,11 +243,9 @@ class MongoDataSource(DictionaryDataSource):
|
||||
|
||||
|
||||
class BaseConfigSectionInterface(object):
|
||||
"""
|
||||
Base class for building an interface for the data contained in a
|
||||
"""Base class for building an interface for the data contained in a
|
||||
SafeConfigParser object, as loaded from the config file as defined
|
||||
by the engine's config file.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, config_file_path, section_name):
|
||||
|
||||
@@ -1,15 +0,0 @@
|
||||
"""
|
||||
Copyright 2013 Rackspace
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
@@ -1,150 +0,0 @@
|
||||
"""
|
||||
Copyright 2013 Rackspace
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
import socket
|
||||
|
||||
from portal.input.syslog.usyslog import SyslogMessageHead
|
||||
|
||||
|
||||
class RSyslogClient(object):
    """Simple TCP client that builds and sends syslog messages to an
    rsyslog endpoint.

    `default_sd` is a structured-data dict merged into (and overridden
    by) the per-message structured data passed to send().
    """

    def __init__(self, host='127.0.0.1', port=5140, default_sd=None):
        super(RSyslogClient, self).__init__()

        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.host = host
        self.port = port
        self.default_sd = default_sd

    def connect(self, host=None, port=None):
        """Connect the underlying socket.

        Falls back to the host/port given at construction time when
        either argument is None.
        """
        address = (host or self.host, port or self.port)
        self.sock.connect(address)

    def close(self):
        """Close the underlying socket."""
        self.sock.close()

    def send(self, priority, version=1, timestamp=None, app_name=None,
             host_name=None, message_id=None, process_id=None,
             msg=None, sd=None):
        """Build a full syslog message and send it over the socket.

        Returns the result of socket.sendall() (None on success).
        """
        if self.default_sd:
            # BUG FIX: copy default_sd before merging. The original
            # updated self.default_sd in place, so structured data from
            # one send() leaked into every later send().
            composite_sd = dict(self.default_sd)
            composite_sd.update(sd or {})
            sd = composite_sd

        msg_handler = MessageHandler()
        header = self.build_header(app_name, host_name, message_id,
                                   priority, process_id, version,
                                   timestamp, sd)

        msg_handler.add_message_head(header)
        msg_handler.add_message_part(msg)
        syslog_msg = msg_handler.get_full_message()

        return self.sock.sendall(syslog_msg)

    @classmethod
    def build_header(cls, app_name, host_name, message_id, priority,
                     process_id, version, timestamp, sd):
        """Builds a SyslogMessageHead object.

        Replaces values passed as None with the syslog "nil" value
        ('-'); sd defaults to an empty dict.
        """
        def nil(value):
            # Only None maps to nil. The original used `value or '-'`,
            # which wrongly discarded valid falsy values such as a
            # syslog priority of 0 (kernel/emergency).
            return '-' if value is None else value

        head = SyslogMessageHead()
        head.appname = nil(app_name)
        head.hostname = nil(host_name)
        head.messageid = nil(message_id)
        head.priority = nil(priority)
        head.processid = nil(process_id)
        head.timestamp = nil(timestamp)
        head.version = nil(version)
        head.sd = sd or {}
        return head
|
||||
|
||||
|
||||
class MessageHandler(object):
    """Accumulates a syslog message head plus body fragments and
    renders them as a length-prefixed rsyslog wire-format message.
    """

    def __init__(self):
        self.msg = b''
        self.msg_head = None
        self.msg_count = 0

    def add_message_head(self, message_head):
        """Register the message header and bump the message counter."""
        self.msg_count += 1
        self.msg_head = message_head

    def add_message_part(self, message_part):
        """Append a body fragment to the internal message buffer."""
        self.msg += bytes(message_part)

    def get_full_message(self, message_end=''):
        """Render the accumulated head and body as an rsyslog string."""
        complete_body = bytes(self.msg + message_end)
        syslog_dict = self.msg_head.as_dict()
        syslog_dict['message'] = complete_body
        return self.cee_dict_to_rsyslog(self.to_cee(syslog_dict))

    @classmethod
    def to_cee(cls, syslog_message):
        """Map a syslog-message dict onto its CEE-format key names."""
        # (cee_key, syslog_key) pairs; missing source keys become None.
        key_map = (('time', 'timestamp'),
                   ('host', 'hostname'),
                   ('pname', 'appname'),
                   ('pri', 'priority'),
                   ('ver', 'version'),
                   ('pid', 'processid'),
                   ('msgid', 'messageid'),
                   ('msg', 'message'),
                   ('native', 'sd'))
        return dict((cee_key, syslog_message.get(source_key))
                    for cee_key, source_key in key_map)

    @classmethod
    def sd_dict_to_syslog_str(cls, sd_dict):
        """Render a structured-data dict as a syslog SD string."""
        rendered = ''
        for element_id, params in sd_dict.items():
            param_strs = [' {key}="{value}"'.format(key=name, value=value)
                          for name, value in params.items()]
            rendered += ('[{sd_key}'.format(sd_key=element_id) +
                         ''.join(param_strs) + ']')

        return rendered

    @classmethod
    def cee_dict_to_rsyslog(cls, cee_dict):
        """Convert a CEE-format dict into a length-prefixed rsyslog
        message string.
        """
        structured_data = cee_dict.get('native')
        if structured_data is not None:
            structured_data = cls.sd_dict_to_syslog_str(structured_data)

        log = ('<{pri}>{ver} {time} {host} {app} {pid} {msgid} {sd} '
               '{msg}').format(pri=cee_dict.get('pri'),
                               ver=cee_dict.get('ver'),
                               time=cee_dict.get('time', '-'),
                               host=cee_dict.get('host', '-'),
                               app=cee_dict.get('pname', '-'),
                               pid=cee_dict.get('pid', '-'),
                               msgid=cee_dict.get('msgid', '-'),
                               sd=structured_data or '-',
                               msg=cee_dict.get('msg'))

        return b'{length} {syslog}'.format(length=len(log), syslog=log)
|
||||
Reference in New Issue
Block a user