Adds Python 3.4 compatibility to OpenCAFE
* Fixes all issues reported by 2to3
* Adds 3.4 tox test support
* Adds six to pip requirements file

Change-Id: Idb2743000e128651d02490de1415771647c98040
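The bulk of the change is mechanical; a minimal sketch of the two most common substitutions (the abstract-base-class declaration from the reporting module and the print statements), with the method body elided here as a placeholder:

    from abc import ABCMeta, abstractmethod
    from six import add_metaclass

    # Python 2-only spelling was `__metaclass__ = ABCMeta` inside the class
    # body; six.add_metaclass gives the same result on 2.7 and 3.4.
    @add_metaclass(ABCMeta)
    class BaseReport(object):

        @abstractmethod
        def generate_report(self, result_parser, all_results=None, path=None):
            pass  # placeholder; concrete reports implement this

    # Print statements become calls to the print() function, which
    # Python 2.7 also accepts.
    print('=' * 150)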
@@ -14,10 +14,11 @@ See the License for the specific language governing permissions and
 limitations under the License.
 """
 from abc import ABCMeta, abstractmethod
+from six import add_metaclass
 
 
-class BaseReport:
-    __metaclass__ = ABCMeta
+@add_metaclass(ABCMeta)
+class BaseReport(object):
 
     @abstractmethod
     def generate_report(self, result_parser, all_results=None, path=None):
@@ -139,15 +139,15 @@ def log_results(result):
     expected_fails = unexpected_successes = skipped = 0
 
     try:
-        results = map(len, (result.expectedFailures,
-                            result.unexpectedSuccesses,
-                            result.skipped))
+        results = list(map(len, (
+            result.expectedFailures, result.unexpectedSuccesses,
+            result.skipped)))
         expected_fails, unexpected_successes, skipped = results
     except AttributeError:
         pass
 
     if not result.wasSuccessful():
-        failed, errored = map(len, (result.failures, result.errors))
+        failed, errored = list(map(len, (result.failures, result.errors)))
 
         if failed:
             infos.append("failures={0}".format(failed))
@@ -174,10 +174,10 @@ def log_results(result):
     else:
         log.info("\n")
 
-    print '=' * 150
-    print "Detailed logs: {0}".format(
-        os.getenv("CAFE_TEST_LOG_PATH"))
-    print '-' * 150
+    print('=' * 150)
+    print("Detailed logs: {0}".format(
+        os.getenv("CAFE_TEST_LOG_PATH")))
+    print('-' * 150)
 
 
 def log_errors(label, result, errors):
@@ -228,7 +228,7 @@ def log_info_block(
 
     separator = str(separator or "{0}".format('=' * 56))
     max_length = \
-        len(max([k for k in info.keys() if info.get(k)], key=len)) + 3
+        len(max([k for k in list(info.keys()) if info.get(k)], key=len)) + 3
 
     output.append(separator)
     if heading:
@@ -61,5 +61,5 @@ class JSONReport(BaseReport):
         if os.path.isdir(result_path):
             result_path += "/results.json"
 
-        with open(result_path, 'wb') as result_file:
+        with open(result_path, 'w') as result_file:
             json.dump(test_results, result_file)
@@ -74,6 +74,7 @@
 [n for n in UNICODE_BLOCKS.get_range(BLOCK_NAMES.thai).codepoint_names()]
 """
 
+import six
 import unicodedata
 
 # Integer denoting the first unicode codepoint
@@ -435,7 +436,7 @@ class UnicodeRange(object):
         representation of all codepoints in UnicodeRange
         """
         for codepoint in self.codepoints():
-            yield unichr(codepoint).encode(encoding)
+            yield six.unichr(codepoint).encode(encoding)
 
 
 class UnicodeRangeList(list):
@@ -480,7 +481,7 @@ class UnicodeRangeList(list):
         UnicodeRangeList
         """
         for codepoint in self.codepoints():
-            yield unichr(codepoint).encode(encoding)
+            yield six.unichr(codepoint).encode(encoding)
 
     def get_range(self, range_name):
         """
@@ -561,4 +562,5 @@ def codepoint_name(codepoint_integer):
             (codepoint_integer > (UNICODE_ENDING_CODEPOINT + 1)):
         return None
 
-    return unicodedata.name(unichr(codepoint_integer), hex(codepoint_integer))
+    return unicodedata.name(
+        six.unichr(codepoint_integer), hex(codepoint_integer))
@@ -23,34 +23,34 @@ class EngineActions(object):
 
     class InitInstall(argparse.Action):
         def __call__(self, parser, namespace, values, option_string=None):
-            print "================================="
-            print "* Initializing Engine Install"
+            print("=================================")
+            print("* Initializing Engine Install")
             EngineDirectoryManager.build_engine_directories()
             EngineConfigManager.build_engine_config()
-            print "================================="
+            print("=================================")
 
 
 class PluginActions(object):
     class AddPluginCache(argparse.Action):
         def __call__(self, parser, namespace, values, option_string=None):
-            print "================================="
-            print "* Adding Plugin Cache"
+            print("=================================")
+            print("* Adding Plugin Cache")
             EnginePluginManager.populate_plugin_cache(values)
-            print "================================="
+            print("=================================")
 
     class InstallPlugin(argparse.Action):
         def __call__(self, parser, namespace, values, option_string=None):
-            print "================================="
-            print "* Installing Plugins"
+            print("=================================")
+            print("* Installing Plugins")
             EnginePluginManager.install_plugins(values)
-            print "================================="
+            print("=================================")
 
     class ListPlugins(argparse.Action):
         def __call__(self, parser, namespace, values, option_string=None):
-            print "================================="
-            print "* Available Plugins"
+            print("=================================")
+            print("* Available Plugins")
             EnginePluginManager.list_plugins()
-            print "================================="
+            print("=================================")
 
 
 class ConfiguratorCLI(object):
@@ -24,7 +24,7 @@ import getpass
 import shutil
 from subprocess import Popen, PIPE
 
-from ConfigParser import SafeConfigParser
+from six.moves.configparser import SafeConfigParser
 from cafe.engine.config import EngineConfig
 
 if not platform.system().lower() == 'windows':
@@ -104,13 +104,12 @@ class TestEnvManager(object):
 
         def __init__(self, func):
            self.func = func
-            self.func_name = func.__name__
 
         def __get__(self, obj, cls):
             if obj is None:
                 return None
             value = self.func(obj)
-            setattr(obj, self.func_name, value)
+            setattr(obj, self.func.__name__, value)
             return value
 
     def __init__(
@@ -271,15 +270,15 @@ class EngineDirectoryManager(object):
 
     @classmethod
     def create_engine_directories(cls):
-        print cls.wrapper.fill('Creating default directories in {0}'.format(
-            cls.OPENCAFE_ROOT_DIR))
+        print(cls.wrapper.fill('Creating default directories in {0}'.format(
+            cls.OPENCAFE_ROOT_DIR)))
 
         # Create the opencafe root dir and sub dirs
         PlatformManager.safe_create_dir(cls.OPENCAFE_ROOT_DIR)
-        print cls.wrapper.fill('...created {0}'.format(cls.OPENCAFE_ROOT_DIR))
-        for _, directory_path in cls.OPENCAFE_SUB_DIRS.items():
+        print(cls.wrapper.fill('...created {0}'.format(cls.OPENCAFE_ROOT_DIR)))
+        for _, directory_path in list(cls.OPENCAFE_SUB_DIRS.items()):
             PlatformManager.safe_create_dir(directory_path)
-            print cls.wrapper.fill('...created {0}'.format(directory_path))
+            print(cls.wrapper.fill('...created {0}'.format(directory_path)))
 
     @classmethod
     def set_engine_directory_permissions(cls):
@@ -350,9 +349,9 @@ class EngineConfigManager(object):
     def write_config_backup(cls, config):
         config_backup_location = "{0}{1}".format(
             cls.ENGINE_CONFIG_PATH, '.backup')
-        print cls.wrapper.fill(
+        print(cls.wrapper.fill(
             "Creating backup of {0} at {1}".format(
-                cls.ENGINE_CONFIG_PATH, config_backup_location))
+                cls.ENGINE_CONFIG_PATH, config_backup_location)))
         cls.write_and_chown_config(config, config_backup_location)
 
     @classmethod
@@ -383,8 +382,8 @@ class EngineConfigManager(object):
 
         if not update_tracker._updated:
             wrapper = textwrap.TextWrapper(initial_indent=" ")
-            print wrapper.fill(
-                "...no updates applied, engine.config is newest version")
+            print(wrapper.fill(
+                "...no updates applied, engine.config is newest version"))
 
         return config
@@ -423,12 +422,12 @@ class EngineConfigManager(object):
     def build_engine_config(cls):
         config = None
         if os.path.exists(cls.ENGINE_CONFIG_PATH):
-            print cls.wrapper.fill('Checking for updates to engine.config...')
+            print(cls.wrapper.fill('Checking for updates to engine.config...'))
             config = cls.update_engine_config()
         else:
-            print cls.wrapper.fill(
+            print(cls.wrapper.fill(
                 "Creating default engine.config at {0}".format(
-                    cls.ENGINE_CONFIG_PATH))
+                    cls.ENGINE_CONFIG_PATH)))
             config = cls.generate_default_engine_config()
 
         cls.write_and_chown_config(config, cls.ENGINE_CONFIG_PATH)
@@ -439,9 +438,9 @@ class EngineConfigManager(object):
         twrap = textwrap.TextWrapper(
             initial_indent='* ', subsequent_indent=' ',
             break_long_words=False)
-        print twrap.fill(
+        print(twrap.fill(
             'Installing reference configuration files in ...'.format(
-                EngineDirectoryManager.OPENCAFE_ROOT_DIR))
+                EngineDirectoryManager.OPENCAFE_ROOT_DIR)))
         twrap = textwrap.TextWrapper(
             initial_indent=' ', subsequent_indent=' ',
             break_long_words=False)
@@ -463,7 +462,7 @@ class EngineConfigManager(object):
 
             if print_progress:
                 if destination_dir not in _printed:
-                    print twrap.fill('{0}'.format(destination_dir))
+                    print(twrap.fill('{0}'.format(destination_dir)))
                     _printed.append(destination_dir)
 
             PlatformManager.safe_chown(destination_file)
@@ -507,7 +506,7 @@ class EnginePluginManager(object):
             break_long_words=False).fill
 
         for plugin_folder in plugin_folders:
-            print wrap('... {name}'.format(name=plugin_folder))
+            print(wrap('... {name}'.format(name=plugin_folder)))
 
     @classmethod
     def install_plugins(cls, plugin_names):
@@ -527,11 +526,11 @@ class EnginePluginManager(object):
             break_long_words=False).fill
 
         # Pretty output of plugin name
-        print wrap('... {name}'.format(name=plugin_name))
+        print(wrap('... {name}'.format(name=plugin_name)))
 
         # Verify that the plugin exists
         if not os.path.exists(plugin_dir):
-            print wrap('* Failed to install plugin: {0}'.format(plugin_name))
+            print(wrap('* Failed to install plugin: {0}'.format(plugin_name)))
             return
 
         # Install Plugin
@@ -543,10 +542,10 @@ class EnginePluginManager(object):
             standard_out, standard_error = process.communicate()
         except Exception as e:
             msg = '* Plugin install failed {0}\n{1}\n'.format(cmd, e)
-            print wrap(msg)
+            print(wrap(msg))
 
         # Print failure if we receive an error code
         if process and process.returncode != 0:
-            print wrap(standard_out)
-            print wrap(standard_error)
-            print wrap('* Failed to install plugin: {0}'.format(plugin_name))
+            print(wrap(standard_out))
+            print(wrap(standard_error))
+            print(wrap('* Failed to install plugin: {0}'.format(plugin_name)))
@@ -155,6 +155,6 @@ def print_mug(name, brewing_from):
     === CAFE {name} Runner ===""".format(
         path=brewing_from, name=name)
 
-    print border
-    print mug
-    print border
+    print(border)
+    print(mug)
+    print(border)
@@ -95,7 +95,7 @@ def print_mug(base_dir):
     mug7 = " |_______|"
     mug8 = "=== CAFE Behave Runner ==="
 
-    print "\n{0}\n{1}\n{2}\n{3}\n{4}\n{5}\n{6}\n{7}\n{8}".format(
+    print("\n{0}\n{1}\n{2}\n{3}\n{4}\n{5}\n{6}\n{7}\n{8}".format(
         mug0,
         mug1,
         mug2,
@@ -104,11 +104,11 @@ def print_mug(base_dir):
         mug5,
         mug6,
         mug7,
-        mug8)
+        mug8))
 
-    print "-" * len(brew)
-    print brew
-    print "-" * len(brew)
+    print("-" * len(brew))
+    print(brew)
+    print("-" * len(brew))
 
 if __name__ == '__main__':
     entry_point()
@@ -93,7 +93,7 @@ def print_mug(base_dir):
     mug7 = " |_______|"
     mug8 = "=== CAFE Vows Runner ==="
 
-    print "\n{0}\n{1}\n{2}\n{3}\n{4}\n{5}\n{6}\n{7}\n{8}".format(
+    print("\n{0}\n{1}\n{2}\n{3}\n{4}\n{5}\n{6}\n{7}\n{8}".format(
         mug0,
         mug1,
         mug2,
@@ -102,11 +102,11 @@ def print_mug(base_dir):
         mug5,
         mug6,
         mug7,
-        mug8)
+        mug8))
 
-    print "-" * len(brew)
-    print brew
-    print "-" * len(brew)
+    print("-" * len(brew))
+    print(brew)
+    print("-" * len(brew))
 
 if __name__ == '__main__':
     entry_point()
@@ -16,7 +16,8 @@ limitations under the License.
 
 import collections
 import inspect
-import itertools
+import six
+from six.moves import zip_longest
 
 from types import FunctionType
 from unittest import TestCase
@@ -91,7 +92,8 @@ def DataDrivenFixture(cls):
 
             # Create a new test from the old test
             new_test = FunctionType(
-                original_test.func_code, original_test.func_globals,
+                six.get_function_code(original_test),
+                six.get_function_globals(original_test),
                 name=new_test_name)
 
             # Copy over any other attributes the original test had (mainly to
@@ -108,7 +110,7 @@ def DataDrivenFixture(cls):
 
             # Make sure we take into account required arguments
             kwargs = dict(
-                itertools.izip_longest(
+                zip_longest(
                     args[::-1], list(defaults or ())[::-1], fillvalue=None))
 
             kwargs.update(dataset.data)
@@ -153,7 +155,7 @@ class memoized(object):
     def __init__(self, func):
         self.func = func
         self.cache = {}
-        self.__name__ = func.func_name
+        self.__name__ = func.__name__
 
     def __call__(self, *args):
         log_name = "{0}.{1}".format(
@@ -19,7 +19,7 @@ class SummarizeResults(object):
 
     def __init__(self, result_dict, master_testsuite,
                  execution_time):
-        for keys, values in result_dict.items():
+        for keys, values in list(result_dict.items()):
             setattr(self, keys, values)
         self.master_testsuite = master_testsuite
         self.execution_time = execution_time
@@ -45,7 +45,7 @@ def tree(directory, padding, print_files=False):
     dir_token = "{0}+-".format(padding[:-1])
     dir_path = os.path.basename(os.path.abspath(directory))
 
-    print "{0}{1}/".format(dir_token, dir_path)
+    print("{0}{1}/".format(dir_token, dir_path))
 
     padding = "{0}{1}".format(padding, " ")
 
@@ -53,7 +53,7 @@ def tree(directory, padding, print_files=False):
         try:
             files = os.listdir(directory)
         except OSError:
-            print "Directory: {0} Does Not Exist".format(directory)
+            print("Directory: {0} Does Not Exist".format(directory))
     else:
         files = [name for name in os.listdir(directory) if
                  os.path.isdir(os.path.join(directory, name))]
@@ -68,7 +68,7 @@ def tree(directory, padding, print_files=False):
             tree(path, "".join([padding, "|"]), print_files)
         else:
             if (not file_name.endswith(".pyc") and file_name != "__init__.py"):
-                print "{0}{1}".format(padding, file_name)
+                print("{0}{1}".format(padding, file_name))
 
 
 class _WritelnDecorator(object):
@@ -147,9 +147,9 @@ class SuiteBuilder(object):
         truth_values = []
         method_attrs = {}
 
-        attr_keys = attrs.keys()
+        attr_keys = list(attrs.keys())
         method_attrs = method.__dict__[TAGS_DECORATOR_ATTR_DICT_NAME]
-        method_attrs_keys = method_attrs.keys()
+        method_attrs_keys = list(method_attrs.keys())
 
         for attr_key in attr_keys:
             if attr_key in method_attrs_keys:
@@ -327,23 +327,23 @@ class _UnittestRunnerCLI(object):
                         product))
 
             def _print_test_tree():
-                print "\n<[TEST REPO]>\n"
+                print("\n<[TEST REPO]>\n")
                 tree(test_dir, " ", print_files=True)
 
             def _print_config_tree():
-                print "\n<[CONFIGS]>\n"
+                print("\n<[CONFIGS]>\n")
                 tree(product_config_dir, " ", print_files=True)
 
             def _print_product_tree():
-                print "\n<[PRODUCTS]>\n"
+                print("\n<[PRODUCTS]>\n")
                 tree(test_env_mgr.test_repo_path, " ", print_files=False)
 
             def _print_product_list():
-                print "\n<[PRODUCTS]>\n"
-                print "+-{0}".format(product_config_dir)
-                print "\n".join(
+                print("\n<[PRODUCTS]>\n")
+                print("+-{0}".format(product_config_dir))
+                print("\n".join(
                     [" +-{0}/".format(dirname) for dirname in os.listdir(
-                        product_config_dir)])
+                        product_config_dir)]))
 
             # If no values passed, print a default
             if not values:
@@ -385,7 +385,7 @@ class _UnittestRunnerCLI(object):
 
             test_env = TestEnvManager(namespace.product or "", values)
             if not os.path.exists(test_env.test_config_file_path):
-                print (
+                print(
                     "cafe-runner: error: config file at {0} does not "
                     "exist".format(test_env.test_config_file_path))
                 exit(1)
@@ -411,7 +411,7 @@ class _UnittestRunnerCLI(object):
 
         def __call__(self, parser, namespace, values, option_string=None):
             if not os.path.exists(values):
-                print (
+                print(
                     "cafe-runner: error: data-directory '{0}' does not "
                     "exist".format(values))
                 exit(1)
@@ -430,8 +430,8 @@ class _UnittestRunnerCLI(object):
                     "cafe-runner: error: {0} is not a valid argument for "
                     "-v/--verbose".format(values))
             if msg:
-                print parser.usage
-                print msg
+                print(parser.usage)
+                print(msg)
                 exit(1)
 
             os.environ["VERBOSE"] = "true" if values == 3 else "false"
@@ -653,7 +653,7 @@ class _UnittestRunnerCLI(object):
         # Special case for when product or config is missing and --list
         # wasn't called
         if args.product is None or args.config is None:
-            print argparser.usage
+            print(argparser.usage)
            print (
                "cafe-runner: error: You must supply both a product and a "
                "config to run tests")
@@ -661,7 +661,7 @@ class _UnittestRunnerCLI(object):
 
        if (args.result or args.result_directory) and (
                args.result is None or args.result_directory is None):
-            print argparser.usage
+            print(argparser.usage)
            print (
                "cafe-runner: error: You must supply both a --result and a "
                "--result-directory to print out json or xml formatted "
@@ -691,7 +691,7 @@ class UnittestRunner(object):
 
     @staticmethod
     def print_mug_and_paths(test_env):
-        print """
+        print("""
    ( (
     ) )
    .........
@@ -700,16 +700,16 @@ class UnittestRunner(object):
    |  :-)  |_| |
    |       |___|
    |_______|
-    === CAFE Runner ==="""
-        print "=" * 150
-        print "Percolated Configuration"
-        print "-" * 150
-        print "BREWING FROM: ....: {0}".format(test_env.test_repo_path)
-        print "ENGINE CONFIG FILE: {0}".format(test_env.engine_config_path)
-        print "TEST CONFIG FILE..: {0}".format(test_env.test_config_file_path)
-        print "DATA DIRECTORY....: {0}".format(test_env.test_data_directory)
-        print "LOG PATH..........: {0}".format(test_env.test_log_dir)
-        print "=" * 150
+    === CAFE Runner ===""")
+        print("=" * 150)
+        print("Percolated Configuration")
+        print("-" * 150)
+        print("BREWING FROM: ....: {0}".format(test_env.test_repo_path))
+        print("ENGINE CONFIG FILE: {0}".format(test_env.engine_config_path))
+        print("TEST CONFIG FILE..: {0}".format(test_env.test_config_file_path))
+        print("DATA DIRECTORY....: {0}".format(test_env.test_data_directory))
+        print("LOG PATH..........: {0}".format(test_env.test_log_dir))
+        print("=" * 150)
 
     @staticmethod
     def execute_test(runner, test_id, test, results):
@@ -732,24 +732,24 @@ class UnittestRunner(object):
 
     @staticmethod
     def dump_results(start, finish, results):
-        print "-" * 71
+        print("-" * 71)
 
         tests_run = 0
         errors = 0
         failures = 0
-        for key, result in results.items():
+        for key, result in list(results.items()):
             tests_run += result.testsRun
             errors += len(result.errors)
             failures += len(result.failures)
 
-        print "Ran {0} test{1} in {2:.3f}s".format(
-            tests_run, "s" if tests_run != 1 else "", finish - start)
+        print("Ran {0} test{1} in {2:.3f}s".format(
+            tests_run, "s" if tests_run != 1 else "", finish - start))
 
         if failures or errors:
-            print "\nFAILED ({0}{1}{2})".format(
+            print("\nFAILED ({0}{1}{2})".format(
                 "Failures={0}".format(failures) if failures else "",
                 " " if failures and errors else "",
-                "Errors={0}".format(errors) if errors else "")
+                "Errors={0}".format(errors) if errors else ""))
 
         return errors, failures, tests_run
@@ -769,7 +769,7 @@ class UnittestRunner(object):
             if self.cl_args.dry_run:
                 for suite in parallel_test_list:
                     for test in suite:
-                        print test
+                        print(test)
                 exit(0)
             exit_code = self.run_parallel(
                 parallel_test_list, test_runner,
@@ -780,7 +780,7 @@ class UnittestRunner(object):
             master_suite = builder.generate_suite()
             if self.cl_args.dry_run:
                 for test in master_suite:
-                    print test
+                    print(test)
                 exit(0)
             exit_code = self.run_serialized(
                 master_suite, test_runner, result_type=self.cl_args.result,
@@ -823,7 +823,7 @@ class UnittestRunner(object):
 
         if result_type is not None:
             all_results = []
-            for test_id, result in results.items():
+            for test_id, result in list(results.items()):
                 tests = test_mapping[test_id]
                 result_parser = SummarizeResults(
                     vars(result), tests, (finish - start))
@@ -26,7 +26,7 @@ class OpenCafeUnittestTestSuite(TestSuite):
         if tearDownClass is not None:
             try:
                 tearDownClass()
-            except Exception, e:
+            except Exception as e:
                 if isinstance(result, _DebugResult):
                     raise
                 className = util.strclass(previousClass)
@@ -65,7 +65,7 @@ class OpenCafeUnittestTestSuite(TestSuite):
         if setUpClass is not None:
             try:
                 setUpClass()
-            except Exception, e:
+            except Exception as e:
                 if isinstance(result, _DebugResult):
                     raise
                 currentClass._classSetupFailed = True
@@ -52,7 +52,7 @@ class BaseCommandLineClient(BaseClient):
         """Sets all os environment variables provided in env_var_dict"""
 
         self.env_var_dict = env_var_dict
-        for key, value in self.env_var_dict.items():
+        for key, value in list(self.env_var_dict.items()):
             self._log.debug('setting {0}={1}'.format(key, value))
             os.environ[str(key)] = str(value)
 
@@ -60,7 +60,7 @@ class BaseCommandLineClient(BaseClient):
         """Sets all os environment variables provided in env_var_dict"""
 
         self.env_var_dict = self.env_var_dict.update(env_var_dict or {})
-        for key, value in self.env_var_dict.items():
+        for key, value in list(self.env_var_dict.items()):
             self._log.debug('setting {0}={1}'.format(key, value))
             os.environ[str(key)] = str(value)
 
@@ -70,7 +70,7 @@ class BaseCommandLineClient(BaseClient):
         If env_var_list is passed, attempts to unset all environment vars in
         list"""
 
-        env_var_list = env_var_list or self.env_var_dict.keys() or []
+        env_var_list = env_var_list or list(self.env_var_dict.keys()) or []
         for key, _ in env_var_list:
             self._log.debug('unsetting {0}'.format(key))
             os.unsetenv(str(key))
@@ -178,7 +178,7 @@ class LinuxClient(BaseClient):
 
         def is_decimal(char):
             return str.isdigit(char) or char == "."
-        size = filter(is_decimal, size)
+        size = list(filter(is_decimal, size))
         return float(size)
 
     def get_number_of_vcpus(self):
@@ -283,7 +283,7 @@ class LinuxClient(BaseClient):
         # Return a list of partition objects that each contains the name and
         # size of the partition in bytes and the type of the partition
         partition_types = self.get_partition_types()
-        partition_names = ' '.join(partition_types.keys())
+        partition_names = ' '.join(list(partition_types.keys()))
 
         partition_size_output = self.ssh_client.execute_command(
             'fdisk -l %s 2>/dev/null | '
@@ -15,6 +15,7 @@ limitations under the License.
 """
 
 from cafe.common.reporting import cclogging
+import six
 
 
 class CommonToolsMixin(object):
@@ -49,7 +50,8 @@ class CommonToolsMixin(object):
         values that are None
         """
 
-        return dict((k, v) for k, v in dictionary.iteritems() if v is not None)
+        return dict(
+            (k, v) for k, v in six.iteritems(dictionary) if v is not None)
 
 
 class JSON_ToolsMixin(object):
@@ -88,7 +90,7 @@ class XML_ToolsMixin(object):
     def _remove_xml_etree_namespace(doc, namespace):
         """Remove namespace in the passed document in place."""
 
-        ns = u'{%s}' % namespace
+        ns = six.u(namespace)
         nsl = len(ns)
         for elem in doc.getiterator():
             for key in elem.attrib:
@@ -129,11 +131,11 @@ class BaseModel(object):
     def __str__(self):
         strng = '<{0} object> {1}'.format(
             type(self).__name__, self.__REPR_SEPARATOR__)
-        for key in vars(self).keys():
+        for key in list(vars(self).keys()):
             val = getattr(self, key)
             if isinstance(val, cclogging.logging.Logger):
                 continue
-            elif isinstance(val, unicode):
+            elif isinstance(val, six.text_type):
                 strng = '{0}{1} = {2}{3}'.format(
                     strng, key, val.encode("utf-8"), self.__REPR_SEPARATOR__)
             else:
@@ -17,7 +17,9 @@ limitations under the License.
 import abc
 import json
 import os
-import ConfigParser
+from six.moves import configparser
+from six import add_metaclass
 
+
 from cafe.common.reporting import cclogging
 try:
@@ -65,18 +67,18 @@ def _get_path_from_env(os_env_var):
             os_env_var)
         raise ConfigEnvironmentVariableError(msg)
     except Exception as exception:
-        print ("Unexpected exception when attempting to access '{1}'"
-               " environment variable.".format(os_env_var))
+        print(
+            "Unexpected exception when attempting to access '{1}'"
+            " environment variable.".format(os_env_var))
         raise exception
 
 # Standard format to for flat key/value data sources
 CONFIG_KEY = 'CAFE_{section_name}_{key}'
 
 
+@add_metaclass(abc.ABCMeta)
 class DataSource(object):
 
-    __metaclass__ = abc.ABCMeta
-
     def get(self, item_name, default=None):
         raise NotImplementedError
@@ -113,7 +115,7 @@ class ConfigParserDataSource(DataSource):
         self._log = cclogging.getLogger(
             cclogging.get_object_namespace(self.__class__))
 
-        self._data_source = ConfigParser.SafeConfigParser()
+        self._data_source = configparser.SafeConfigParser()
         self._section_name = section_name
 
         # Check if the path exists
@@ -133,7 +135,7 @@ class ConfigParserDataSource(DataSource):
 
         try:
             return self._data_source.get(self._section_name, item_name)
-        except (ConfigParser.NoOptionError, ConfigParser.NoSectionError) as e:
+        except (configparser.NoOptionError, configparser.NoSectionError) as e:
             if default is None:
                 self._log.error(str(e))
             else:
@@ -146,7 +148,7 @@ class ConfigParserDataSource(DataSource):
         try:
             return self._data_source.get(
                 self._section_name, item_name, raw=True)
-        except (ConfigParser.NoOptionError, ConfigParser.NoSectionError) as e:
+        except (configparser.NoOptionError, configparser.NoSectionError) as e:
             if default is None:
                 self._log.error(str(e))
             else:
@@ -159,7 +161,7 @@ class ConfigParserDataSource(DataSource):
 
         try:
             return self._data_source.getboolean(self._section_name, item_name)
-        except (ConfigParser.NoOptionError, ConfigParser.NoSectionError) as e:
+        except (configparser.NoOptionError, configparser.NoSectionError) as e:
             if default is None:
                 self._log.error(str(e))
             else:
@@ -1,5 +1,3 @@
-import sys
-import os
 import sphinx_rtd_theme
 
 extensions = [
@@ -0,0 +1 @@
+six
@@ -229,18 +229,18 @@ class HTTPClient(BaseHTTPClient):
         headers = dict(self.default_headers, **(headers or {}))
 
         # Override url if present in requestslib_kwargs
-        if 'url' in requestslib_kwargs.keys():
+        if 'url' in list(requestslib_kwargs.keys()):
             url = requestslib_kwargs.get('url', None) or url
             del requestslib_kwargs['url']
 
         # Override method if present in requestslib_kwargs
-        if 'method' in requestslib_kwargs.keys():
+        if 'method' in list(requestslib_kwargs.keys()):
             method = requestslib_kwargs.get('method', None) or method
             del requestslib_kwargs['method']
 
         # The requests lib already removes None key/value pairs, but we force
         # it here in case that behavior ever changes
-        for key in requestslib_kwargs.keys():
+        for key in list(requestslib_kwargs.keys()):
             if requestslib_kwargs[key] is None:
                 del requestslib_kwargs[key]
@@ -114,10 +114,10 @@ class MessageHandler(object):
     def sd_dict_to_syslog_str(cls, sd_dict):
         """ Converts structured data dictionary to a syslog str """
         syslog_sds = ''
-        for sd_key, sd_val in sd_dict.items():
+        for sd_key, sd_val in list(sd_dict.items()):
             syslog_str = '[{sd_key}'.format(sd_key=sd_key)
 
-            for sub_key, sub_val in sd_val.items():
+            for sub_key, sub_val in list(sd_val.items()):
                 syslog_str = '{orig} {key}="{value}"'.format(
                     orig=syslog_str, key=sub_key, value=sub_val)
             syslog_str += ']'
@@ -89,14 +89,18 @@ class SSHBehavior(BaseBehavior):
             if not os.path.isfile(private_key_file_path):
                 return SSHKeyResponse(error="No private key file created")
             else:
-                os.chmod(private_key_file_path, 0700)
+                os.chmod(private_key_file_path, 0o700)
                 return SSHKeyResponse(
                     public_key=pub_keyfile_path,
                     private_key=private_key_file_path)
-        except IOError as (errno, strerror):
-            _log.error("I/O error({0}): {1}".format(
-                errno, strerror))
-            return SSHKeyResponse(error=strerror)
+        except IOError as err:
+            try:
+                errno, strerror = err
+                _log.error("I/O error({0}): {1}".format(
+                    errno, strerror))
+                return SSHKeyResponse(error=strerror)
+            except:
+                return SSHKeyResponse(error=str(err))
 
     @behavior(BaseSSHClient)
     def ping_using_remote_machine(self, ping_ip_address, count=3):
@@ -15,7 +15,7 @@ limitations under the License.
 """
 
 import socket
-import StringIO
+import io
 import time
 
 from paramiko import AutoAddPolicy, RSAKey, ProxyCommand
@@ -124,7 +124,7 @@ class BaseSSHClient(BaseClient):
             connect_args['look_for_keys'] = True
 
         if auth_strategy == SSHAuthStrategy.KEY_STRING:
-            key_file = StringIO.StringIO(key)
+            key_file = io.StringIO(key)
             key = RSAKey.from_private_key(key_file)
             connect_args['pkey'] = key
 
@@ -207,7 +207,7 @@ class BaseSSHClient(BaseClient):
                     height=9999999)
                 if self._chan is not None:
                     break
-            except SSHException, msg:
+            except SSHException as msg:
                 retry_count = retry_count + 1
                 self._log.error("Channel attempt {0} failed \n {1}".format(
                     retry_count,
@@ -476,7 +476,7 @@ class SSHClient(BaseSSHClient):
         sftp_conn = self.ssh_connection.open_sftp()
         try:
             sftp_conn.put(local_path, remote_path)
-        except IOError, exception:
+        except IOError as exception:
             self._log.warning("Error during file transfer: {error}".format(
                 error=exception))
             return False
@@ -499,7 +499,7 @@ class SSHClient(BaseSSHClient):
         sftp_conn = self.ssh_connection.open_sftp()
         try:
             sftp_conn.get(remote_path, local_path)
-        except IOError, exception:
+        except IOError as exception:
             self._log.warning("Error during file transfer: {error}".format(
                 error=exception))
             return False