Fix compatibility issues in unit tests for Python 3
Change-Id: I7c83333be77724cd1864393aed93143162fd48a2
parent a470fa6aec
commit 9f371ebab3
@@ -294,7 +294,8 @@ def process_files(dirname, files):
         fpath = os.path.join(dirname, fname)
         with open(fpath, 'r') as fp:
             try:
-                template = json.load(fp)
+                data = fp.read()
+                template = json.loads(data)
             except ValueError as e:
                 LOG.warning("Error processing {path}, {reason}".format(
                     path=fpath, reason=e))
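
Note: json.loads accepts a text string on both Python 2 and 3, so reading the file contents first and parsing them explicitly behaves the same on either interpreter. A minimal sketch of the pattern (the load_template name and the print call are illustrative, not from this commit):

    import json

    def load_template(fpath):
        # Read the whole file as text, then parse it; json.loads(str)
        # behaves identically on Python 2 and Python 3.
        with open(fpath, 'r') as fp:
            data = fp.read()
        try:
            return json.loads(data)
        except ValueError as e:
            # json raises ValueError (JSONDecodeError subclasses it on py3)
            print("Error processing {path}, {reason}".format(path=fpath,
                                                             reason=e))
            return None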
@@ -343,7 +344,7 @@ def process_files(dirname, files):
                     "files".format(path=fpath))

         except Handled as e:
-            log_skipping_dir(dirname, e.message)
+            log_skipping_dir(dirname, str(e))
             node_groups = []
             clusters = []

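
Note: exception objects have no .message attribute on Python 3, so e.message raises AttributeError there; str(e) returns the message text on both versions. A small standalone illustration (the Handled class here is a stand-in, not the one from the module above):

    class Handled(Exception):
        pass

    try:
        raise Handled("no valid templates in directory")
    except Handled as e:
        # e.message only exists on Python 2; str(e) works on both.
        print(str(e))  # -> no valid templates in directory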
@@ -13,6 +13,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import string
+
 from oslo_utils import uuidutils
 import six

@@ -90,17 +92,20 @@ class RemoteCommandException(SaharaException):

         formatted_message = self.message_template % cmd

+        def to_printable(s):
+            return "".join(filter(lambda x: x in string.printable, s))
+
         if ret_code:
             formatted_message = '%s\nReturn code: %s' % (
                 formatted_message, six.text_type(ret_code))

         if stderr:
             formatted_message = '%s\nSTDERR:\n%s' % (
-                formatted_message, stderr.decode('ascii', 'ignore'))
+                formatted_message, to_printable(stderr))

         if stdout:
             formatted_message = '%s\nSTDOUT:\n%s' % (
-                formatted_message, stdout.decode('ascii', 'ignore'))
+                formatted_message, to_printable(stdout))

         super(RemoteCommandException, self).__init__(formatted_message)

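
Note: on Python 3 the collected command output may already be text, so calling .decode() on it raises AttributeError; filtering against string.printable strips unprintable characters without assuming bytes. A standalone sketch of the helper, assuming the output is already a str:

    import string

    def to_printable(s):
        # Keep only characters listed in string.printable; assumes s is text.
        return "".join(filter(lambda x: x in string.printable, s))

    print(to_printable("error\x00 output\x7f"))  # -> "error output"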
@@ -16,6 +16,7 @@
 import collections

 from oslo_config import cfg
+import six

 import sahara.exceptions as e
 from sahara.i18n import _
@@ -39,7 +40,7 @@ def _get_node_process_name(node_process):
     name = None
     if isinstance(node_process, np.NodeProcess):
         name = node_process.ui_name
-    elif isinstance(node_process, basestring):
+    elif isinstance(node_process, six.string_types):
         name = node_process
     return name

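
Note: basestring exists only on Python 2; six.string_types is (str, unicode) there and (str,) on Python 3, which keeps the isinstance check portable. A minimal sketch (name_of is a made-up helper for illustration):

    import six

    def name_of(node_process):
        # Portable replacement for isinstance(node_process, basestring).
        if isinstance(node_process, six.string_types):
            return node_process
        return None

    print(name_of("NameNode"))  # -> NameNode
    print(name_of(42))          # -> None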
@@ -319,8 +320,8 @@ class BaseClusterContext(cc.AbstractClusterContext):

     def filter_instances(self, instances, node_process=None, service=None):
         if node_process:
-            return filter(
-                lambda i: self.check_for_process(i, node_process), instances)
+            return list(filter(
+                lambda i: self.check_for_process(i, node_process), instances))
         if service:
             result = []
             for instance in instances:
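
Note: on Python 3 filter() returns a lazy one-shot iterator instead of a list, so callers that index, re-iterate, or call len() on the result need an explicit list(). A small illustration with plain integers:

    values = [1, 2, 3, 4, 5]

    # list() gives the same concrete list on Python 2 and 3; without it,
    # Python 3 would hand back a filter object that is consumed once.
    evens = list(filter(lambda x: x % 2 == 0, values))

    print(evens)       # -> [2, 4]
    print(len(evens))  # -> 2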
@@ -78,7 +78,8 @@ class HadoopXML(BaseConfigurationFile):

     def parse(self, content):
         configs = xml.parse_hadoop_xml_with_name_and_value(content)
-        map(lambda i: self.add_property(i['name'], i['value']), configs)
+        for cfg in configs:
+            self.add_property(cfg["name"], cfg["value"])

     def render(self):
         return xml.create_hadoop_xml(self._config_dict)
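
Note: map() is lazy on Python 3, so using it only for side effects silently does nothing unless the result is consumed; a plain for loop runs on both versions. A standalone sketch (add_property and the sample configs are stand-ins):

    configs = [{"name": "a", "value": "1"}, {"name": "b", "value": "2"}]
    properties = {}

    def add_property(name, value):
        properties[name] = value

    # On Python 3, map(lambda i: add_property(...), configs) would only build
    # a lazy iterator and never call add_property; the loop always executes.
    for cfg in configs:
        add_property(cfg["name"], cfg["value"])

    print(properties)  # -> {'a': '1', 'b': '2'}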
@@ -142,7 +143,7 @@ class EnvironmentConfig(BaseConfigurationFile):

     def parse(self, content):
         for line in content.splitlines():
-            line = line.strip().decode("utf-8")
+            line = six.text_type(line.strip())
             match = self._regex.match(line)
             if match:
                 name, value = match.groups()
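
Note: calling .decode() on a value that is already str raises AttributeError on Python 3; six.text_type (unicode on Python 2, str on Python 3) turns the stripped line into text on both. A minimal sketch with made-up file content:

    import six

    content = "  HADOOP_HEAPSIZE=1024  \nJAVA_HOME=/usr/lib/jvm\n"

    for line in content.splitlines():
        # six.text_type is unicode on Python 2 and str on Python 3.
        line = six.text_type(line.strip())
        print(line)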
@@ -66,9 +66,9 @@ def get_port_from_address(address):

 def instances_with_services(instances, node_processes):
     node_processes = set(node_processes)
-    return filter(
+    return list(filter(
         lambda x: node_processes.intersection(
-            x.node_group.node_processes), instances)
+            x.node_group.node_processes), instances))


 def start_process_event_message(process):
@@ -283,7 +283,7 @@ def _construct_data_source_url(url, job_exec_id):

     def _randstr(match):
         len = int(match.group(1))
         return ''.join(random.choice(string.ascii_lowercase)
-                       for _ in xrange(len))
+                       for _ in six.moves.range(len))

     url = url.replace("%JOB_EXEC_ID%", job_exec_id)
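
Note: xrange exists only on Python 2; six.moves.range maps to xrange there and to range on Python 3. A small sketch of the random-string helper (renamed randstr and given a plain length argument for illustration):

    import random
    import string

    import six

    def randstr(length):
        # six.moves.range is xrange on Python 2 and range on Python 3.
        return ''.join(random.choice(string.ascii_lowercase)
                       for _ in six.moves.range(length))

    print(randstr(6))  # e.g. 'qkzvfa'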
@@ -70,7 +70,7 @@ SAMPLE_CONF_JOB_EXECUTION = {
     }
 }

-BINARY_DATA = "vU}\x97\x1c\xdf\xa686\x08\xf2\tf\x0b\xb1}"
+BINARY_DATA = b"vU}\x97\x1c\xdf\xa686\x08\xf2\tf\x0b\xb1}"

 SAMPLE_JOB_BINARY_INTERNAL = {
     "tenant_id": "test_tenant",
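
Note: without the b prefix the literal is a text string on Python 3, and writing it to a binary-mode file raises TypeError; the b"..." form keeps the fixture as raw bytes on both versions. A short illustration with a throwaway temporary file:

    import tempfile

    BINARY_DATA = b"vU}\x97\x1c\xdf"  # bytes literal: raw bytes on py2 and py3

    with tempfile.NamedTemporaryFile() as fp:   # opened in binary mode
        fp.write(BINARY_DATA)                   # ok: bytes into a binary file
        # fp.write("vU}\x97\x1c\xdf")           # TypeError on Python 3
        fp.flush()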
@@ -221,7 +221,7 @@ class TemplateUpdateTestCase(base.ConductorManagerTestCase):
     def _write_files(self, tempdir, templates):
         files = []
         for template in templates:
-            fp = tempfile.NamedTemporaryFile(suffix=".json",
+            fp = tempfile.NamedTemporaryFile(suffix=".json", mode="w",
                                              dir=tempdir, delete=False)
             json.dump(template, fp)
             files.append(fp.name)
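
Note: tempfile.NamedTemporaryFile defaults to mode 'w+b', and json.dump writes str on Python 3, which a binary-mode file rejects; passing mode="w" opens the file as text on both versions. A standalone sketch (the sample template dict is made up):

    import json
    import os
    import tempfile

    template = {"name": "example-ng", "flavor_id": "42"}

    fp = tempfile.NamedTemporaryFile(suffix=".json", mode="w", delete=False)
    json.dump(template, fp)   # writes str, so the file must be in text mode
    fp.close()

    with open(fp.name) as check:
        print(json.load(check))  # -> {'name': 'example-ng', 'flavor_id': '42'}
    os.remove(fp.name)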
@@ -320,7 +320,7 @@ class TemplateUpdateTestCase(base.ConductorManagerTestCase):
         # Invalid JSON should cause all files to be skipped
         fp = tempfile.NamedTemporaryFile(suffix=".json",
                                          dir=tempdir, delete=False)
-        fp.write("not json")
+        fp.write(b"not json")
         files += [fp.name]
         fp.close()
         ng_templates, cl_templates = template_api.process_files(tempdir, files)
@@ -78,7 +78,7 @@ class TestHandler(b.SaharaTestCase):
         self.assertIsNotNone(np_dict)
         self.assertIsInstance(np_dict, dict)
         for k, v in six.iteritems(np_dict):
-            self.assertIsInstance(k, basestring)
+            self.assertIsInstance(k, six.string_types)
             self.assertIsInstance(v, list)
             self.assertNotEqual(0, len(v))

@@ -101,7 +101,7 @@ class TestHandler(b.SaharaTestCase):
         configs_dict = self.handler.get_configs_dict()
         self.assertIsInstance(configs_dict, dict)
         for k, v in six.iteritems(configs_dict):
-            self.assertIsInstance(k, basestring)
+            self.assertIsInstance(k, six.string_types)
             self.assertIsInstance(v, dict)

     def test_get_open_ports(self):
@@ -117,7 +117,6 @@ class HDFSHelperTestCase(base.SaharaTestCase):
     @mock.patch(('sahara.service.edp.hdfs_helper._get_cluster_hosts_'
                  'information'))
     def test_configure_cluster_for_hdfs(self, mock_helper, mock_get, mock_six):
-        source = mock.MagicMock(url='www.host.ru')
         inst = mock.MagicMock()
         inst.remote = mock.MagicMock()
         mock_six.return_value = 111
@@ -127,7 +126,7 @@ class HDFSHelperTestCase(base.SaharaTestCase):
                 '/etc/hosts && rm -f /tmp/etc-hosts.111 '
                 '/tmp/etc-hosts-update.111')
         mock_get.return_value = [inst]
-        helper.configure_cluster_for_hdfs(self.cluster, source)
+        helper.configure_cluster_for_hdfs(self.cluster, "www.host.ru")
         inst.remote.assert_has_calls(
             [mock.call(), mock.call().__enter__(),
              mock.call().__enter__().write_file_to(str1, mock_helper()),
@@ -144,7 +144,7 @@ class TopologyTestCase(base.SaharaTestCase):
     def _read_swift_topology(self, content):
         temp_file = tempfile.NamedTemporaryFile()
         try:
-            temp_file.write(content)
+            temp_file.write(str.encode(content))
             temp_file.flush()
             self.override_config("swift_topology_file", temp_file.name)
             return th._read_swift_topology()
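
Note: the temporary file above is opened in binary mode, and on Python 3 a str must be encoded to bytes before it can be written there; str.encode(content) performs that conversion (UTF-8 by default on Python 3). A small sketch with made-up topology content:

    import tempfile

    content = "10.0.0.1 /rack1\n10.0.0.2 /rack2\n"

    temp_file = tempfile.NamedTemporaryFile()   # binary mode by default
    temp_file.write(str.encode(content))        # text -> bytes before writing
    temp_file.flush()

    with open(temp_file.name, "rb") as check:
        print(check.read())                     # -> b'10.0.0.1 /rack1\n...'
    temp_file.close()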
@@ -148,15 +148,17 @@ class ExecuteWithRetryTest(testbase.SaharaTestCase):

     def test_neutronclient_calls_without_retry(self):
         # check that following errors will not be retried
-        self._check_error_without_retry(neutron_exc.BadRequest, 400)
-        self._check_error_without_retry(neutron_exc.Forbidden, 403)
-        self._check_error_without_retry(neutron_exc.NotFound, 404)
-        self._check_error_without_retry(neutron_exc.Conflict, 409)
+        # neutron exception expects string in constructor
+        self._check_error_without_retry(neutron_exc.BadRequest, "400")
+        self._check_error_without_retry(neutron_exc.Forbidden, "403")
+        self._check_error_without_retry(neutron_exc.NotFound, "404")
+        self._check_error_without_retry(neutron_exc.Conflict, "409")

     def test_neutronclient_calls_with_retry(self):
         # check that following errors will be retried
-        self._check_error_with_retry(neutron_exc.InternalServerError, 500)
-        self._check_error_with_retry(neutron_exc.ServiceUnavailable, 503)
+        # neutron exception expects string in constructor
+        self._check_error_with_retry(neutron_exc.InternalServerError, "500")
+        self._check_error_with_retry(neutron_exc.ServiceUnavailable, "503")

     def test_heatclient_calls_without_retry(self):
         # check that following errors will not be retried
@@ -115,6 +115,6 @@ def get_builtin_binaries(job, configs):
     if is_adapt_for_oozie_enabled(configs):
         path = 'service/edp/resources/edp-main-wrapper.jar'
         name = 'builtin-%s.jar' % six.text_type(uuid.uuid4())
-        return [{'raw': files.get_file_text(path),
+        return [{'raw': files.get_file_binary(path),
                  'name': name}]
     return []
@@ -22,3 +22,9 @@ def get_file_text(file_name):
     full_name = pkg.resource_filename(
         version.version_info.package, file_name)
     return open(full_name).read()
+
+
+def get_file_binary(file_name):
+    full_name = pkg.resource_filename(
+        version.version_info.package, file_name)
+    return open(full_name, "rb").read()
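
Note: open(name).read() returns str on Python 3, which breaks callers that expect raw bytes such as the job binary payload above; opening with "rb" returns bytes on both versions. A minimal sketch of the two helpers against a throwaway file:

    import tempfile

    with tempfile.NamedTemporaryFile(delete=False) as fp:
        fp.write(b"fake jar contents")
        path = fp.name

    def get_file_text(file_name):
        return open(file_name).read()        # str on Python 3

    def get_file_binary(file_name):
        return open(file_name, "rb").read()  # raw bytes, no decoding

    print(type(get_file_text(path)).__name__)    # -> str
    print(type(get_file_binary(path)).__name__)  # -> bytes on Python 3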
@@ -14,6 +14,7 @@
 # limitations under the License.

 from novaclient.v2 import images
+import six

 from sahara import exceptions as exc

@@ -32,7 +33,7 @@ def _iter_tags(meta):
 def _ensure_tags(tags):
     if not tags:
         return []
-    return [tags] if type(tags) in [str, unicode] else tags
+    return [tags] if isinstance(tags, six.string_types) else tags


 class SaharaImage(images.Image):
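
Note: the name unicode is gone on Python 3, so type(tags) in [str, unicode] raises NameError there; isinstance(tags, six.string_types) covers str/unicode on Python 2 and str on Python 3. A brief sketch:

    import six

    def ensure_tags(tags):
        if not tags:
            return []
        # A single string becomes a one-element list; lists pass through.
        return [tags] if isinstance(tags, six.string_types) else tags

    print(ensure_tags("hadoop"))            # -> ['hadoop']
    print(ensure_tags(["hadoop", "2.7.1"])) # -> ['hadoop', '2.7.1']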
@@ -121,9 +121,8 @@ def add_property_to_configuration(doc, name, value):

 def add_properties_to_configuration(doc, parent_for_conf, configs):
     get_and_create_if_not_exist(doc, parent_for_conf, 'configuration')
-    for n in sorted(configs):
-        if n:
-            add_property_to_configuration(doc, n, configs[n])
+    for n in sorted(filter(lambda x: x, configs)):
+        add_property_to_configuration(doc, n, configs[n])


 def add_child(doc, parent, tag_to_add):
@@ -156,7 +155,7 @@ def add_text_element_to_element(doc, parent, element, value):


 def add_equal_separated_dict(doc, parent_tag, each_elem_tag, value):
-    for k in sorted(value):
+    for k in sorted(filter(lambda x: x, value)):
         if k:
             add_text_element_to_tag(doc, parent_tag, each_elem_tag,
                                     "%s=%s" % (k, value[k]))