Fix Python 3 issues in nova.db.sqlalchemy

* Replace dict(obj.iteritems()) with dict(obj) where obj is a dictionary
  or an oslo.db object
* Replace obj.iteritems() with obj.items() where obj is a dictionary:
  obj.items() works on Python 2 and Python 3.
* Replace filter() and map() with list-comprehension using if when a list is
  expected. On Python 3, filter() and map() return an iterator.
* Replace obj.keys() with list(obj.keys()) when a list is expected.
  Replace obj.keys()[0] with list(obj.keys())[0]. On Python 3,
  dict.keys() now returns a view object, which cannot be indexed.
* Replace (int, long) with six.integer_types
* Get the name of a function using the __name__ attribute, instead of
  the func_name attribute. The func_name attribute was removed
  in Python 3.
* InstanceTypeTestCase: use dict.update() to combine dictionaries
  instead of using dict.items()+reduce()+dict()
* matchers.py: replace StringIO.StringIO with six.StringIO.
* tox.ini: add nova.tests.unit.db.test_db_api to Python 3.4

Blueprint nova-python3
Change-Id: Iae4f6d971818d5b38e838a83753a0707e954bb04
This commit is contained in:
Victor Stinner 2015-06-24 15:42:51 +02:00 committed by Matt Riedemann
parent 9cb24b3168
commit 2daf8c38ad
9 changed files with 79 additions and 73 deletions

View File

@ -393,7 +393,7 @@ class Constraint(object):
self.conditions = conditions self.conditions = conditions
def apply(self, model, query): def apply(self, model, query):
for key, condition in self.conditions.iteritems(): for key, condition in self.conditions.items():
for clause in condition.clauses(getattr(model, key)): for clause in condition.clauses(getattr(model, key)):
query = query.filter(clause) query = query.filter(clause)
return query return query
@ -697,7 +697,7 @@ def compute_node_statistics(context):
@require_admin_context @require_admin_context
def certificate_create(context, values): def certificate_create(context, values):
certificate_ref = models.Certificate() certificate_ref = models.Certificate()
for (key, value) in values.iteritems(): for (key, value) in values.items():
certificate_ref[key] = value certificate_ref[key] = value
certificate_ref.save() certificate_ref.save()
return certificate_ref return certificate_ref
@ -847,7 +847,7 @@ def floating_ip_bulk_destroy(context, ips):
# Delete the quotas, if needed. # Delete the quotas, if needed.
# Quota update happens in a separate transaction, so previous must have # Quota update happens in a separate transaction, so previous must have
# been committed first. # been committed first.
for project_id, count in project_id_to_quota_count.iteritems(): for project_id, count in project_id_to_quota_count.items():
try: try:
reservations = quota.QUOTAS.reserve(context, reservations = quota.QUOTAS.reserve(context,
project_id=project_id, project_id=project_id,
@ -1544,7 +1544,7 @@ def virtual_interface_get_all(context):
def _metadata_refs(metadata_dict, meta_class): def _metadata_refs(metadata_dict, meta_class):
metadata_refs = [] metadata_refs = []
if metadata_dict: if metadata_dict:
for k, v in metadata_dict.iteritems(): for k, v in metadata_dict.items():
metadata_ref = meta_class() metadata_ref = meta_class()
metadata_ref['key'] = k metadata_ref['key'] = k
metadata_ref['value'] = v metadata_ref['value'] = v
@ -1817,7 +1817,7 @@ def _instances_fill_metadata(context, instances,
filled_instances = [] filled_instances = []
for inst in instances: for inst in instances:
inst = dict(inst.iteritems()) inst = dict(inst)
inst['system_metadata'] = sys_meta[inst['uuid']] inst['system_metadata'] = sys_meta[inst['uuid']]
inst['metadata'] = meta[inst['uuid']] inst['metadata'] = meta[inst['uuid']]
if 'pci_devices' in manual_joins: if 'pci_devices' in manual_joins:
@ -2216,7 +2216,7 @@ def _exact_instance_filter(query, filters, legal_keys):
query = query.filter(column_attr.any(value=v)) query = query.filter(column_attr.any(value=v))
else: else:
for k, v in value.iteritems(): for k, v in value.items():
query = query.filter(column_attr.any(key=k)) query = query.filter(column_attr.any(key=k))
query = query.filter(column_attr.any(value=v)) query = query.filter(column_attr.any(value=v))
elif isinstance(value, (list, tuple, set, frozenset)): elif isinstance(value, (list, tuple, set, frozenset)):
@ -2385,8 +2385,7 @@ def instance_get_all_by_host_and_node(context, host, node,
manual_joins = [] manual_joins = []
else: else:
candidates = ['system_metadata', 'metadata'] candidates = ['system_metadata', 'metadata']
manual_joins = filter(lambda x: x in candidates, manual_joins = [x for x in columns_to_join if x in candidates]
columns_to_join)
columns_to_join = list(set(columns_to_join) - set(candidates)) columns_to_join = list(set(columns_to_join) - set(candidates))
return _instances_fill_metadata(context, return _instances_fill_metadata(context,
_instance_get_all_query( _instance_get_all_query(
@ -2488,7 +2487,7 @@ def _instance_metadata_update_in_place(context, instance, metadata_type, model,
for condemned in to_delete: for condemned in to_delete:
condemned.soft_delete(session=session) condemned.soft_delete(session=session)
for key, value in metadata.iteritems(): for key, value in metadata.items():
newitem = model() newitem = model()
newitem.update({'key': key, 'value': value, newitem.update({'key': key, 'value': value,
'instance_uuid': instance['uuid']}) 'instance_uuid': instance['uuid']})
@ -4575,7 +4574,7 @@ def flavor_create(context, values, projects=None):
specs = values.get('extra_specs') specs = values.get('extra_specs')
specs_refs = [] specs_refs = []
if specs: if specs:
for k, v in specs.iteritems(): for k, v in specs.items():
specs_ref = models.InstanceTypeExtraSpecs() specs_ref = models.InstanceTypeExtraSpecs()
specs_ref['key'] = k specs_ref['key'] = k
specs_ref['value'] = v specs_ref['value'] = v
@ -4860,7 +4859,7 @@ def flavor_extra_specs_update_or_create(context, flavor_id, specs,
existing_keys.add(key) existing_keys.add(key)
spec_ref.update({"value": specs[key]}) spec_ref.update({"value": specs[key]})
for key, value in specs.iteritems(): for key, value in specs.items():
if key in existing_keys: if key in existing_keys:
continue continue
spec_ref = models.InstanceTypeExtraSpecs() spec_ref = models.InstanceTypeExtraSpecs()
@ -5553,7 +5552,7 @@ def aggregate_metadata_add(context, aggregate_id, metadata, set_delete=False,
already_existing_keys.add(key) already_existing_keys.add(key)
new_entries = [] new_entries = []
for key, value in metadata.iteritems(): for key, value in metadata.items():
if key in already_existing_keys: if key in already_existing_keys:
continue continue
new_entries.append({"key": key, new_entries.append({"key": key,
@ -5620,7 +5619,7 @@ def instance_fault_create(context, values):
fault_ref = models.InstanceFault() fault_ref = models.InstanceFault()
fault_ref.update(values) fault_ref.update(values)
fault_ref.save() fault_ref.save()
return dict(fault_ref.iteritems()) return dict(fault_ref)
def instance_fault_get_by_instance_uuids(context, instance_uuids): def instance_fault_get_by_instance_uuids(context, instance_uuids):

View File

@ -50,7 +50,7 @@ def InetSmall():
def _create_shadow_tables(migrate_engine): def _create_shadow_tables(migrate_engine):
meta = MetaData(migrate_engine) meta = MetaData(migrate_engine)
meta.reflect(migrate_engine) meta.reflect(migrate_engine)
table_names = meta.tables.keys() table_names = list(meta.tables.keys())
meta.bind = migrate_engine meta.bind = migrate_engine
@ -96,7 +96,7 @@ def _populate_instance_types(instance_types_table):
try: try:
i = instance_types_table.insert() i = instance_types_table.insert()
for name, values in default_inst_types.iteritems(): for name, values in default_inst_types.items():
i.execute({'name': name, 'memory_mb': values["mem"], i.execute({'name': name, 'memory_mb': values["mem"],
'vcpus': values["vcpus"], 'deleted': 0, 'vcpus': values["vcpus"], 'deleted': 0,
'root_gb': values["root_gb"], 'root_gb': values["root_gb"],

View File

@ -63,7 +63,7 @@ def check_shadow_table(migrate_engine, table_name):
columns = {c.name: c for c in table.columns} columns = {c.name: c for c in table.columns}
shadow_columns = {c.name: c for c in shadow_table.columns} shadow_columns = {c.name: c for c in shadow_table.columns}
for name, column in columns.iteritems(): for name, column in columns.items():
if name not in shadow_columns: if name not in shadow_columns:
raise exception.NovaException( raise exception.NovaException(
_("Missing column %(table)s.%(column)s in shadow table") _("Missing column %(table)s.%(column)s in shadow table")
@ -78,7 +78,7 @@ def check_shadow_table(migrate_engine, table_name):
'c_type': column.type, 'c_type': column.type,
'shadow_c_type': shadow_column.type}) 'shadow_c_type': shadow_column.type})
for name, column in shadow_columns.iteritems(): for name, column in shadow_columns.items():
if name not in columns: if name not in columns:
raise exception.NovaException( raise exception.NovaException(
_("Extra column %(table)s.%(column)s in shadow table") _("Extra column %(table)s.%(column)s in shadow table")

View File

@ -524,7 +524,7 @@ class NovaObject(object):
@property @property
def obj_fields(self): def obj_fields(self):
return self.fields.keys() + self.obj_extra_fields return list(self.fields.keys()) + self.obj_extra_fields
# NOTE(danms): This is nova-specific, so don't copy this to o.vo # NOTE(danms): This is nova-specific, so don't copy this to o.vo
@contextlib.contextmanager @contextlib.contextmanager

View File

@ -83,8 +83,7 @@ def _expected_cols(expected_attrs):
if field in expected_attrs] if field in expected_attrs]
if complex_cols: if complex_cols:
simple_cols.append('extra') simple_cols.append('extra')
simple_cols = filter(lambda x: x not in _INSTANCE_EXTRA_FIELDS, simple_cols = [x for x in simple_cols if x not in _INSTANCE_EXTRA_FIELDS]
simple_cols)
if (any([flavor in expected_attrs if (any([flavor in expected_attrs
for flavor in ['flavor', 'old_flavor', 'new_flavor']]) and for flavor in ['flavor', 'old_flavor', 'new_flavor']]) and
'system_metadata' not in simple_cols): 'system_metadata' not in simple_cols):

View File

@ -526,7 +526,7 @@ class DbQuotaDriver(object):
# Set up the reservation expiration # Set up the reservation expiration
if expire is None: if expire is None:
expire = CONF.reservation_expire expire = CONF.reservation_expire
if isinstance(expire, (int, long)): if isinstance(expire, six.integer_types):
expire = datetime.timedelta(seconds=expire) expire = datetime.timedelta(seconds=expire)
if isinstance(expire, datetime.timedelta): if isinstance(expire, datetime.timedelta):
expire = timeutils.utcnow() + expire expire = timeutils.utcnow() + expire

View File

@ -20,7 +20,6 @@
import copy import copy
import datetime import datetime
import types
import uuid as stdlib_uuid import uuid as stdlib_uuid
import iso8601 import iso8601
@ -170,7 +169,7 @@ class DecoratorTestCase(test.TestCase):
decorated_func = decorator(test_func) decorated_func = decorator(test_func)
self.assertEqual(test_func.func_name, decorated_func.func_name) self.assertEqual(test_func.__name__, decorated_func.__name__)
self.assertEqual(test_func.__doc__, decorated_func.__doc__) self.assertEqual(test_func.__doc__, decorated_func.__doc__)
self.assertEqual(test_func.__module__, decorated_func.__module__) self.assertEqual(test_func.__module__, decorated_func.__module__)
@ -774,7 +773,7 @@ class AggregateDBApiTestCase(test.TestCase):
ctxt = context.get_admin_context() ctxt = context.get_admin_context()
result = _create_aggregate(context=ctxt) result = _create_aggregate(context=ctxt)
metadata = _get_fake_aggr_metadata() metadata = _get_fake_aggr_metadata()
key = metadata.keys()[0] key = list(metadata.keys())[0]
new_metadata = {key: 'foo', new_metadata = {key: 'foo',
'fake_new_key': 'fake_new_value'} 'fake_new_key': 'fake_new_value'}
metadata.update(new_metadata) metadata.update(new_metadata)
@ -805,7 +804,7 @@ class AggregateDBApiTestCase(test.TestCase):
ctxt = context.get_admin_context() ctxt = context.get_admin_context()
result = _create_aggregate(context=ctxt) result = _create_aggregate(context=ctxt)
metadata = _get_fake_aggr_metadata() metadata = _get_fake_aggr_metadata()
key = metadata.keys()[0] key = list(metadata.keys())[0]
db.aggregate_metadata_delete(ctxt, result['id'], key) db.aggregate_metadata_delete(ctxt, result['id'], key)
new_metadata = {key: 'foo'} new_metadata = {key: 'foo'}
db.aggregate_metadata_add(ctxt, result['id'], new_metadata) db.aggregate_metadata_add(ctxt, result['id'], new_metadata)
@ -818,9 +817,10 @@ class AggregateDBApiTestCase(test.TestCase):
result = _create_aggregate(context=ctxt, metadata=None) result = _create_aggregate(context=ctxt, metadata=None)
metadata = _get_fake_aggr_metadata() metadata = _get_fake_aggr_metadata()
db.aggregate_metadata_add(ctxt, result['id'], metadata) db.aggregate_metadata_add(ctxt, result['id'], metadata)
db.aggregate_metadata_delete(ctxt, result['id'], metadata.keys()[0]) db.aggregate_metadata_delete(ctxt, result['id'],
list(metadata.keys())[0])
expected = db.aggregate_metadata_get(ctxt, result['id']) expected = db.aggregate_metadata_get(ctxt, result['id'])
del metadata[metadata.keys()[0]] del metadata[list(metadata.keys())[0]]
self.assertThat(metadata, matchers.DictMatches(expected)) self.assertThat(metadata, matchers.DictMatches(expected))
def test_aggregate_remove_availability_zone(self): def test_aggregate_remove_availability_zone(self):
@ -1029,7 +1029,7 @@ class SqlAlchemyDbApiTestCase(DbTestCase):
self.create_instance_with_args(host='host2') self.create_instance_with_args(host='host2')
result = sqlalchemy_api._instance_get_all_uuids_by_host(ctxt, 'host1') result = sqlalchemy_api._instance_get_all_uuids_by_host(ctxt, 'host1')
self.assertEqual(2, len(result)) self.assertEqual(2, len(result))
self.assertEqual(types.UnicodeType, type(result[0])) self.assertEqual(six.text_type, type(result[0]))
def test_instance_get_active_by_window_joined(self): def test_instance_get_active_by_window_joined(self):
now = datetime.datetime(2013, 10, 10, 17, 16, 37, 156701) now = datetime.datetime(2013, 10, 10, 17, 16, 37, 156701)
@ -1369,7 +1369,11 @@ class ModelsObjectComparatorMixin(object):
def _dict_from_object(self, obj, ignored_keys): def _dict_from_object(self, obj, ignored_keys):
if ignored_keys is None: if ignored_keys is None:
ignored_keys = [] ignored_keys = []
return {k: v for k, v in obj.iteritems() if isinstance(obj, dict):
obj_items = obj.items()
else:
obj_items = obj.iteritems()
return {k: v for k, v in obj_items
if k not in ignored_keys} if k not in ignored_keys}
def _assertEqualObjects(self, obj1, obj2, ignored_keys=None): def _assertEqualObjects(self, obj1, obj2, ignored_keys=None):
@ -1380,7 +1384,7 @@ class ModelsObjectComparatorMixin(object):
len(obj2), len(obj2),
"Keys mismatch: %s" % "Keys mismatch: %s" %
str(set(obj1.keys()) ^ set(obj2.keys()))) str(set(obj1.keys()) ^ set(obj2.keys())))
for key, value in obj1.iteritems(): for key, value in obj1.items():
self.assertEqual(value, obj2[key]) self.assertEqual(value, obj2[key])
def _assertEqualListsOfObjects(self, objs1, objs2, ignored_keys=None): def _assertEqualListsOfObjects(self, objs1, objs2, ignored_keys=None):
@ -1393,7 +1397,7 @@ class ModelsObjectComparatorMixin(object):
def _assertEqualOrderedListOfObjects(self, objs1, objs2, def _assertEqualOrderedListOfObjects(self, objs1, objs2,
ignored_keys=None): ignored_keys=None):
obj_to_dict = lambda o: self._dict_from_object(o, ignored_keys) obj_to_dict = lambda o: self._dict_from_object(o, ignored_keys)
conv = lambda obj: map(obj_to_dict, obj) conv = lambda objs: [obj_to_dict(obj) for obj in objs]
self.assertEqual(conv(objs1), conv(objs2)) self.assertEqual(conv(objs1), conv(objs2))
@ -1579,15 +1583,15 @@ class SecurityGroupRuleTestCase(test.TestCase, ModelsObjectComparatorMixin):
rules_ids = [security_group_rule['id'], security_group_rule1['id']] rules_ids = [security_group_rule['id'], security_group_rule1['id']]
for rule in found_rules: for rule in found_rules:
if columns is None: if columns is None:
self.assertIn('grantee_group', dict(rule.iteritems())) self.assertIn('grantee_group', dict(rule))
self.assertIn('instances', self.assertIn('instances',
dict(rule.grantee_group.iteritems())) dict(rule.grantee_group))
self.assertIn( self.assertIn(
'system_metadata', 'system_metadata',
dict(rule.grantee_group.instances[0].iteritems())) dict(rule.grantee_group.instances[0]))
self.assertIn(rule['id'], rules_ids) self.assertIn(rule['id'], rules_ids)
else: else:
self.assertNotIn('grantee_group', dict(rule.iteritems())) self.assertNotIn('grantee_group', dict(rule))
def test_security_group_rule_get_by_security_group(self): def test_security_group_rule_get_by_security_group(self):
self._test_security_group_rule_get_by_security_group() self._test_security_group_rule_get_by_security_group()
@ -1675,7 +1679,7 @@ class SecurityGroupTestCase(test.TestCase, ModelsObjectComparatorMixin):
def test_security_group_create(self): def test_security_group_create(self):
security_group = self._create_security_group({}) security_group = self._create_security_group({})
self.assertIsNotNone(security_group['id']) self.assertIsNotNone(security_group['id'])
for key, value in self._get_base_values().iteritems(): for key, value in self._get_base_values().items():
self.assertEqual(value, security_group[key]) self.assertEqual(value, security_group[key])
def test_security_group_destroy(self): def test_security_group_destroy(self):
@ -1708,7 +1712,7 @@ class SecurityGroupTestCase(test.TestCase, ModelsObjectComparatorMixin):
self.ctxt, secgroup['id'], self.ctxt, secgroup['id'],
columns_to_join=['instances.system_metadata']) columns_to_join=['instances.system_metadata'])
inst = secgroup.instances[0] inst = secgroup.instances[0]
self.assertIn('system_metadata', dict(inst.iteritems()).keys()) self.assertIn('system_metadata', dict(inst).keys())
def test_security_group_get_no_instances(self): def test_security_group_get_no_instances(self):
instance = db.instance_create(self.ctxt, {}) instance = db.instance_create(self.ctxt, {})
@ -1863,7 +1867,7 @@ class SecurityGroupTestCase(test.TestCase, ModelsObjectComparatorMixin):
security_group['id'], security_group['id'],
new_values, new_values,
columns_to_join=['rules.grantee_group']) columns_to_join=['rules.grantee_group'])
for key, value in new_values.iteritems(): for key, value in new_values.items():
self.assertEqual(updated_group[key], value) self.assertEqual(updated_group[key], value)
self.assertEqual(updated_group['rules'], []) self.assertEqual(updated_group['rules'], [])
@ -2817,7 +2821,7 @@ class ServiceTestCase(test.TestCase, ModelsObjectComparatorMixin):
def test_service_create(self): def test_service_create(self):
service = self._create_service({}) service = self._create_service({})
self.assertIsNotNone(service['id']) self.assertIsNotNone(service['id'])
for key, value in self._get_base_values().iteritems(): for key, value in self._get_base_values().items():
self.assertEqual(value, service[key]) self.assertEqual(value, service[key])
def test_service_create_disabled(self): def test_service_create_disabled(self):
@ -2846,7 +2850,7 @@ class ServiceTestCase(test.TestCase, ModelsObjectComparatorMixin):
} }
db.service_update(self.ctxt, service['id'], new_values) db.service_update(self.ctxt, service['id'], new_values)
updated_service = db.service_get(self.ctxt, service['id']) updated_service = db.service_get(self.ctxt, service['id'])
for key, value in new_values.iteritems(): for key, value in new_values.items():
self.assertEqual(value, updated_service[key]) self.assertEqual(value, updated_service[key])
def test_service_update_not_found_exception(self): def test_service_update_not_found_exception(self):
@ -3534,9 +3538,9 @@ class InstanceTypeTestCase(BaseInstanceTypeTestCase):
filters = {} filters = {}
expected_it = flavors expected_it = flavors
for name, value in filters.iteritems(): for name, value in filters.items():
filt = lambda it: lambda_filters[name](it, value) filt = lambda it: lambda_filters[name](it, value)
expected_it = filter(filt, expected_it) expected_it = list(filter(filt, expected_it))
real_it = db.flavor_get_all(self.ctxt, filters=filters) real_it = db.flavor_get_all(self.ctxt, filters=filters)
self._assertEqualListsOfObjects(expected_it, real_it) self._assertEqualListsOfObjects(expected_it, real_it)
@ -3559,9 +3563,9 @@ class InstanceTypeTestCase(BaseInstanceTypeTestCase):
for root in root_filts: for root in root_filts:
for disabled in disabled_filts: for disabled in disabled_filts:
for is_public in is_public_filts: for is_public in is_public_filts:
filts = [f.items() for f in filts = {}
[mem, root, disabled, is_public]] for f in (mem, root, disabled, is_public):
filts = dict(reduce(lambda x, y: x + y, filts, [])) filts.update(f)
assert_multi_filter_flavor_get(filts) assert_multi_filter_flavor_get(filts)
def test_flavor_get_all_limit_sort(self): def test_flavor_get_all_limit_sort(self):
@ -3749,7 +3753,7 @@ class InstanceTypeExtraSpecsTestCase(BaseInstanceTypeTestCase):
def test_flavor_extra_specs_delete(self): def test_flavor_extra_specs_delete(self):
for it in self.flavors: for it in self.flavors:
specs = it['extra_specs'] specs = it['extra_specs']
key = specs.keys()[0] key = list(specs.keys())[0]
del specs[key] del specs[key]
db.flavor_extra_specs_delete(self.ctxt, it['flavorid'], key) db.flavor_extra_specs_delete(self.ctxt, it['flavorid'], key)
real_specs = db.flavor_extra_specs_get(self.ctxt, it['flavorid']) real_specs = db.flavor_extra_specs_get(self.ctxt, it['flavorid'])
@ -3958,15 +3962,15 @@ class FixedIPTestCase(BaseInstanceTypeTestCase):
'host3': ['1.1.1.6'] 'host3': ['1.1.1.6']
} }
for host, ips in host_ips.iteritems(): for host, ips in host_ips.items():
for ip in ips: for ip in ips:
instance_uuid = self._create_instance(host=host) instance_uuid = self._create_instance(host=host)
db.fixed_ip_create(self.ctxt, {'address': ip}) db.fixed_ip_create(self.ctxt, {'address': ip})
db.fixed_ip_associate(self.ctxt, ip, instance_uuid) db.fixed_ip_associate(self.ctxt, ip, instance_uuid)
for host, ips in host_ips.iteritems(): for host, ips in host_ips.items():
ips_on_host = map(lambda x: x['address'], ips_on_host = [x['address']
db.fixed_ip_get_by_host(self.ctxt, host)) for x in db.fixed_ip_get_by_host(self.ctxt, host)]
self._assertEqualListsOfPrimitivesAsSets(ips_on_host, ips) self._assertEqualListsOfPrimitivesAsSets(ips_on_host, ips)
def test_fixed_ip_get_by_network_host_not_found_exception(self): def test_fixed_ip_get_by_network_host_not_found_exception(self):
@ -4572,13 +4576,13 @@ class FloatingIpTestCase(test.TestCase, ModelsObjectComparatorMixin):
'pool2': ['2.2.2.2'], 'pool2': ['2.2.2.2'],
'pool3': ['3.3.3.3', '4.4.4.4', '5.5.5.5'] 'pool3': ['3.3.3.3', '4.4.4.4', '5.5.5.5']
} }
for pool, addresses in pools.iteritems(): for pool, addresses in pools.items():
for address in addresses: for address in addresses:
vals = {'pool': pool, 'address': address, 'project_id': None} vals = {'pool': pool, 'address': address, 'project_id': None}
self._create_floating_ip(vals) self._create_floating_ip(vals)
project_id = self._get_base_values()['project_id'] project_id = self._get_base_values()['project_id']
for pool, addresses in pools.iteritems(): for pool, addresses in pools.items():
alloc_addrs = [] alloc_addrs = []
for i in addresses: for i in addresses:
float_addr = db.floating_ip_allocate_address(self.ctxt, float_addr = db.floating_ip_allocate_address(self.ctxt,
@ -4676,7 +4680,7 @@ class FloatingIpTestCase(test.TestCase, ModelsObjectComparatorMixin):
def test_floating_ip_bulk_create(self): def test_floating_ip_bulk_create(self):
expected_ips = ['1.1.1.1', '1.1.1.2', '1.1.1.3', '1.1.1.4'] expected_ips = ['1.1.1.1', '1.1.1.2', '1.1.1.3', '1.1.1.4']
result = db.floating_ip_bulk_create(self.ctxt, result = db.floating_ip_bulk_create(self.ctxt,
map(lambda x: {'address': x}, expected_ips), [{'address': x} for x in expected_ips],
want_result=False) want_result=False)
self.assertIsNone(result) self.assertIsNone(result)
self._assertEqualListsOfPrimitivesAsSets(self._get_existing_ips(), self._assertEqualListsOfPrimitivesAsSets(self._get_existing_ips(),
@ -4686,11 +4690,13 @@ class FloatingIpTestCase(test.TestCase, ModelsObjectComparatorMixin):
ips = ['1.1.1.1', '1.1.1.2', '1.1.1.3', '1.1.1.4'] ips = ['1.1.1.1', '1.1.1.2', '1.1.1.3', '1.1.1.4']
prepare_ips = lambda x: {'address': x} prepare_ips = lambda x: {'address': x}
result = db.floating_ip_bulk_create(self.ctxt, map(prepare_ips, ips)) result = db.floating_ip_bulk_create(self.ctxt,
list(map(prepare_ips, ips)))
self.assertEqual(ips, [ip.address for ip in result]) self.assertEqual(ips, [ip.address for ip in result])
self.assertRaises(exception.FloatingIpExists, self.assertRaises(exception.FloatingIpExists,
db.floating_ip_bulk_create, db.floating_ip_bulk_create,
self.ctxt, map(prepare_ips, ['1.1.1.5', '1.1.1.4']), self.ctxt,
list(map(prepare_ips, ['1.1.1.5', '1.1.1.4'])),
want_result=False) want_result=False)
self.assertRaises(exception.FloatingIpNotFoundForAddress, self.assertRaises(exception.FloatingIpNotFoundForAddress,
db.floating_ip_get_by_address, db.floating_ip_get_by_address,
@ -4733,7 +4739,7 @@ class FloatingIpTestCase(test.TestCase, ModelsObjectComparatorMixin):
db.floating_ip_bulk_destroy(self.ctxt, ips_for_delete) db.floating_ip_bulk_destroy(self.ctxt, ips_for_delete)
expected_addresses = map(lambda x: x['address'], ips_for_non_delete) expected_addresses = [x['address'] for x in ips_for_non_delete]
self._assertEqualListsOfPrimitivesAsSets(self._get_existing_ips(), self._assertEqualListsOfPrimitivesAsSets(self._get_existing_ips(),
expected_addresses) expected_addresses)
self.assertEqual(db.quota_usage_get_all_by_project( self.assertEqual(db.quota_usage_get_all_by_project(
@ -4908,14 +4914,14 @@ class FloatingIpTestCase(test.TestCase, ModelsObjectComparatorMixin):
} }
hosts_with_float_ips = {} hosts_with_float_ips = {}
for host, addresses in hosts.iteritems(): for host, addresses in hosts.items():
hosts_with_float_ips[host] = [] hosts_with_float_ips[host] = []
for address in addresses: for address in addresses:
float_ip = self._create_floating_ip({'host': host, float_ip = self._create_floating_ip({'host': host,
'address': address}) 'address': address})
hosts_with_float_ips[host].append(float_ip) hosts_with_float_ips[host].append(float_ip)
for host, float_ips in hosts_with_float_ips.iteritems(): for host, float_ips in hosts_with_float_ips.items():
real_float_ips = db.floating_ip_get_all_by_host(self.ctxt, host) real_float_ips = db.floating_ip_get_all_by_host(self.ctxt, host)
self._assertEqualListsOfObjects(float_ips, real_float_ips, self._assertEqualListsOfObjects(float_ips, real_float_ips,
ignored_keys="fixed_ip") ignored_keys="fixed_ip")
@ -4933,14 +4939,14 @@ class FloatingIpTestCase(test.TestCase, ModelsObjectComparatorMixin):
} }
projects_with_float_ips = {} projects_with_float_ips = {}
for project_id, addresses in projects.iteritems(): for project_id, addresses in projects.items():
projects_with_float_ips[project_id] = [] projects_with_float_ips[project_id] = []
for address in addresses: for address in addresses:
float_ip = self._create_floating_ip({'project_id': project_id, float_ip = self._create_floating_ip({'project_id': project_id,
'address': address}) 'address': address})
projects_with_float_ips[project_id].append(float_ip) projects_with_float_ips[project_id].append(float_ip)
for project_id, float_ips in projects_with_float_ips.iteritems(): for project_id, float_ips in projects_with_float_ips.items():
real_float_ips = db.floating_ip_get_all_by_project(self.ctxt, real_float_ips = db.floating_ip_get_all_by_project(self.ctxt,
project_id) project_id)
self._assertEqualListsOfObjects(float_ips, real_float_ips, self._assertEqualListsOfObjects(float_ips, real_float_ips,
@ -5452,8 +5458,8 @@ class BlockDeviceMappingTestCase(test.TestCase):
bdm_real = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid) bdm_real = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
self.assertEqual(bdm_real[0]['destination_type'], 'moon') self.assertEqual(bdm_real[0]['destination_type'], 'moon')
# Also make sure the update call returned correct data # Also make sure the update call returned correct data
self.assertEqual(dict(bdm_real[0].iteritems()), self.assertEqual(dict(bdm_real[0]),
dict(result.iteritems())) dict(result))
def test_block_device_mapping_update_or_create(self): def test_block_device_mapping_update_or_create(self):
values = { values = {
@ -6339,7 +6345,7 @@ class QuotaTestCase(test.TestCase, ModelsObjectComparatorMixin):
reservations_uuids = db.quota_reserve(self.ctxt, reservable_resources, reservations_uuids = db.quota_reserve(self.ctxt, reservable_resources,
quotas, quotas, deltas, None, quotas, quotas, deltas, None,
None, None, 'project1') None, None, 'project1')
resources_names = reservable_resources.keys() resources_names = list(reservable_resources.keys())
for reservation_uuid in reservations_uuids: for reservation_uuid in reservations_uuids:
reservation = _reservation_get(self.ctxt, reservation_uuid) reservation = _reservation_get(self.ctxt, reservation_uuid)
usage = db.quota_usage_get(self.ctxt, 'project1', usage = db.quota_usage_get(self.ctxt, 'project1',
@ -6392,7 +6398,7 @@ class QuotaTestCase(test.TestCase, ModelsObjectComparatorMixin):
quota_usage = db.quota_usage_get(self.ctxt, 'p1', 'resource0') quota_usage = db.quota_usage_get(self.ctxt, 'p1', 'resource0')
expected = {'resource': 'resource0', 'project_id': 'p1', expected = {'resource': 'resource0', 'project_id': 'p1',
'in_use': 0, 'reserved': 0, 'total': 0} 'in_use': 0, 'reserved': 0, 'total': 0}
for key, value in expected.iteritems(): for key, value in expected.items():
self.assertEqual(value, quota_usage[key]) self.assertEqual(value, quota_usage[key])
def test_quota_usage_get_all_by_project(self): def test_quota_usage_get_all_by_project(self):
@ -6425,7 +6431,7 @@ class QuotaTestCase(test.TestCase, ModelsObjectComparatorMixin):
quota_usage = db.quota_usage_get(self.ctxt, 'p1', 'resource0', 'u1') quota_usage = db.quota_usage_get(self.ctxt, 'p1', 'resource0', 'u1')
expected = {'resource': 'resource0', 'project_id': 'p1', expected = {'resource': 'resource0', 'project_id': 'p1',
'user_id': 'u1', 'in_use': 42, 'reserved': 43, 'total': 85} 'user_id': 'u1', 'in_use': 42, 'reserved': 43, 'total': 85}
for key, value in expected.iteritems(): for key, value in expected.items():
self.assertEqual(value, quota_usage[key]) self.assertEqual(value, quota_usage[key])
def test_quota_create_exists(self): def test_quota_create_exists(self):
@ -6760,7 +6766,7 @@ class ComputeNodeTestCase(test.TestCase, ModelsObjectComparatorMixin):
compute_node_data['hypervisor_hostname'] = name compute_node_data['hypervisor_hostname'] = name
node = db.compute_node_create(self.ctxt, compute_node_data) node = db.compute_node_create(self.ctxt, compute_node_data)
node = dict(node.iteritems()) node = dict(node)
expected.append(node) expected.append(node)
@ -6905,7 +6911,7 @@ class ComputeNodeTestCase(test.TestCase, ModelsObjectComparatorMixin):
def test_compute_node_statistics(self): def test_compute_node_statistics(self):
stats = db.compute_node_statistics(self.ctxt) stats = db.compute_node_statistics(self.ctxt)
self.assertEqual(stats.pop('count'), 1) self.assertEqual(stats.pop('count'), 1)
for k, v in stats.iteritems(): for k, v in stats.items():
self.assertEqual(v, self.item[k]) self.assertEqual(v, self.item[k])
def test_compute_node_statistics_disabled_service(self): def test_compute_node_statistics_disabled_service(self):
@ -7054,7 +7060,7 @@ class CertificateTestCase(test.TestCase, ModelsObjectComparatorMixin):
'project_id': 'project', 'project_id': 'project',
'file_name': 'filename' 'file_name': 'filename'
} }
return [{k: v + str(x) for k, v in base_values.iteritems()} return [{k: v + str(x) for k, v in base_values.items()}
for x in range(1, 4)] for x in range(1, 4)]
def _certificates_create(self): def _certificates_create(self):
@ -7221,7 +7227,7 @@ class CellTestCase(test.TestCase, ModelsObjectComparatorMixin):
test_values = [] test_values = []
for x in range(1, 4): for x in range(1, 4):
modified_val = {k: self._cell_value_modify(v, x) modified_val = {k: self._cell_value_modify(v, x)
for k, v in self._get_cell_base_values().iteritems()} for k, v in self._get_cell_base_values().items()}
db.cell_create(self.ctxt, modified_val) db.cell_create(self.ctxt, modified_val)
test_values.append(modified_val) test_values.append(modified_val)
return test_values return test_values

View File

@ -17,9 +17,9 @@
"""Matcher classes to be used inside of the testtools assertThat framework.""" """Matcher classes to be used inside of the testtools assertThat framework."""
import pprint import pprint
import StringIO
from lxml import etree from lxml import etree
import six
from testtools import content from testtools import content
@ -398,7 +398,7 @@ class XMLMatches(object):
def __init__(self, expected, allow_mixed_nodes=False, def __init__(self, expected, allow_mixed_nodes=False,
skip_empty_text_nodes=True, skip_values=('DONTCARE',)): skip_empty_text_nodes=True, skip_values=('DONTCARE',)):
self.expected_xml = expected self.expected_xml = expected
self.expected = etree.parse(StringIO.StringIO(expected)) self.expected = etree.parse(six.StringIO(expected))
self.allow_mixed_nodes = allow_mixed_nodes self.allow_mixed_nodes = allow_mixed_nodes
self.skip_empty_text_nodes = skip_empty_text_nodes self.skip_empty_text_nodes = skip_empty_text_nodes
self.skip_values = set(skip_values) self.skip_values = set(skip_values)
@ -407,7 +407,7 @@ class XMLMatches(object):
return 'XMLMatches(%r)' % self.expected_xml return 'XMLMatches(%r)' % self.expected_xml
def match(self, actual_xml): def match(self, actual_xml):
actual = etree.parse(StringIO.StringIO(actual_xml)) actual = etree.parse(six.StringIO(actual_xml))
state = XMLMatchState(self.expected_xml, actual_xml) state = XMLMatchState(self.expected_xml, actual_xml)
expected_doc_info = self._get_xml_docinfo(self.expected) expected_doc_info = self._get_xml_docinfo(self.expected)

View File

@ -42,7 +42,9 @@ deps = -r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements-py3.txt -r{toxinidir}/test-requirements-py3.txt
commands = commands =
find . -type f -name "*.pyc" -delete find . -type f -name "*.pyc" -delete
python -m testtools.run nova.tests.unit.test_versions python -m testtools.run \
nova.tests.unit.db.test_db_api \
nova.tests.unit.test_versions
[testenv:functional] [testenv:functional]
usedevelop = True usedevelop = True