Add access alias (aka) for the resource attributes
In compute.server we are currently exposing names which are only "aliases" for known attributes (e.g. accessIPv4 vs access_ipv4). Since we want to drop the _normalization completely, it is necessary to expose attributes under the previously used names (which do not really fit the naming convention of the resource attributes). So add an "aka" to the BaseComponent. While we are here: - repair some of the resource accesses in the unit tests (i.e. direct conversion to dict), - update the local dict storage so that json.dumps works on an updated resource (and also fix the existing problem in the code with that), - add _attributes_iterator to de-duplicate logic in Resource, - fix a wonderful bug: passing sha256 as md5 (affects nodepool heavily). Also add tests for that. Change-Id: I39edfc8fe4e4f6c216663c0e7602c9f1bec5cab4
This commit is contained in:
parent
f7860861a1
commit
402c9f7361
|
@ -301,7 +301,7 @@ class ImageCloudMixin(_normalize.Normalizer):
|
|||
image = self.image.create_image(
|
||||
name, filename=filename,
|
||||
container=container,
|
||||
md5=sha256, sha256=sha256,
|
||||
md5=md5, sha256=sha256,
|
||||
disk_format=disk_format, container_format=container_format,
|
||||
disable_vendor_agent=disable_vendor_agent,
|
||||
wait=wait, timeout=timeout,
|
||||
|
|
|
@ -88,7 +88,7 @@ class _BaseComponent(object):
|
|||
# The class to be used for mappings
|
||||
_map_cls = dict
|
||||
|
||||
def __init__(self, name, type=None, default=None, alias=None,
|
||||
def __init__(self, name, type=None, default=None, alias=None, aka=None,
|
||||
alternate_id=False, list_type=None, coerce_to_default=False,
|
||||
**kwargs):
|
||||
"""A typed descriptor for a component that makes up a Resource
|
||||
|
@ -101,6 +101,7 @@ class _BaseComponent(object):
|
|||
component to a string, __set__ will fail, for example.
|
||||
:param default: Typically None, but any other default can be set.
|
||||
:param alias: If set, alternative attribute on object to return.
|
||||
:param aka: If set, additional name attribute would be available under.
|
||||
:param alternate_id:
|
||||
When `True`, this property is known internally as a value that
|
||||
can be sent with requests that require an ID but when `id` is
|
||||
|
@ -121,6 +122,7 @@ class _BaseComponent(object):
|
|||
else:
|
||||
self.default = default
|
||||
self.alias = alias
|
||||
self.aka = aka
|
||||
self.alternate_id = alternate_id
|
||||
self.list_type = list_type
|
||||
self.coerce_to_default = coerce_to_default
|
||||
|
@ -440,6 +442,9 @@ class Resource(dict):
|
|||
_delete_response_class = None
|
||||
_store_unknown_attrs_as_properties = False
|
||||
|
||||
# Placeholder for aliases as dict of {__alias__:__original}
|
||||
_attr_aliases = {}
|
||||
|
||||
def __init__(self, _synchronized=False, connection=None, **attrs):
|
||||
"""The base resource
|
||||
|
||||
|
@ -493,6 +498,11 @@ class Resource(dict):
|
|||
|
||||
self._update_location()
|
||||
|
||||
for attr, component in self._attributes_iterator():
|
||||
if component.aka:
|
||||
# Register alias for the attribute (local name)
|
||||
self._attr_aliases[component.aka] = attr
|
||||
|
||||
# TODO(mordred) This is terrible, but is a hack at the moment to ensure
|
||||
# json.dumps works. The json library does basically if not obj: and
|
||||
# obj.items() ... but I think the if not obj: is short-circuiting down
|
||||
|
@ -500,6 +510,18 @@ class Resource(dict):
|
|||
# always False even if we override __len__ or __bool__.
|
||||
dict.update(self, self.to_dict())
|
||||
|
||||
@classmethod
|
||||
def _attributes_iterator(cls, components=tuple([Body, Header])):
|
||||
"""Iterator over all Resource attributes
|
||||
"""
|
||||
# isinstance stricly requires this to be a tuple
|
||||
# Since we're looking at class definitions we need to include
|
||||
# subclasses, so check the whole MRO.
|
||||
for klass in cls.__mro__:
|
||||
for attr, component in klass.__dict__.items():
|
||||
if isinstance(component, components):
|
||||
yield attr, component
|
||||
|
||||
def __repr__(self):
|
||||
pairs = [
|
||||
"%s=%s" % (k, v if v is not None else 'None')
|
||||
|
@ -541,7 +563,14 @@ class Resource(dict):
|
|||
except KeyError:
|
||||
return None
|
||||
else:
|
||||
return object.__getattribute__(self, name)
|
||||
try:
|
||||
return object.__getattribute__(self, name)
|
||||
except AttributeError as e:
|
||||
if name in self._attr_aliases:
|
||||
# Hmm - not found. But hey, the alias exists...
|
||||
return object.__getattribute__(
|
||||
self, self._attr_aliases[name])
|
||||
raise e
|
||||
|
||||
def __getitem__(self, name):
|
||||
"""Provide dictionary access for elements of the data model."""
|
||||
|
@ -549,6 +578,10 @@ class Resource(dict):
|
|||
# behaves like its wrapped content. If we get it on the class,
|
||||
# it returns the BaseComponent itself, not the results of __get__.
|
||||
real_item = getattr(self.__class__, name, None)
|
||||
if not real_item and name in self._attr_aliases:
|
||||
# Not found? But we know an alias exists.
|
||||
name = self._attr_aliases[name]
|
||||
real_item = getattr(self.__class__, name, None)
|
||||
if isinstance(real_item, _BaseComponent):
|
||||
return getattr(self, name)
|
||||
raise KeyError(name)
|
||||
|
@ -569,6 +602,23 @@ class Resource(dict):
|
|||
cls=self.__class__.__name__,
|
||||
name=name))
|
||||
|
||||
def _attributes(self, remote_names=False, components=None,
|
||||
include_aliases=True):
|
||||
"""Generate list of supported attributes
|
||||
"""
|
||||
attributes = []
|
||||
|
||||
if not components:
|
||||
components = tuple([Body, Header, Computed, URI])
|
||||
|
||||
for attr, component in self._attributes_iterator(components):
|
||||
key = attr if not remote_names else component.name
|
||||
attributes.append(key)
|
||||
if include_aliases and component.aka:
|
||||
attributes.append(component.aka)
|
||||
|
||||
return attributes
|
||||
|
||||
def keys(self):
|
||||
# NOTE(mordred) In python2, dict.keys returns a list. In python3 it
|
||||
# returns a dict_keys view. For 2, we can return a list from the
|
||||
|
@ -576,15 +626,9 @@ class Resource(dict):
|
|||
# It won't strictly speaking be an actual dict_keys, so it's possible
|
||||
# we may want to get more clever, but for now let's see how far this
|
||||
# will take us.
|
||||
underlying_keys = itertools.chain(
|
||||
self._body.attributes.keys(),
|
||||
self._header.attributes.keys(),
|
||||
self._uri.attributes.keys(),
|
||||
self._computed.attributes.keys())
|
||||
if six.PY2:
|
||||
return list(underlying_keys)
|
||||
else:
|
||||
return underlying_keys
|
||||
# NOTE(gtema) For now let's return list of 'public' attributes and not
|
||||
# remotes or "unknown"
|
||||
return self._attributes()
|
||||
|
||||
def _update(self, **attrs):
|
||||
"""Given attributes, update them on this instance
|
||||
|
@ -736,16 +780,12 @@ class Resource(dict):
|
|||
"""Return a dict of attributes of a given component on the class"""
|
||||
mapping = component._map_cls()
|
||||
ret = component._map_cls()
|
||||
# Since we're looking at class definitions we need to include
|
||||
# subclasses, so check the whole MRO.
|
||||
for klass in cls.__mro__:
|
||||
for key, value in klass.__dict__.items():
|
||||
if isinstance(value, component):
|
||||
# Make sure base classes don't end up overwriting
|
||||
# mappings we've found previously in subclasses.
|
||||
if key not in mapping:
|
||||
# Make it this way first, to get MRO stuff correct.
|
||||
mapping[key] = value.name
|
||||
for key, value in cls._attributes_iterator(component):
|
||||
# Make sure base classes don't end up overwriting
|
||||
# mappings we've found previously in subclasses.
|
||||
if key not in mapping:
|
||||
# Make it this way first, to get MRO stuff correct.
|
||||
mapping[key] = value.name
|
||||
for k, v in mapping.items():
|
||||
ret[v] = k
|
||||
return ret
|
||||
|
@ -888,38 +928,35 @@ class Resource(dict):
|
|||
# but is slightly different in that we're looking at an instance
|
||||
# and we're mapping names on this class to their actual stored
|
||||
# values.
|
||||
# Since we're looking at class definitions we need to include
|
||||
# subclasses, so check the whole MRO.
|
||||
for klass in self.__class__.__mro__:
|
||||
for attr, component in klass.__dict__.items():
|
||||
if isinstance(component, components):
|
||||
if original_names:
|
||||
key = component.name
|
||||
for attr, component in self._attributes_iterator(components):
|
||||
if original_names:
|
||||
key = component.name
|
||||
else:
|
||||
key = attr
|
||||
for key in filter(None, (key, component.aka)):
|
||||
# Make sure base classes don't end up overwriting
|
||||
# mappings we've found previously in subclasses.
|
||||
if key not in mapping:
|
||||
value = getattr(self, attr, None)
|
||||
if ignore_none and value is None:
|
||||
continue
|
||||
if isinstance(value, Resource):
|
||||
mapping[key] = value.to_dict(_to_munch=_to_munch)
|
||||
elif isinstance(value, dict) and _to_munch:
|
||||
mapping[key] = munch.Munch(value)
|
||||
elif value and isinstance(value, list):
|
||||
converted = []
|
||||
for raw in value:
|
||||
if isinstance(raw, Resource):
|
||||
converted.append(
|
||||
raw.to_dict(_to_munch=_to_munch))
|
||||
elif isinstance(raw, dict) and _to_munch:
|
||||
converted.append(munch.Munch(raw))
|
||||
else:
|
||||
converted.append(raw)
|
||||
mapping[key] = converted
|
||||
else:
|
||||
key = attr
|
||||
# Make sure base classes don't end up overwriting
|
||||
# mappings we've found previously in subclasses.
|
||||
if key not in mapping:
|
||||
value = getattr(self, attr, None)
|
||||
if ignore_none and value is None:
|
||||
continue
|
||||
if isinstance(value, Resource):
|
||||
mapping[key] = value.to_dict(_to_munch=_to_munch)
|
||||
elif isinstance(value, dict) and _to_munch:
|
||||
mapping[key] = munch.Munch(value)
|
||||
elif value and isinstance(value, list):
|
||||
converted = []
|
||||
for raw in value:
|
||||
if isinstance(raw, Resource):
|
||||
converted.append(
|
||||
raw.to_dict(_to_munch=_to_munch))
|
||||
elif isinstance(raw, dict) and _to_munch:
|
||||
converted.append(munch.Munch(raw))
|
||||
else:
|
||||
converted.append(raw)
|
||||
mapping[key] = converted
|
||||
else:
|
||||
mapping[key] = value
|
||||
mapping[key] = value
|
||||
|
||||
return mapping
|
||||
# Compatibility with the munch.Munch.toDict method
|
||||
|
@ -1065,6 +1102,7 @@ class Resource(dict):
|
|||
self._header.attributes.update(headers)
|
||||
self._header.clean()
|
||||
self._update_location()
|
||||
dict.update(self, self.to_dict())
|
||||
|
||||
@classmethod
|
||||
def _get_session(cls, session):
|
||||
|
|
|
@ -19,6 +19,7 @@ Fakes used for testing
|
|||
|
||||
import datetime
|
||||
import json
|
||||
import hashlib
|
||||
import uuid
|
||||
|
||||
from openstack.orchestration.util import template_format
|
||||
|
@ -221,7 +222,17 @@ def make_fake_stack_event(
|
|||
def make_fake_image(
|
||||
image_id=None, md5=NO_MD5, sha256=NO_SHA256, status='active',
|
||||
image_name=u'fake_image',
|
||||
data=None,
|
||||
checksum=u'ee36e35a297980dee1b514de9803ec6d'):
|
||||
if data:
|
||||
md5 = hashlib.md5()
|
||||
sha256 = hashlib.sha256()
|
||||
with open(data, 'rb') as file_obj:
|
||||
for chunk in iter(lambda: file_obj.read(8192), b''):
|
||||
md5.update(chunk)
|
||||
sha256.update(chunk)
|
||||
md5 = md5.hexdigest()
|
||||
sha256 = sha256.hexdigest()
|
||||
return {
|
||||
u'image_state': u'available',
|
||||
u'container_format': u'bare',
|
||||
|
@ -243,10 +254,10 @@ def make_fake_image(
|
|||
u'min_disk': 40,
|
||||
u'virtual_size': None,
|
||||
u'name': image_name,
|
||||
u'checksum': checksum,
|
||||
u'checksum': md5 or checksum,
|
||||
u'created_at': u'2016-02-10T05:03:11Z',
|
||||
u'owner_specified.openstack.md5': NO_MD5,
|
||||
u'owner_specified.openstack.sha256': NO_SHA256,
|
||||
u'owner_specified.openstack.md5': md5 or NO_MD5,
|
||||
u'owner_specified.openstack.sha256': sha256 or NO_SHA256,
|
||||
u'owner_specified.openstack.object': 'images/{name}'.format(
|
||||
name=image_name),
|
||||
u'protected': False}
|
||||
|
|
|
@ -11,13 +11,13 @@
|
|||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import hashlib
|
||||
import operator
|
||||
import tempfile
|
||||
import uuid
|
||||
|
||||
import six
|
||||
|
||||
from openstack import exceptions
|
||||
from openstack.cloud import exc
|
||||
from openstack.cloud import meta
|
||||
from openstack.tests import fakes
|
||||
|
@ -35,12 +35,14 @@ class BaseTestImage(base.TestCase):
|
|||
self.image_name = self.getUniqueString('image')
|
||||
self.object_name = u'images/{name}'.format(name=self.image_name)
|
||||
self.imagefile = tempfile.NamedTemporaryFile(delete=False)
|
||||
self.imagefile.write(b'\0')
|
||||
data = b'\2\0'
|
||||
self.imagefile.write(data)
|
||||
self.imagefile.close()
|
||||
self.output = uuid.uuid4().bytes
|
||||
self.output = data
|
||||
self.fake_image_dict = fakes.make_fake_image(
|
||||
image_id=self.image_id, image_name=self.image_name,
|
||||
checksum=hashlib.md5(self.output).hexdigest())
|
||||
data=self.imagefile.name
|
||||
)
|
||||
self.fake_search_return = {'images': [self.fake_image_dict]}
|
||||
self.container_name = self.getUniqueString('container')
|
||||
|
||||
|
@ -333,9 +335,13 @@ class TestImage(BaseTestImage):
|
|||
u'container_format': u'bare',
|
||||
u'disk_format': u'qcow2',
|
||||
u'name': self.image_name,
|
||||
u'owner_specified.openstack.md5': fakes.NO_MD5,
|
||||
u'owner_specified.openstack.md5':
|
||||
self.fake_image_dict[
|
||||
'owner_specified.openstack.md5'],
|
||||
u'owner_specified.openstack.object': self.object_name,
|
||||
u'owner_specified.openstack.sha256': fakes.NO_SHA256,
|
||||
u'owner_specified.openstack.sha256':
|
||||
self.fake_image_dict[
|
||||
'owner_specified.openstack.sha256'],
|
||||
u'visibility': u'private'})
|
||||
),
|
||||
dict(method='PUT',
|
||||
|
@ -360,7 +366,8 @@ class TestImage(BaseTestImage):
|
|||
is_public=False)
|
||||
|
||||
self.assert_calls()
|
||||
self.assertEqual(self.adapter.request_history[5].text.read(), b'\x00')
|
||||
self.assertEqual(self.adapter.request_history[5].text.read(),
|
||||
self.output)
|
||||
|
||||
def test_create_image_task(self):
|
||||
self.cloud.image_api_use_tasks = True
|
||||
|
@ -428,8 +435,12 @@ class TestImage(BaseTestImage):
|
|||
object=self.image_name),
|
||||
status_code=201,
|
||||
validate=dict(
|
||||
headers={'x-object-meta-x-sdk-md5': fakes.NO_MD5,
|
||||
'x-object-meta-x-sdk-sha256': fakes.NO_SHA256})
|
||||
headers={'x-object-meta-x-sdk-md5':
|
||||
self.fake_image_dict[
|
||||
'owner_specified.openstack.md5'],
|
||||
'x-object-meta-x-sdk-sha256':
|
||||
self.fake_image_dict[
|
||||
'owner_specified.openstack.sha256']})
|
||||
),
|
||||
dict(method='POST',
|
||||
uri=self.get_mock_url(
|
||||
|
@ -467,9 +478,13 @@ class TestImage(BaseTestImage):
|
|||
container=self.container_name,
|
||||
object=self.image_name),
|
||||
u'path': u'/owner_specified.openstack.object'},
|
||||
{u'op': u'add', u'value': fakes.NO_MD5,
|
||||
{u'op': u'add', u'value':
|
||||
self.fake_image_dict[
|
||||
'owner_specified.openstack.md5'],
|
||||
u'path': u'/owner_specified.openstack.md5'},
|
||||
{u'op': u'add', u'value': fakes.NO_SHA256,
|
||||
{u'op': u'add', u'value':
|
||||
self.fake_image_dict[
|
||||
'owner_specified.openstack.sha256'],
|
||||
u'path': u'/owner_specified.openstack.sha256'}],
|
||||
key=operator.itemgetter('path')),
|
||||
headers={
|
||||
|
@ -486,8 +501,12 @@ class TestImage(BaseTestImage):
|
|||
'X-Trans-Id': 'txbbb825960a3243b49a36f-005a0dadaedfw1',
|
||||
'Content-Length': '1290170880',
|
||||
'Last-Modified': 'Tue, 14 Apr 2015 18:29:01 GMT',
|
||||
'X-Object-Meta-X-Sdk-Sha256': fakes.NO_SHA256,
|
||||
'X-Object-Meta-X-Sdk-Md5': fakes.NO_MD5,
|
||||
'X-Object-Meta-X-Sdk-Sha256':
|
||||
self.fake_image_dict[
|
||||
'owner_specified.openstack.sha256'],
|
||||
'X-Object-Meta-X-Sdk-Md5':
|
||||
self.fake_image_dict[
|
||||
'owner_specified.openstack.md5'],
|
||||
'Date': 'Thu, 16 Nov 2017 15:24:30 GMT',
|
||||
'Accept-Ranges': 'bytes',
|
||||
'Content-Type': 'application/octet-stream',
|
||||
|
@ -756,46 +775,56 @@ class TestImage(BaseTestImage):
|
|||
def test_create_image_put_v2_wrong_checksum_delete(self):
|
||||
self.cloud.image_api_use_tasks = False
|
||||
|
||||
args = {'name': self.image_name,
|
||||
'container_format': 'bare', 'disk_format': 'qcow2',
|
||||
'owner_specified.openstack.md5': fakes.NO_MD5,
|
||||
'owner_specified.openstack.sha256': fakes.NO_SHA256,
|
||||
'owner_specified.openstack.object': 'images/{name}'.format(
|
||||
name=self.image_name),
|
||||
'visibility': 'private'}
|
||||
fake_image = self.fake_image_dict
|
||||
|
||||
ret = args.copy()
|
||||
ret['id'] = self.image_id
|
||||
ret['status'] = 'success'
|
||||
ret['checksum'] = 'fake'
|
||||
fake_image['owner_specified.openstack.md5'] = 'a'
|
||||
fake_image['owner_specified.openstack.sha256'] = 'b'
|
||||
|
||||
self.register_uris([
|
||||
dict(method='GET',
|
||||
uri='https://image.example.com/v2/images',
|
||||
uri=self.get_mock_url(
|
||||
'image', append=['images'], base_url_append='v2'),
|
||||
json={'images': []}),
|
||||
dict(method='POST',
|
||||
uri='https://image.example.com/v2/images',
|
||||
json=ret,
|
||||
validate=dict(json=args)),
|
||||
dict(method='PUT',
|
||||
uri='https://image.example.com/v2/images/{id}/file'.format(
|
||||
id=self.image_id),
|
||||
status_code=400,
|
||||
uri=self.get_mock_url(
|
||||
'image', append=['images'], base_url_append='v2'),
|
||||
json=self.fake_image_dict,
|
||||
validate=dict(
|
||||
headers={
|
||||
'Content-Type': 'application/octet-stream',
|
||||
},
|
||||
)),
|
||||
json={
|
||||
u'container_format': u'bare',
|
||||
u'disk_format': u'qcow2',
|
||||
u'name': self.image_name,
|
||||
u'owner_specified.openstack.md5':
|
||||
fake_image[
|
||||
'owner_specified.openstack.md5'],
|
||||
u'owner_specified.openstack.object': self.object_name,
|
||||
u'owner_specified.openstack.sha256':
|
||||
fake_image[
|
||||
'owner_specified.openstack.sha256'],
|
||||
u'visibility': u'private'})
|
||||
),
|
||||
dict(method='PUT',
|
||||
uri=self.get_mock_url(
|
||||
'image', append=['images', self.image_id, 'file'],
|
||||
base_url_append='v2'),
|
||||
request_headers={'Content-Type': 'application/octet-stream'}),
|
||||
dict(method='GET',
|
||||
uri=self.get_mock_url(
|
||||
'image', append=['images', self.fake_image_dict['id']],
|
||||
base_url_append='v2'
|
||||
),
|
||||
json=fake_image),
|
||||
dict(method='DELETE',
|
||||
uri='https://image.example.com/v2/images/{id}'.format(
|
||||
id=self.image_id)),
|
||||
id=self.image_id))
|
||||
])
|
||||
|
||||
self.assertRaises(
|
||||
exc.OpenStackCloudHTTPError,
|
||||
self._call_create_image,
|
||||
self.image_name,
|
||||
md5='some_fake')
|
||||
exceptions.SDKException,
|
||||
self.cloud.create_image,
|
||||
self.image_name, self.imagefile.name,
|
||||
is_public=False, md5='a', sha256='b'
|
||||
)
|
||||
|
||||
self.assert_calls()
|
||||
|
||||
|
|
|
@ -11,6 +11,7 @@
|
|||
# under the License.
|
||||
|
||||
import itertools
|
||||
import json
|
||||
|
||||
from keystoneauth1 import adapter
|
||||
import mock
|
||||
|
@ -54,11 +55,12 @@ class TestComponent(base.TestCase):
|
|||
|
||||
def test_creation(self):
|
||||
sot = resource._BaseComponent(
|
||||
"name", type=int, default=1, alternate_id=True)
|
||||
"name", type=int, default=1, alternate_id=True, aka="alias")
|
||||
|
||||
self.assertEqual("name", sot.name)
|
||||
self.assertEqual(int, sot.type)
|
||||
self.assertEqual(1, sot.default)
|
||||
self.assertEqual("alias", sot.aka)
|
||||
self.assertTrue(sot.alternate_id)
|
||||
|
||||
def test_get_no_instance(self):
|
||||
|
@ -684,11 +686,73 @@ class TestResource(base.TestCase):
|
|||
value = "id"
|
||||
self.assertEqual(value, resource.Resource._get_id(value))
|
||||
|
||||
def test__attributes(self):
|
||||
class Test(resource.Resource):
|
||||
foo = resource.Header('foo')
|
||||
bar = resource.Body('bar', aka='_bar')
|
||||
bar_local = resource.Body('bar_remote')
|
||||
|
||||
sot = Test()
|
||||
|
||||
self.assertEqual(
|
||||
sorted(['foo', 'bar', '_bar', 'bar_local',
|
||||
'id', 'name', 'location']),
|
||||
sorted(sot._attributes())
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
sorted(['foo', 'bar', 'bar_local', 'id', 'name', 'location']),
|
||||
sorted(sot._attributes(include_aliases=False))
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
sorted(['foo', 'bar', '_bar', 'bar_remote',
|
||||
'id', 'name', 'location']),
|
||||
sorted(sot._attributes(remote_names=True))
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
sorted(['bar', '_bar', 'bar_local', 'id', 'name', 'location']),
|
||||
sorted(sot._attributes(
|
||||
components=tuple([resource.Body, resource.Computed])))
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
('foo',),
|
||||
tuple(sot._attributes(components=tuple([resource.Header])))
|
||||
)
|
||||
|
||||
def test__attributes_iterator(self):
|
||||
class Parent(resource.Resource):
|
||||
foo = resource.Header('foo')
|
||||
bar = resource.Body('bar', aka='_bar')
|
||||
|
||||
class Child(Parent):
|
||||
foo1 = resource.Header('foo1')
|
||||
bar1 = resource.Body('bar1')
|
||||
|
||||
sot = Child()
|
||||
expected = ['foo', 'bar', 'foo1', 'bar1']
|
||||
|
||||
for attr, component in sot._attributes_iterator():
|
||||
if attr in expected:
|
||||
expected.remove(attr)
|
||||
self.assertEqual([], expected)
|
||||
|
||||
expected = ['foo', 'foo1']
|
||||
|
||||
# Check we iterate only over headers
|
||||
for attr, component in sot._attributes_iterator(
|
||||
components=tuple([resource.Header])):
|
||||
if attr in expected:
|
||||
expected.remove(attr)
|
||||
self.assertEqual([], expected)
|
||||
|
||||
def test_to_dict(self):
|
||||
|
||||
class Test(resource.Resource):
|
||||
foo = resource.Header('foo')
|
||||
bar = resource.Body('bar')
|
||||
bar = resource.Body('bar', aka='_bar')
|
||||
|
||||
res = Test(id='FAKE_ID')
|
||||
|
||||
|
@ -697,7 +761,8 @@ class TestResource(base.TestCase):
|
|||
'name': None,
|
||||
'location': None,
|
||||
'foo': None,
|
||||
'bar': None
|
||||
'bar': None,
|
||||
'_bar': None
|
||||
}
|
||||
self.assertEqual(expected, res.to_dict())
|
||||
|
||||
|
@ -791,17 +856,18 @@ class TestResource(base.TestCase):
|
|||
|
||||
class Parent(resource.Resource):
|
||||
foo = resource.Header('foo')
|
||||
bar = resource.Body('bar')
|
||||
bar = resource.Body('bar', aka='_bar')
|
||||
|
||||
class Child(Parent):
|
||||
foo_new = resource.Header('foo_baz_server')
|
||||
bar_new = resource.Body('bar_baz_server')
|
||||
|
||||
res = Child(id='FAKE_ID')
|
||||
res = Child(id='FAKE_ID', bar='test')
|
||||
|
||||
expected = {
|
||||
'foo': None,
|
||||
'bar': None,
|
||||
'bar': 'test',
|
||||
'_bar': 'test',
|
||||
'foo_new': None,
|
||||
'bar_new': None,
|
||||
'id': 'FAKE_ID',
|
||||
|
@ -810,6 +876,50 @@ class TestResource(base.TestCase):
|
|||
}
|
||||
self.assertEqual(expected, res.to_dict())
|
||||
|
||||
def test_json_dumps_from_resource(self):
|
||||
class Test(resource.Resource):
|
||||
foo = resource.Body('foo_remote')
|
||||
|
||||
res = Test(foo='bar')
|
||||
|
||||
expected = '{"foo": "bar", "id": null, "location": null, "name": null}'
|
||||
|
||||
actual = json.dumps(res, sort_keys=True)
|
||||
self.assertEqual(expected, actual)
|
||||
|
||||
response = FakeResponse({
|
||||
'foo': 'new_bar'})
|
||||
res._translate_response(response)
|
||||
|
||||
expected = ('{"foo": "new_bar", "id": null, '
|
||||
'"location": null, "name": null}')
|
||||
actual = json.dumps(res, sort_keys=True)
|
||||
self.assertEqual(expected, actual)
|
||||
|
||||
def test_access_by_aka(self):
|
||||
class Test(resource.Resource):
|
||||
foo = resource.Header('foo_remote', aka='foo_alias')
|
||||
|
||||
res = Test(foo='bar', name='test')
|
||||
|
||||
self.assertEqual('bar', res['foo_alias'])
|
||||
self.assertEqual('bar', res.foo_alias)
|
||||
self.assertTrue('foo' in res.keys())
|
||||
self.assertTrue('foo_alias' in res.keys())
|
||||
expected = munch.Munch({
|
||||
'id': None,
|
||||
'name': 'test',
|
||||
'location': None,
|
||||
'foo': 'bar',
|
||||
'foo_alias': 'bar'
|
||||
})
|
||||
actual = munch.Munch(res)
|
||||
self.assertEqual(expected, actual)
|
||||
self.assertEqual(expected, res.toDict())
|
||||
self.assertEqual(expected, res.to_dict())
|
||||
self.assertDictEqual(expected, res)
|
||||
self.assertDictEqual(expected, dict(res))
|
||||
|
||||
def test_to_dict_value_error(self):
|
||||
|
||||
class Test(resource.Resource):
|
||||
|
|
Loading…
Reference in New Issue