hacking: upgrade to 0.9.x series
Change-Id: I252758fd633662de9659a402c5e3d7e3ce1fae0f
parent 353ae24d34
commit fadbef8511
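Everything below is the mechanical cleanup needed to keep the pep8 job passing once hacking moves to the 0.9.x series. In projects of this era the pin usually lands in test-requirements.txt as something like hacking>=0.9.2,<0.10, though the exact bounds are not part of this excerpt. The two most frequent fixes are block comments gaining a space after the '#' and backslash line continuations being replaced with parenthesised expressions; the rule code named here (E265 for the comment spacing) is the conventional pycodestyle code and is an assumption, since the diff itself never cites it. A minimal before/after sketch with illustrative names:

def is_visible(context, image):
    #NOTE(author): rejected by the 0.9.x checks: no space after '#',
    # and a backslash continuation in the return expression.
    return image.visibility == 'public' or \
        context.is_admin


def is_visible_fixed(context, image):
    # NOTE(author): same logic, reformatted to satisfy the new checks.
    return (image.visibility == 'public'
            or context.is_admin)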
@ -422,7 +422,7 @@ class TaskFactoryProxy(glance.domain.proxy.TaskFactory):
def new_task(self, **kwargs):
owner = kwargs.get('owner', self.context.owner)

#NOTE(nikhil): Unlike Images, Tasks are expected to have owner.
# NOTE(nikhil): Unlike Images, Tasks are expected to have owner.
# We currently do not allow even admins to set the owner to None.
if owner is not None and (owner == self.context.owner
or self.context.is_admin):
@ -457,7 +457,7 @@ class TaskStubRepoProxy(glance.domain.proxy.TaskStubRepo):
return [proxy_task_stub(self.context, t) for t in task_stubs]


#Metadef Namespace classes
# Metadef Namespace classes
def is_namespace_mutable(context, namespace):
"""Return True if the namespace is mutable in this context."""
if context.is_admin:
@ -548,7 +548,7 @@ class MetadefNamespaceRepoProxy(glance.domain.proxy.MetadefNamespaceRepo):
|
||||
namespace in namespaces]
|
||||
|
||||
|
||||
#Metadef Object classes
|
||||
# Metadef Object classes
|
||||
def is_object_mutable(context, object):
|
||||
"""Return True if the object is mutable in this context."""
|
||||
if context.is_admin:
|
||||
@ -635,7 +635,7 @@ class MetadefObjectRepoProxy(glance.domain.proxy.MetadefObjectRepo):
|
||||
meta_object in objects]
|
||||
|
||||
|
||||
#Metadef ResourceType classes
|
||||
# Metadef ResourceType classes
|
||||
def is_meta_resource_type_mutable(context, meta_resource_type):
|
||||
"""Return True if the meta_resource_type is mutable in this context."""
|
||||
if context.is_admin:
|
||||
@ -644,7 +644,7 @@ def is_meta_resource_type_mutable(context, meta_resource_type):
|
||||
if context.owner is None:
|
||||
return False
|
||||
|
||||
#(lakshmiS): resource type can exist without an association with
|
||||
# (lakshmiS): resource type can exist without an association with
|
||||
# namespace and resource type cannot be created/update/deleted directly(
|
||||
# they have to be associated/de-associated from namespace)
|
||||
if meta_resource_type.namespace:
|
||||
@ -724,7 +724,7 @@ class MetadefResourceTypeRepoProxy(
|
||||
meta_resource_type in meta_resource_types]
|
||||
|
||||
|
||||
#Metadef namespace properties classes
|
||||
# Metadef namespace properties classes
|
||||
def is_namespace_property_mutable(context, namespace_property):
|
||||
"""Return True if the object is mutable in this context."""
|
||||
if context.is_admin:
|
||||
@ -787,8 +787,8 @@ class MetadefPropertyFactoryProxy(glance.domain.proxy.MetadefPropertyFactory):
|
||||
"owned by '%s'")
|
||||
raise exception.Forbidden(message % (owner))
|
||||
|
||||
return super(MetadefPropertyFactoryProxy, self).\
|
||||
new_namespace_property(**kwargs)
|
||||
return super(MetadefPropertyFactoryProxy, self).new_namespace_property(
|
||||
**kwargs)
|
||||
|
||||
|
||||
class MetadefPropertyRepoProxy(glance.domain.proxy.MetadefPropertyRepo):
|
||||
|
@ -93,9 +93,11 @@ def image_send_notification(bytes_written, expected_size, image_meta, request,


def get_remaining_quota(context, db_api, image_id=None):
"""
This method is called to see if the user is allowed to store an image
of the given size in glance based on their quota and current usage.
"""Method called to see if the user is allowed to store an image.

Checks if it is allowed based on the given size in glance based on their
quota and current usage.

:param context:
:param db_api: The db_api in use for this configuration
:param image_id: The image that will be replaced with this new data size
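The docstring rewrites in this file (get_remaining_quota above, check_quota below) follow the multi-line docstring convention that the new checks enforce: a one-line summary on the same line as the opening quotes, then a blank line, then the details. The diff does not name the specific codes; H404/H405 are the usual docstring checks in this hacking series, so treat that attribution as an assumption. A minimal sketch:

def check_quota_sketch(context, image_size):
    """Check whether the user may store an image of the given size.

    The summary sits on the opening line; details and :param: fields
    follow after a blank line, which is the layout the checks expect.
    """
    return image_size is None or image_size >= 0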
@ -103,7 +105,7 @@ def get_remaining_quota(context, db_api, image_id=None):
|
||||
None means infinity
|
||||
"""
|
||||
|
||||
#NOTE(jbresnah) in the future this value will come from a call to
|
||||
# NOTE(jbresnah) in the future this value will come from a call to
|
||||
# keystone.
|
||||
users_quota = CONF.user_storage_quota
|
||||
|
||||
@ -135,9 +137,11 @@ def get_remaining_quota(context, db_api, image_id=None):
|
||||
|
||||
|
||||
def check_quota(context, image_size, db_api, image_id=None):
|
||||
"""
|
||||
This method is called to see if the user is allowed to store an image
|
||||
of the given size in glance based on their quota and current usage.
|
||||
"""Method called to see if the user is allowed to store an image.
|
||||
|
||||
Checks if it is allowed based on the given size in glance based on their
|
||||
quota and current usage.
|
||||
|
||||
:param context:
|
||||
:param image_size: The size of the image we hope to store
|
||||
:param db_api: The db_api in use for this configuration
|
||||
@ -153,7 +157,7 @@ def check_quota(context, image_size, db_api, image_id=None):
|
||||
user = getattr(context, 'user', '<unknown>')
|
||||
|
||||
if image_size is None:
|
||||
#NOTE(jbresnah) When the image size is None it means that it is
|
||||
# NOTE(jbresnah) When the image size is None it means that it is
|
||||
# not known. In this case the only time we will raise an
|
||||
# exception is when there is no room left at all, thus we know
|
||||
# it will not fit
|
||||
|
@ -91,12 +91,12 @@ class ContextMiddleware(BaseContextMiddleware):
|
||||
return glance.context.RequestContext(**kwargs)
|
||||
|
||||
def _get_authenticated_context(self, req):
|
||||
#NOTE(bcwaldon): X-Roles is a csv string, but we need to parse
|
||||
# NOTE(bcwaldon): X-Roles is a csv string, but we need to parse
|
||||
# it into a list to be useful
|
||||
roles_header = req.headers.get('X-Roles', '')
|
||||
roles = [r.strip().lower() for r in roles_header.split(',')]
|
||||
|
||||
#NOTE(bcwaldon): This header is deprecated in favor of X-Auth-Token
|
||||
# NOTE(bcwaldon): This header is deprecated in favor of X-Auth-Token
|
||||
deprecated_token = req.headers.get('X-Storage-Token')
|
||||
|
||||
service_catalog = None
|
||||
|
@ -44,7 +44,7 @@ class VersionNegotiationFilter(wsgi.Middleware):
|
||||
LOG.debug(msg % args)
|
||||
|
||||
# If the request is for /versions, just return the versions container
|
||||
#TODO(bcwaldon): deprecate this behavior
|
||||
# TODO(bcwaldon): deprecate this behavior
|
||||
if req.path_info_peek() == "versions":
|
||||
return self.versions_app
|
||||
|
||||
|
@ -448,7 +448,7 @@ class ImageTarget(object):
|
||||
return self.image.extra_properties[key]
|
||||
|
||||
|
||||
#Metadef Namespace classes
|
||||
# Metadef Namespace classes
|
||||
class MetadefNamespaceProxy(glance.domain.proxy.MetadefNamespace):
|
||||
|
||||
def __init__(self, namespace, context, policy):
|
||||
@ -501,7 +501,7 @@ class MetadefNamespaceFactoryProxy(
|
||||
meta_namespace_proxy_kwargs=proxy_kwargs)
|
||||
|
||||
|
||||
#Metadef Object classes
|
||||
# Metadef Object classes
|
||||
class MetadefObjectProxy(glance.domain.proxy.MetadefObject):
|
||||
|
||||
def __init__(self, meta_object, context, policy):
|
||||
@ -553,7 +553,7 @@ class MetadefObjectFactoryProxy(glance.domain.proxy.MetadefObjectFactory):
|
||||
meta_object_proxy_kwargs=proxy_kwargs)
|
||||
|
||||
|
||||
#Metadef ResourceType classes
|
||||
# Metadef ResourceType classes
|
||||
class MetadefResourceTypeProxy(glance.domain.proxy.MetadefResourceType):
|
||||
|
||||
def __init__(self, meta_resource_type, context, policy):
|
||||
@ -600,7 +600,7 @@ class MetadefResourceTypeFactoryProxy(
|
||||
resource_type_proxy_kwargs=proxy_kwargs)
|
||||
|
||||
|
||||
#Metadef namespace properties classes
|
||||
# Metadef namespace properties classes
|
||||
class MetadefPropertyProxy(glance.domain.proxy.MetadefProperty):
|
||||
|
||||
def __init__(self, namespace_property, context, policy):
|
||||
|
@ -39,8 +39,8 @@ class ProtectedImageFactoryProxy(glance.domain.proxy.ImageFactory):
extra_properties[key] = extra_props[key]
else:
raise exception.ReservedProperty(property=key)
return super(ProtectedImageFactoryProxy, self).\
new_image(extra_properties=extra_properties, **kwargs)
return super(ProtectedImageFactoryProxy, self).new_image(
extra_properties=extra_properties, **kwargs)


class ProtectedImageRepoProxy(glance.domain.proxy.Repo):
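This hunk shows the other recurring rewrite in the commit: a statement that was split with a trailing backslash is rewrapped so the break falls inside the parentheses of the call. The same transformation is applied to the other proxy factories, the v2 controllers and, further down, the SQLAlchemy query chains. A self-contained sketch with illustrative class names:

class FactorySketch(object):
    """Illustrative stand-in for the proxy factories touched in this diff."""

    def new_image(self, **kwargs):
        return dict(kwargs)


class ProxySketch(FactorySketch):

    def new_image_old_style(self, **kwargs):
        # Before: the continuation relies on a trailing backslash.
        return super(ProxySketch, self).\
            new_image(**kwargs)

    def new_image(self, **kwargs):
        # After: the break falls inside the call's parentheses instead.
        return super(ProxySketch, self).new_image(
            **kwargs)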
@ -13,9 +13,8 @@
# License for the specific language governing permissions and limitations
# under the License.

import webob.exc

import glance_store as store
import webob.exc

from glance.common import exception
import glance.openstack.common.log as logging
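Several files receive the same import shuffle as this one: third-party modules end up in a single alphabetised group (webob.exc now sits beside glance_store) instead of being split across groups. That matches the hacking import conventions of grouped stdlib, third-party and project imports in alphabetical order within each group; H305/H306 are the usual codes for this in the 0.9.x series, assumed here because the diff does not cite them. The resulting layout, sketched:

# Standard library first.
import os
import sys

# Third-party modules in a single alphabetised group; glance_store is an
# external library, so it sorts alongside six and webob rather than
# sitting in a group of its own.
import glance_store
import six
import webob.exc

# Project-local imports ("from glance.common import ...") form the last group.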
@ -268,8 +268,8 @@ class Controller(controller.BaseController):
|
||||
key, 'read', req.context) is False):
|
||||
# NOTE(bourke): if read protected, re-add to image_meta to
|
||||
# prevent deletion
|
||||
image_meta['properties'][key] = \
|
||||
orig_meta['properties'][key]
|
||||
image_meta['properties'][key] = orig_meta[
|
||||
'properties'][key]
|
||||
elif (self.prop_enforcer.check_property_rules(
|
||||
key, 'delete', req.context) is False):
|
||||
msg = "Property '%s' is protected" % key
|
||||
@ -1073,7 +1073,7 @@ class Controller(controller.BaseController):
|
||||
content_type="text/plain")
|
||||
except exception.InUseByStore as e:
|
||||
msg = (_LI("Image %s could not be deleted because it is in use: "
|
||||
"%s") % (id, utils.exception_to_str(e)))
|
||||
"%s") % (id, utils.exception_to_str(e))) # noqa
|
||||
for line in msg.split('\n'):
|
||||
LOG.info(line)
|
||||
raise HTTPConflict(explanation=msg,
|
||||
|
@ -12,12 +12,10 @@
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import glance_store as store_api
|
||||
from oslo.config import cfg
|
||||
import webob.exc
|
||||
|
||||
import glance_store as store_api
|
||||
|
||||
from glance.common import exception
|
||||
from glance.common import store_utils
|
||||
from glance.common import utils
|
||||
@ -233,7 +231,7 @@ def upload_data_to_store(req, image_meta, image_data, store, notifier):
|
||||
content_type='text/plain')
|
||||
|
||||
except webob.exc.HTTPError:
|
||||
#NOTE(bcwaldon): Ideally, we would just call 'raise' here,
|
||||
# NOTE(bcwaldon): Ideally, we would just call 'raise' here,
|
||||
# but something in the above function calls is affecting the
|
||||
# exception context and we must explicitly re-raise the
|
||||
# caught exception.
|
||||
|
@ -12,10 +12,8 @@
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import webob.exc
|
||||
|
||||
import glance_store
|
||||
import webob.exc
|
||||
|
||||
import glance.api.policy
|
||||
from glance.common import exception
|
||||
@ -95,8 +93,8 @@ class ImageDataController(object):
|
||||
LOG.debug("Cannot save data for image %(id)s: %(e)s",
|
||||
{'id': image_id, 'e': utils.exception_to_str(e)})
|
||||
self._restore(image_repo, image)
|
||||
raise webob.exc.HTTPBadRequest(explanation=
|
||||
utils.exception_to_str(e))
|
||||
raise webob.exc.HTTPBadRequest(
|
||||
explanation=utils.exception_to_str(e))
|
||||
|
||||
except exception.InvalidImageStatusTransition as e:
|
||||
msg = utils.exception_to_str(e)
|
||||
@ -208,12 +206,12 @@ class ResponseSerializer(wsgi.JSONResponseSerializer):
|
||||
chunk_size=chunk_size))
|
||||
except exception.Forbidden as e:
|
||||
raise webob.exc.HTTPForbidden(explanation=e.msg)
|
||||
#NOTE(saschpe): "response.app_iter = ..." currently resets Content-MD5
|
||||
# NOTE(saschpe): "response.app_iter = ..." currently resets Content-MD5
|
||||
# (https://github.com/Pylons/webob/issues/86), so it should be set
|
||||
# afterwards for the time being.
|
||||
if image.checksum:
|
||||
response.headers['Content-MD5'] = image.checksum
|
||||
#NOTE(markwash): "response.app_iter = ..." also erroneously resets the
|
||||
# NOTE(markwash): "response.app_iter = ..." also erroneously resets the
|
||||
# content-length
|
||||
response.headers['Content-Length'] = str(image.size)
|
||||
|
||||
|
@ -14,10 +14,10 @@
|
||||
# under the License.
|
||||
|
||||
import copy
|
||||
import six
|
||||
import webob
|
||||
|
||||
import glance_store
|
||||
import six
|
||||
import webob
|
||||
|
||||
from glance.api import policy
|
||||
from glance.common import exception
|
||||
@ -58,8 +58,8 @@ class ImageMembersController(object):
|
||||
|
||||
"""
|
||||
image_repo = self.gateway.get_repo(req.context)
|
||||
image_member_factory = self.gateway\
|
||||
.get_image_member_factory(req.context)
|
||||
image_member_factory = self.gateway.get_image_member_factory(
|
||||
req.context)
|
||||
try:
|
||||
image = image_repo.get(image_id)
|
||||
member_repo = image.get_member_repo()
|
||||
@ -106,8 +106,8 @@ class ImageMembersController(object):
|
||||
except exception.Forbidden as e:
|
||||
raise webob.exc.HTTPForbidden(explanation=e.msg)
|
||||
except ValueError as e:
|
||||
raise webob.exc.HTTPBadRequest(explanation=
|
||||
utils.exception_to_str(e))
|
||||
raise webob.exc.HTTPBadRequest(
|
||||
explanation=utils.exception_to_str(e))
|
||||
|
||||
def index(self, req, image_id):
|
||||
"""
|
||||
@ -278,14 +278,14 @@ _MEMBER_SCHEMA = {
|
||||
'created_at': {
|
||||
'type': 'string',
|
||||
'description': _('Date and time of image member creation'),
|
||||
#TODO(brian-rosmaita): our jsonschema library doesn't seem to like the
|
||||
# TODO(brian-rosmaita): our jsonschema library doesn't seem to like the
|
||||
# format attribute, figure out why (and also fix in images.py)
|
||||
#'format': 'date-time',
|
||||
# 'format': 'date-time',
|
||||
},
|
||||
'updated_at': {
|
||||
'type': 'string',
|
||||
'description': _('Date and time of last modification of image member'),
|
||||
#'format': 'date-time',
|
||||
# 'format': 'date-time',
|
||||
},
|
||||
'status': {
|
||||
'type': 'string',
|
||||
|
@ -12,10 +12,8 @@
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import webob.exc
|
||||
|
||||
import glance_store
|
||||
import webob.exc
|
||||
|
||||
from glance.api import policy
|
||||
from glance.common import exception
|
||||
|
@ -211,7 +211,7 @@ class ImagesController(object):
|
||||
raise webob.exc.HTTPNotFound(explanation=msg)
|
||||
except exception.InUseByStore as e:
|
||||
msg = (_LI("Image %s could not be deleted "
|
||||
"because it is in use: %s") % (image_id, e.msg))
|
||||
"because it is in use: %s") % (image_id, e.msg)) # noqa
|
||||
LOG.info(msg)
|
||||
raise webob.exc.HTTPConflict(explanation=msg)
|
||||
|
||||
@ -246,8 +246,8 @@ class ImagesController(object):
|
||||
except (exception.BadStoreUri, exception.DuplicateLocation) as bse:
|
||||
raise webob.exc.HTTPBadRequest(explanation=bse.msg)
|
||||
except ValueError as ve: # update image status failed.
|
||||
raise webob.exc.HTTPBadRequest(explanation=
|
||||
utils.exception_to_str(ve))
|
||||
raise webob.exc.HTTPBadRequest(
|
||||
explanation=utils.exception_to_str(ve))
|
||||
|
||||
def _do_add_locations(self, image, path_pos, value):
|
||||
pos = self._get_locations_op_pos(path_pos,
|
||||
@ -262,8 +262,8 @@ class ImagesController(object):
|
||||
except (exception.BadStoreUri, exception.DuplicateLocation) as bse:
|
||||
raise webob.exc.HTTPBadRequest(explanation=bse.msg)
|
||||
except ValueError as ve: # update image status failed.
|
||||
raise webob.exc.HTTPBadRequest(explanation=
|
||||
utils.exception_to_str(ve))
|
||||
raise webob.exc.HTTPBadRequest(
|
||||
explanation=utils.exception_to_str(ve))
|
||||
|
||||
def _do_remove_locations(self, image, path_pos):
|
||||
pos = self._get_locations_op_pos(path_pos,
|
||||
@ -276,8 +276,8 @@ class ImagesController(object):
|
||||
# from the backend store.
|
||||
image.locations.pop(pos)
|
||||
except Exception as e:
|
||||
raise webob.exc.HTTPInternalServerError(explanation=
|
||||
utils.exception_to_str(e))
|
||||
raise webob.exc.HTTPInternalServerError(
|
||||
explanation=utils.exception_to_str(e))
|
||||
if (len(image.locations) == 0) and (image.status == 'active'):
|
||||
image.status = 'queued'
|
||||
|
||||
@ -312,8 +312,8 @@ class RequestDeserializer(wsgi.JSONRequestDeserializer):
|
||||
for key in cls._disallowed_properties:
|
||||
if key in image:
|
||||
msg = _("Attribute '%s' is read-only.") % key
|
||||
raise webob.exc.HTTPForbidden(explanation=
|
||||
utils.exception_to_str(msg))
|
||||
raise webob.exc.HTTPForbidden(
|
||||
explanation=utils.exception_to_str(msg))
|
||||
|
||||
def create(self, request):
|
||||
body = self._get_request_body(request)
|
||||
@ -741,15 +741,15 @@ def get_base_properties():
|
||||
'type': 'string',
|
||||
'description': _('Date and time of image registration'
|
||||
' (READ-ONLY)'),
|
||||
#TODO(bcwaldon): our jsonschema library doesn't seem to like the
|
||||
# TODO(bcwaldon): our jsonschema library doesn't seem to like the
|
||||
# format attribute, figure out why!
|
||||
#'format': 'date-time',
|
||||
# 'format': 'date-time',
|
||||
},
|
||||
'updated_at': {
|
||||
'type': 'string',
|
||||
'description': _('Date and time of the last image modification'
|
||||
' (READ-ONLY)'),
|
||||
#'format': 'date-time',
|
||||
# 'format': 'date-time',
|
||||
},
|
||||
'tags': {
|
||||
'type': 'array',
|
||||
|
@ -26,7 +26,7 @@ from glance.openstack.common import jsonutils as json
|
||||
|
||||
class Namespace(types.Base, WSMEModelTransformer):
|
||||
|
||||
#Base fields
|
||||
# Base fields
|
||||
namespace = wsme.wsattr(types.text, mandatory=True)
|
||||
display_name = wsme.wsattr(types.text, mandatory=False)
|
||||
description = wsme.wsattr(types.text, mandatory=False)
|
||||
@ -34,18 +34,18 @@ class Namespace(types.Base, WSMEModelTransformer):
|
||||
protected = wsme.wsattr(bool, mandatory=False)
|
||||
owner = wsme.wsattr(types.text, mandatory=False)
|
||||
|
||||
#Not using datetime since time format has to be
|
||||
#in glance.openstack.common.timeutils.isotime() format
|
||||
# Not using datetime since time format has to be
|
||||
# in glance.openstack.common.timeutils.isotime() format
|
||||
created_at = wsme.wsattr(types.text, mandatory=False)
|
||||
updated_at = wsme.wsattr(types.text, mandatory=False)
|
||||
|
||||
#Contained fields
|
||||
# Contained fields
|
||||
resource_type_associations = wsme.wsattr([ResourceTypeAssociation],
|
||||
mandatory=False)
|
||||
properties = wsme.wsattr({types.text: PropertyType}, mandatory=False)
|
||||
objects = wsme.wsattr([MetadefObject], mandatory=False)
|
||||
|
||||
#Generated fields
|
||||
# Generated fields
|
||||
self = wsme.wsattr(types.text, mandatory=False)
|
||||
schema = wsme.wsattr(types.text, mandatory=False)
|
||||
|
||||
@ -70,7 +70,7 @@ class Namespaces(types.Base, WSMEModelTransformer):
|
||||
|
||||
namespaces = wsme.wsattr([Namespace], mandatory=False)
|
||||
|
||||
#Pagination
|
||||
# Pagination
|
||||
next = wsme.wsattr(types.text, mandatory=False)
|
||||
schema = wsme.wsattr(types.text, mandatory=True)
|
||||
first = wsme.wsattr(types.text, mandatory=True)
|
||||
|
@ -27,12 +27,12 @@ class MetadefObject(types.Base, WSMEModelTransformer):
|
||||
description = wsme.wsattr(types.text, mandatory=False)
|
||||
properties = wsme.wsattr({types.text: PropertyType}, mandatory=False)
|
||||
|
||||
#Not using datetime since time format has to be
|
||||
#in glance.openstack.common.timeutils.isotime() format
|
||||
# Not using datetime since time format has to be
|
||||
# in glance.openstack.common.timeutils.isotime() format
|
||||
created_at = wsme.wsattr(types.text, mandatory=False)
|
||||
updated_at = wsme.wsattr(types.text, mandatory=False)
|
||||
|
||||
#Generated fields
|
||||
# Generated fields
|
||||
self = wsme.wsattr(types.text, mandatory=False)
|
||||
schema = wsme.wsattr(types.text, mandatory=False)
|
||||
|
||||
|
@ -21,8 +21,8 @@ from glance.common.wsme_utils import WSMEModelTransformer
|
||||
|
||||
|
||||
class PropertyType(types.Base, WSMEModelTransformer):
|
||||
#When used in collection of PropertyTypes, name is a dictionary key
|
||||
#and not included as separate field.
|
||||
# When used in collection of PropertyTypes, name is a dictionary key
|
||||
# and not included as separate field.
|
||||
name = wsme.wsattr(types.text, mandatory=False)
|
||||
|
||||
type = wsme.wsattr(types.text, mandatory=True)
|
||||
|
@ -24,8 +24,8 @@ class ResourceTypeAssociation(types.Base, WSMEModelTransformer):
|
||||
prefix = wsme.wsattr(types.text, mandatory=False)
|
||||
properties_target = wsme.wsattr(types.text, mandatory=False)
|
||||
|
||||
#Not using datetime since time format has to be
|
||||
#in glance.openstack.common.timeutils.isotime() format
|
||||
# Not using datetime since time format has to be
|
||||
# in glance.openstack.common.timeutils.isotime() format
|
||||
created_at = wsme.wsattr(types.text, mandatory=False)
|
||||
updated_at = wsme.wsattr(types.text, mandatory=False)
|
||||
|
||||
@ -45,8 +45,8 @@ class ResourceTypeAssociations(types.Base, WSMEModelTransformer):
|
||||
class ResourceType(types.Base, WSMEModelTransformer):
|
||||
name = wsme.wsattr(types.text, mandatory=True)
|
||||
|
||||
#Not using datetime since time format has to be
|
||||
#in glance.openstack.common.timeutils.isotime() format
|
||||
# Not using datetime since time format has to be
|
||||
# in glance.openstack.common.timeutils.isotime() format
|
||||
created_at = wsme.wsattr(types.text, mandatory=False)
|
||||
updated_at = wsme.wsattr(types.text, mandatory=False)
|
||||
|
||||
|
@ -33,7 +33,7 @@ class Controller(object):
|
||||
self.task_schema = tasks.get_task_schema()
|
||||
self.task_collection_schema = tasks.get_collection_schema()
|
||||
|
||||
#Metadef schemas
|
||||
# Metadef schemas
|
||||
self.metadef_namespace_schema = metadef_namespaces.get_schema()
|
||||
self.metadef_namespace_collection_schema = \
|
||||
metadef_namespaces.get_collection_schema()
|
||||
|
@ -15,12 +15,12 @@
|
||||
# under the License.
|
||||
|
||||
import copy
|
||||
import webob.exc
|
||||
|
||||
import glance_store
|
||||
from oslo.config import cfg
|
||||
import six
|
||||
import six.moves.urllib.parse as urlparse
|
||||
import webob.exc
|
||||
|
||||
from glance.api import policy
|
||||
from glance.common import exception
|
||||
@ -228,8 +228,8 @@ class ResponseSerializer(wsgi.JSONResponseSerializer):
|
||||
def __init__(self, task_schema=None, partial_task_schema=None):
|
||||
super(ResponseSerializer, self).__init__()
|
||||
self.task_schema = task_schema or get_task_schema()
|
||||
self.partial_task_schema = partial_task_schema \
|
||||
or _get_partial_task_schema()
|
||||
self.partial_task_schema = (partial_task_schema
|
||||
or _get_partial_task_schema())
|
||||
|
||||
def _inject_location_header(self, response, task):
|
||||
location = self._get_task_location(task)
|
||||
|
@ -21,10 +21,11 @@
|
||||
Glance API Server
|
||||
"""
|
||||
|
||||
import eventlet
|
||||
import os
|
||||
import sys
|
||||
|
||||
import eventlet
|
||||
|
||||
from glance.common import utils
|
||||
|
||||
# Monkey patch socket, time, select, threads
|
||||
|
@ -65,8 +65,8 @@ And command is one of:
|
||||
|
||||
{1}
|
||||
|
||||
And CONFPATH is the optional configuration file to use.""".\
|
||||
format(', '.join(ALL_SERVERS), ', '.join(ALL_COMMANDS))
|
||||
And CONFPATH is the optional configuration file to use.""".format(
|
||||
', '.join(ALL_SERVERS), ', '.join(ALL_COMMANDS))
|
||||
|
||||
exitcode = 0
|
||||
|
||||
|
@ -21,10 +21,11 @@
|
||||
Reference implementation server for Glance Registry
|
||||
"""
|
||||
|
||||
import eventlet
|
||||
import os
|
||||
import sys
|
||||
|
||||
import eventlet
|
||||
|
||||
# Monkey patch socket and time
|
||||
eventlet.patcher.monkey_patch(all=False, socket=True, time=True, thread=True)
|
||||
|
||||
|
@ -444,7 +444,7 @@ def replication_load(options, args):
updated.append(meta['id'])
except ImageAlreadyPresentException:
LOG.error(_LE(IMAGE_ALREADY_PRESENT_MESSAGE)
% image_uuid)
% image_uuid) # noqa

return updated

@ -515,7 +515,7 @@ def replication_livecopy(options, args):
_check_upload_response_headers(headers, body)
updated.append(image['id'])
except ImageAlreadyPresentException:
LOG.error(_LE(IMAGE_ALREADY_PRESENT_MESSAGE) % image['id'])
LOG.error(_LE(IMAGE_ALREADY_PRESENT_MESSAGE) % image['id']) # noqa

return updated

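glance-replicator keeps its existing log calls and appends # noqa where the new checks would otherwise fail the job, apparently because _LE() is handed a module-level constant rather than a string literal. The diff does not record which codes are being silenced, so the sketch below only mirrors the shape of the change, with an illustrative constant:

ALREADY_PRESENT = "Image %s is already present on the target server."


def log_duplicate(LOG, _LE, image_id):
    # The argument to _LE() is a name rather than a literal string, which
    # the stricter checks reject; the commit keeps the call site and
    # suppresses the warning in place instead of rewording the message.
    LOG.error(_LE(ALREADY_PRESENT) % image_id)  # noqa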
@ -736,10 +736,10 @@ def main():
|
||||
try:
|
||||
command(options, args)
|
||||
except TypeError as e:
|
||||
LOG.error(_LE(command.__doc__) % {'prog': command.__name__})
|
||||
LOG.error(_LE(command.__doc__) % {'prog': command.__name__}) # noqa
|
||||
sys.exit("ERROR: %s" % e)
|
||||
except ValueError as e:
|
||||
LOG.error(_LE(command.__doc__) % {'prog': command.__name__})
|
||||
LOG.error(_LE(command.__doc__) % {'prog': command.__name__}) # noqa
|
||||
sys.exit("ERROR: %s" % e)
|
||||
|
||||
|
||||
|
@ -29,7 +29,6 @@ Keystone (an identity management system).
|
||||
http://service_endpoint/
|
||||
"""
|
||||
import httplib2
|
||||
|
||||
import six.moves.urllib.parse as urlparse
|
||||
|
||||
from glance.common import exception
|
||||
|
@ -26,9 +26,9 @@ LOG = logging.getLogger(__name__)
|
||||
|
||||
def run_task(task_id, task_type, context,
|
||||
task_repo=None, image_repo=None, image_factory=None):
|
||||
#TODO(nikhil): if task_repo is None get new task repo
|
||||
#TODO(nikhil): if image_repo is None get new image repo
|
||||
#TODO(nikhil): if image_factory is None get new image factory
|
||||
# TODO(nikhil): if task_repo is None get new task repo
|
||||
# TODO(nikhil): if image_repo is None get new image repo
|
||||
# TODO(nikhil): if image_factory is None get new image factory
|
||||
LOG.info(_LI("Loading known task scripts for task_id %(task_id)s "
|
||||
"of type %(task_type)s"), {'task_id': task_id,
|
||||
'task_type': task_type})
|
||||
|
@ -66,14 +66,14 @@ def _execute(t_id, task_repo, image_repo, image_factory):
|
||||
# Note: The message string contains Error in it to indicate
|
||||
# in the task.message that it's a error message for the user.
|
||||
|
||||
#TODO(nikhil): need to bring back save_and_reraise_exception when
|
||||
# TODO(nikhil): need to bring back save_and_reraise_exception when
|
||||
# necessary
|
||||
err_msg = ("Error: " + six.text_type(type(e)) + ': ' +
|
||||
common_utils.exception_to_str(e))
|
||||
log_msg = _LE(err_msg + ("Task ID %s" % task.task_id))
|
||||
log_msg = _LE(err_msg + ("Task ID %s" % task.task_id)) # noqa
|
||||
LOG.exception(log_msg)
|
||||
|
||||
task.fail(_LE(err_msg))
|
||||
task.fail(_LE(err_msg)) # noqa
|
||||
finally:
|
||||
task_repo.save(task)
|
||||
|
||||
|
@ -101,7 +101,7 @@ def validate_location_uri(location):
|
||||
raise StandardError(msg)
|
||||
|
||||
else:
|
||||
#TODO(nikhil): add other supported uris
|
||||
# TODO(nikhil): add other supported uris
|
||||
supported = ['http', ]
|
||||
msg = _("The given uri is not valid. Please specify a "
|
||||
"valid uri from the following list of supported uri "
|
||||
|
@ -67,8 +67,8 @@ class SwiftParams(object):

def _form_default_params(self):
default = {}
if CONF.swift_store_user and CONF.swift_store_key \
and CONF.swift_store_auth_address:
if (CONF.swift_store_user and CONF.swift_store_key
and CONF.swift_store_auth_address):
default['user'] = CONF.swift_store_user
default['key'] = CONF.swift_store_key
default['auth_address'] = CONF.swift_store_auth_address
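Multi-line if conditions get the equivalent treatment: the backslash goes away and the whole test is wrapped in one pair of parentheses so the continuation aligns naturally. A sketch with an illustrative config mapping:

def has_swift_credentials(conf):
    # Equivalent to the rewritten check in SwiftParams._form_default_params:
    # one parenthesised expression instead of a backslash continuation.
    return (conf.get('user') and conf.get('key')
            and conf.get('auth_address'))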
@ -38,9 +38,8 @@ import uuid
|
||||
import netaddr
|
||||
from OpenSSL import crypto
|
||||
from oslo.config import cfg
|
||||
from webob import exc
|
||||
|
||||
import six
|
||||
from webob import exc
|
||||
|
||||
from glance.common import exception
|
||||
from glance.openstack.common import excutils
|
||||
|
@ -433,7 +433,7 @@ class Debug(Middleware):
sys.stdout.write(part)
sys.stdout.flush()
yield part
print
print()


class APIMapper(routes.Mapper):
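The bare print statement in the debug middleware becomes a print() call, the Python 3 spelling that hacking's print-operator check enforces (H233 in this series is an assumed attribution; the code is not named in the diff). One caveat: on Python 2 the call form only matches the old statement exactly when print_function is imported from __future__, as in this sketch:

from __future__ import print_function

import sys


def echo_parts(parts):
    """Echo an iterable of text chunks to stdout, then end the line."""
    for part in parts:
        sys.stdout.write(part)
        sys.stdout.flush()
        yield part
    # With print_function imported, this bare call emits just a newline,
    # matching the old Python 2 print statement with no arguments.
    print()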
@ -20,9 +20,11 @@ from glance.openstack.common import local
|
||||
|
||||
|
||||
class RequestContext(object):
|
||||
"""
|
||||
Stores information about the security context under which the user
|
||||
accesses the system, as well as additional request information.
|
||||
"""Stores information about the security context.
|
||||
|
||||
Stores how the user accesses the system, as well as additional request
|
||||
information.
|
||||
|
||||
"""
|
||||
|
||||
user_idt_format = '{user} {tenant} {domain} {user_domain} {p_domain}'
|
||||
@ -47,8 +49,7 @@ class RequestContext(object):
|
||||
self.user_domain = user_domain
|
||||
self.project_domain = project_domain
|
||||
if not self.is_admin:
|
||||
self.is_admin = \
|
||||
self.policy_enforcer.check_is_admin(self)
|
||||
self.is_admin = self.policy_enforcer.check_is_admin(self)
|
||||
|
||||
if not hasattr(local.store, 'context'):
|
||||
self.update_store()
|
||||
@ -67,11 +68,11 @@ class RequestContext(object):
|
||||
return {
|
||||
'request_id': self.request_id,
|
||||
|
||||
#NOTE(bcwaldon): openstack-common logging expects 'user'
|
||||
# NOTE(bcwaldon): openstack-common logging expects 'user'
|
||||
'user': self.user,
|
||||
'user_id': self.user,
|
||||
|
||||
#NOTE(bcwaldon): openstack-common logging expects 'tenant'
|
||||
# NOTE(bcwaldon): openstack-common logging expects 'tenant'
|
||||
'tenant': self.tenant,
|
||||
'tenant_id': self.tenant,
|
||||
'project_id': self.tenant,
|
||||
|
@ -209,7 +209,7 @@ def _image_format(image_id, **values):
|
||||
image['locations'].append(location_ref)
|
||||
DATA['locations'].append(location_ref)
|
||||
|
||||
#NOTE(bcwaldon): store properties as a list to match sqlalchemy driver
|
||||
# NOTE(bcwaldon): store properties as a list to match sqlalchemy driver
|
||||
properties = values.pop('properties', {})
|
||||
properties = [{'name': k,
|
||||
'value': v,
|
||||
|
@ -34,11 +34,6 @@ import sqlalchemy.orm as sa_orm
|
||||
import sqlalchemy.sql as sa_sql
|
||||
|
||||
from glance.common import exception
|
||||
from glance.db.sqlalchemy import models
|
||||
from glance import i18n
|
||||
import glance.openstack.common.log as os_logging
|
||||
from glance.openstack.common import timeutils
|
||||
|
||||
from glance.db.sqlalchemy.metadef_api import namespace as metadef_namespace_api
|
||||
from glance.db.sqlalchemy.metadef_api import object as metadef_object_api
|
||||
from glance.db.sqlalchemy.metadef_api import property as metadef_property_api
|
||||
@ -46,6 +41,10 @@ from glance.db.sqlalchemy.metadef_api\
|
||||
import resource_type as metadef_resource_type_api
|
||||
from glance.db.sqlalchemy.metadef_api\
|
||||
import resource_type_association as metadef_association_api
|
||||
from glance.db.sqlalchemy import models
|
||||
from glance import i18n
|
||||
import glance.openstack.common.log as os_logging
|
||||
from glance.openstack.common import timeutils
|
||||
|
||||
BASE = models.BASE
|
||||
sa_logger = None
|
||||
@ -201,8 +200,8 @@ def _check_image_id(image_id):
|
||||
:param image_id: The id of the image we want to check
|
||||
:return: Raise NoFound exception if given image id is invalid
|
||||
"""
|
||||
if image_id and \
|
||||
len(image_id) > models.Image.id.property.columns[0].type.length:
|
||||
if (image_id and
|
||||
len(image_id) > models.Image.id.property.columns[0].type.length):
|
||||
raise exception.NotFound()
|
||||
|
||||
|
||||
@ -212,10 +211,10 @@ def _image_get(context, image_id, session=None, force_show_deleted=False):
session = session or get_session()

try:
query = session.query(models.Image)\
.options(sa_orm.joinedload(models.Image.properties))\
.options(sa_orm.joinedload(models.Image.locations))\
.filter_by(id=image_id)
query = session.query(models.Image).options(
sa_orm.joinedload(models.Image.properties)).options(
sa_orm.joinedload(
models.Image.locations)).filter_by(id=image_id)

# filter out deleted images if context disallows it
if not force_show_deleted and not _can_show_deleted(context):
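Every chained SQLAlchemy query in glance/db/sqlalchemy/api.py and the migration scripts is rewrapped the same way: the trailing backslashes before .options() and .filter_by() are removed and the breaks move inside the call parentheses, with no change to the query itself. A self-contained sketch of the wrapping (the small stand-in class exists only so the example runs without a database):

class QuerySketch(object):
    """Minimal stand-in for a SQLAlchemy query; it just records the chain."""

    def __init__(self, steps=()):
        self.steps = tuple(steps)

    def options(self, value):
        return QuerySketch(self.steps + (('options', value),))

    def filter_by(self, **kwargs):
        return QuerySketch(self.steps + (('filter_by', kwargs),))


def build_old_style(query, image_id):
    # Before: every continuation line needs a trailing backslash.
    return query\
        .options('joinedload(properties)')\
        .options('joinedload(locations)')\
        .filter_by(id=image_id)


def build_new_style(query, image_id):
    # After: the identical chain, with the breaks inside the parentheses.
    return query.options(
        'joinedload(properties)').options(
            'joinedload(locations)').filter_by(id=image_id)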
@ -396,7 +395,8 @@ def _paginate_query(query, model, limit, sort_keys, marker=None,
|
||||
|
||||
|
||||
def _make_conditions_from_filters(filters, is_public=None):
|
||||
#NOTE(venkatesh) make copy of the filters are to be altered in this method.
|
||||
# NOTE(venkatesh) make copy of the filters are to be altered in this
|
||||
# method.
|
||||
filters = filters.copy()
|
||||
|
||||
image_conditions = []
|
||||
@ -485,9 +485,8 @@ def _select_images_query(context, image_conditions, admin_as_user,
|
||||
|
||||
regular_user = (not context.is_admin) or admin_as_user
|
||||
|
||||
query_member = session.query(models.Image) \
|
||||
.join(models.Image.members) \
|
||||
.filter(img_conditional_clause)
|
||||
query_member = session.query(models.Image).join(
|
||||
models.Image.members).filter(img_conditional_clause)
|
||||
if regular_user:
|
||||
member_filters = [models.ImageMember.deleted == False]
|
||||
if context.owner is not None:
|
||||
@ -497,27 +496,26 @@ def _select_images_query(context, image_conditions, admin_as_user,
|
||||
models.ImageMember.status == member_status])
|
||||
query_member = query_member.filter(sa_sql.and_(*member_filters))
|
||||
|
||||
#NOTE(venkatesh) if the 'visibility' is set to 'shared', we just
|
||||
# NOTE(venkatesh) if the 'visibility' is set to 'shared', we just
|
||||
# query the image members table. No union is required.
|
||||
if visibility is not None and visibility == 'shared':
|
||||
return query_member
|
||||
|
||||
query_image = session.query(models.Image)\
|
||||
.filter(img_conditional_clause)
|
||||
query_image = session.query(models.Image).filter(img_conditional_clause)
|
||||
if regular_user:
|
||||
query_image = query_image.filter(models.Image.is_public == True)
|
||||
query_image_owner = None
|
||||
if context.owner is not None:
|
||||
query_image_owner = session.query(models.Image) \
|
||||
.filter(models.Image.owner == context.owner) \
|
||||
.filter(img_conditional_clause)
|
||||
query_image_owner = session.query(models.Image).filter(
|
||||
models.Image.owner == context.owner).filter(
|
||||
img_conditional_clause)
|
||||
if query_image_owner is not None:
|
||||
query = query_image.union(query_image_owner, query_member)
|
||||
else:
|
||||
query = query_image.union(query_member)
|
||||
return query
|
||||
else:
|
||||
#Admin user
|
||||
# Admin user
|
||||
return query_image
|
||||
|
||||
|
||||
@ -552,11 +550,11 @@ def image_get_all(context, filters=None, marker=None, limit=None,
|
||||
showing_deleted = 'changes-since' in filters or filters.get('deleted',
|
||||
False)
|
||||
|
||||
img_conditions, prop_conditions, tag_conditions = \
|
||||
_make_conditions_from_filters(filters, is_public)
|
||||
img_cond, prop_cond, tag_cond = _make_conditions_from_filters(
|
||||
filters, is_public)
|
||||
|
||||
query = _select_images_query(context,
|
||||
img_conditions,
|
||||
img_cond,
|
||||
admin_as_user,
|
||||
member_status,
|
||||
visibility)
|
||||
@ -567,15 +565,15 @@ def image_get_all(context, filters=None, marker=None, limit=None,
|
||||
elif visibility == 'private':
|
||||
query = query.filter(models.Image.is_public == False)
|
||||
|
||||
if prop_conditions:
|
||||
for prop_condition in prop_conditions:
|
||||
query = query.join(models.ImageProperty, aliased=True)\
|
||||
.filter(sa_sql.and_(*prop_condition))
|
||||
if prop_cond:
|
||||
for prop_condition in prop_cond:
|
||||
query = query.join(models.ImageProperty, aliased=True).filter(
|
||||
sa_sql.and_(*prop_condition))
|
||||
|
||||
if tag_conditions:
|
||||
for tag_condition in tag_conditions:
|
||||
query = query.join(models.ImageTag, aliased=True)\
|
||||
.filter(sa_sql.and_(*tag_condition))
|
||||
if tag_cond:
|
||||
for tag_condition in tag_cond:
|
||||
query = query.join(models.ImageTag, aliased=True).filter(
|
||||
sa_sql.and_(*tag_condition))
|
||||
|
||||
marker_image = None
|
||||
if marker is not None:
|
||||
@ -591,8 +589,9 @@ def image_get_all(context, filters=None, marker=None, limit=None,
|
||||
marker=marker_image,
|
||||
sort_dir=sort_dir)
|
||||
|
||||
query = query.options(sa_orm.joinedload(models.Image.properties))\
|
||||
.options(sa_orm.joinedload(models.Image.locations))
|
||||
query = query.options(sa_orm.joinedload(
|
||||
models.Image.properties)).options(
|
||||
sa_orm.joinedload(models.Image.locations))
|
||||
if return_tag:
|
||||
query = query.options(sa_orm.joinedload(models.Image.tags))
|
||||
|
||||
@ -671,7 +670,7 @@ def _image_update(context, values, image_id, purge_props=False,
|
||||
:param image_id: If None, create the image, otherwise, find and update it
|
||||
"""
|
||||
|
||||
#NOTE(jbresnah) values is altered in this so a copy is needed
|
||||
# NOTE(jbresnah) values is altered in this so a copy is needed
|
||||
values = values.copy()
|
||||
|
||||
session = get_session()
|
||||
@ -714,7 +713,7 @@ def _image_update(context, values, image_id, purge_props=False,
|
||||
if image_id:
|
||||
# Don't drop created_at if we're passing it in...
|
||||
_drop_protected_attrs(models.Image, values)
|
||||
#NOTE(iccha-sethi): updated_at must be explicitly set in case
|
||||
# NOTE(iccha-sethi): updated_at must be explicitly set in case
|
||||
# only ImageProperty table was modifited
|
||||
values['updated_at'] = timeutils.utcnow()
|
||||
|
||||
@ -792,10 +791,8 @@ def image_location_update(context, image_id, location, session=None):
|
||||
|
||||
try:
|
||||
session = session or get_session()
|
||||
location_ref = session.query(models.ImageLocation)\
|
||||
.filter_by(id=loc_id)\
|
||||
.filter_by(image_id=image_id)\
|
||||
.one()
|
||||
location_ref = session.query(models.ImageLocation).filter_by(
|
||||
id=loc_id).filter_by(image_id=image_id).one()
|
||||
|
||||
deleted = location['status'] in ('deleted', 'pending_delete')
|
||||
updated_time = timeutils.utcnow()
|
||||
@ -824,10 +821,8 @@ def image_location_delete(context, image_id, location_id, status,
|
||||
|
||||
try:
|
||||
session = session or get_session()
|
||||
location_ref = session.query(models.ImageLocation)\
|
||||
.filter_by(id=location_id)\
|
||||
.filter_by(image_id=image_id)\
|
||||
.one()
|
||||
location_ref = session.query(models.ImageLocation).filter_by(
|
||||
id=location_id).filter_by(image_id=image_id).one()
|
||||
|
||||
delete_time = delete_time or timeutils.utcnow()
|
||||
|
||||
@ -846,12 +841,12 @@ def image_location_delete(context, image_id, location_id, status,
|
||||
def _image_locations_set(context, image_id, locations, session=None):
|
||||
# NOTE(zhiyan): 1. Remove records from DB for deleted locations
|
||||
session = session or get_session()
|
||||
query = session.query(models.ImageLocation) \
|
||||
.filter_by(image_id=image_id) \
|
||||
.filter_by(deleted=False) \
|
||||
.filter(~models.ImageLocation.id.in_([loc['id']
|
||||
for loc in locations
|
||||
if loc.get('id')]))
|
||||
query = session.query(models.ImageLocation).filter_by(
|
||||
image_id=image_id).filter_by(
|
||||
deleted=False).filter(~models.ImageLocation.id.in_(
|
||||
[loc['id']
|
||||
for loc in locations
|
||||
if loc.get('id')]))
|
||||
for loc_id in [loc_ref.id for loc_ref in query.all()]:
|
||||
image_location_delete(context, image_id, loc_id, 'deleted',
|
||||
session=session)
|
||||
@ -868,10 +863,8 @@ def _image_locations_delete_all(context, image_id,
|
||||
delete_time=None, session=None):
|
||||
"""Delete all image locations for given image"""
|
||||
session = session or get_session()
|
||||
location_refs = session.query(models.ImageLocation) \
|
||||
.filter_by(image_id=image_id) \
|
||||
.filter_by(deleted=False) \
|
||||
.all()
|
||||
location_refs = session.query(models.ImageLocation).filter_by(
|
||||
image_id=image_id).filter_by(deleted=False).all()
|
||||
|
||||
for loc_id in [loc_ref.id for loc_ref in location_refs]:
|
||||
image_location_delete(context, image_id, loc_id, 'deleted',
|
||||
@ -933,9 +926,8 @@ def _image_child_entry_delete_all(child_model_cls, image_id, delete_time=None,
|
||||
"""
|
||||
session = session or get_session()
|
||||
|
||||
query = session.query(child_model_cls) \
|
||||
.filter_by(image_id=image_id) \
|
||||
.filter_by(deleted=False)
|
||||
query = session.query(child_model_cls).filter_by(
|
||||
image_id=image_id).filter_by(deleted=False)
|
||||
|
||||
delete_time = delete_time or timeutils.utcnow()
|
||||
|
||||
@ -1114,7 +1106,7 @@ def _can_show_deleted(context):
|
||||
|
||||
|
||||
def image_tag_set_all(context, image_id, tags):
|
||||
#NOTE(kragniz): tag ordering should match exactly what was provided, so a
|
||||
# NOTE(kragniz): tag ordering should match exactly what was provided, so a
|
||||
# subsequent call to image_tag_get_all returns them in the correct order
|
||||
|
||||
session = get_session()
|
||||
@ -1143,10 +1135,9 @@ def image_tag_delete(context, image_id, value, session=None):
|
||||
"""Delete an image tag."""
|
||||
_check_image_id(image_id)
|
||||
session = session or get_session()
|
||||
query = session.query(models.ImageTag)\
|
||||
.filter_by(image_id=image_id)\
|
||||
.filter_by(value=value)\
|
||||
.filter_by(deleted=False)
|
||||
query = session.query(models.ImageTag).filter_by(
|
||||
image_id=image_id).filter_by(
|
||||
value=value).filter_by(deleted=False)
|
||||
try:
|
||||
tag_ref = query.one()
|
||||
except sa_orm.exc.NoResultFound:
|
||||
@ -1168,10 +1159,8 @@ def image_tag_get_all(context, image_id, session=None):
|
||||
"""Get a list of tags for a specific image."""
|
||||
_check_image_id(image_id)
|
||||
session = session or get_session()
|
||||
tags = session.query(models.ImageTag.value)\
|
||||
.filter_by(image_id=image_id)\
|
||||
.filter_by(deleted=False)\
|
||||
.all()
|
||||
tags = session.query(models.ImageTag.value).filter_by(
|
||||
image_id=image_id).filter_by(deleted=False).all()
|
||||
return [tag[0] for tag in tags]
|
||||
|
||||
|
||||
@ -1319,8 +1308,8 @@ def task_get_all(context, filters=None, marker=None, limit=None,
|
||||
session = get_session()
|
||||
query = session.query(models.Task)
|
||||
|
||||
if not (context.is_admin or admin_as_user == True) and \
|
||||
context.owner is not None:
|
||||
if (not (context.is_admin or admin_as_user == True)
|
||||
and context.owner is not None):
|
||||
query = query.filter(models.Task.owner == context.owner)
|
||||
|
||||
showing_deleted = False
|
||||
|
@ -287,8 +287,9 @@ def _export_data_to_file(meta, path):
|
||||
json_file.write(json.dumps(values))
|
||||
except Exception as e:
|
||||
LOG.exception(utils.exception_to_str(e))
|
||||
LOG.info(_LI("Namespace %s saved in %s"),
|
||||
namespace_file_name, file_name)
|
||||
msg = _LI("Namespace %(namespace_file_name)s saved in %(file_name)s")
|
||||
LOG.info(msg % {'namespace_file_name': namespace_file_name,
|
||||
'file_name': file_name})
|
||||
|
||||
|
||||
def db_load_metadefs(engine, metadata_path=None):
|
||||
|
@ -86,8 +86,8 @@ def _get(context, namespace_id, session):
.filter_by(id=namespace_id)
namespace_rec = query.one()
except sa_orm.exc.NoResultFound:
LOG.warn(_LW("Metadata definition namespace not found for id=%s",
namespace_id))
msg = _LW("Metadata definition namespace not found for id=%s")
LOG.warn(msg % namespace_id)
raise exc.MetadefRecordNotFound(record_type='namespace',
id=namespace_id)
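This hunk, and the two matching ones for metadef objects and properties just below, is slightly more than a reformat: the old call passed namespace_id as a second argument to _LW() rather than interpolating it, so the %s placeholder never received the id. The rewrite builds the translated message first and applies the % formatting explicitly before logging. A sketch with a stand-in translation marker:

def _LW(msg):
    """Stand-in for the i18n warning marker; the real one returns a lazy string."""
    return msg


def warn_namespace_missing(LOG, namespace_id):
    # Build the translated message first, then interpolate explicitly,
    # instead of handing the id to _LW() as an extra argument.
    msg = _LW("Metadata definition namespace not found for id=%s")
    LOG.warn(msg % namespace_id)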
@ -34,8 +34,8 @@ def _get(context, object_id, session):
|
||||
.filter_by(id=object_id)
|
||||
metadef_object = query.one()
|
||||
except sa_orm.exc.NoResultFound:
|
||||
LOG.warn(_LW("Metadata definition object not found for id %s",
|
||||
object_id))
|
||||
msg = _LW("Metadata definition object not found for id %s")
|
||||
LOG.warn(msg % object_id)
|
||||
raise exc.MetadefRecordNotFound(record_type='object', id=object_id)
|
||||
|
||||
return metadef_object
|
||||
|
@ -36,8 +36,8 @@ def _get(context, property_id, session):
|
||||
property_rec = query.one()
|
||||
|
||||
except sa_orm.exc.NoResultFound:
|
||||
LOG.warn(_LW("Metadata definition property not found for id=%s",
|
||||
property_id))
|
||||
msg = _LW("Metadata definition property not found for id=%s")
|
||||
LOG.warn(msg % property_id)
|
||||
raise exc.MetadefRecordNotFound(
|
||||
record_type='property', id=property_id)
|
||||
|
||||
|
@ -1 +0,0 @@
|
||||
# template repository default module
|
@ -237,7 +237,7 @@ def _upgrade_db2(t_images, t_image_members, t_image_properties):
|
||||
|
||||
|
||||
def _add_db2_constraints():
|
||||
#Create the foreign keys
|
||||
# Create the foreign keys
|
||||
sql_commands = [
|
||||
"""ALTER TABLE image_members ADD CONSTRAINT member_image_id
|
||||
FOREIGN KEY (image_id)
|
||||
@ -251,7 +251,7 @@ def _add_db2_constraints():
|
||||
|
||||
|
||||
def _remove_db2_constraints():
|
||||
#remove the foreign keys constraints
|
||||
# Remove the foreign keys constraints
|
||||
sql_commands = [
|
||||
"""ALTER TABLE image_members DROP CONSTRAINT member_image_id;""",
|
||||
"""ALTER TABLE image_properties DROP CONSTRAINT property_image_id;"""
|
||||
@ -457,8 +457,8 @@ def _downgrade_other(t_images, t_image_members, t_image_properties, dialect):
|
||||
_update_all_uuids_to_ids(t_images, t_image_members, t_image_properties)
|
||||
|
||||
t_images.c.id.alter(primary_key=True)
|
||||
#we have to use raw sql for postgresql as we have errors
|
||||
#if we use alter type on sqlalchemy
|
||||
# we have to use raw sql for postgresql as we have errors
|
||||
# if we use alter type on sqlalchemy
|
||||
if dialect == 'postgresql':
|
||||
t_images.bind.execute('''ALTER TABLE images
|
||||
ALTER COLUMN id TYPE INTEGER
|
||||
@ -536,23 +536,22 @@ def _update_all_ids_to_uuids(t_images, t_image_members, t_image_properties):
|
||||
old_id = image["id"]
|
||||
new_id = str(uuid.uuid4())
|
||||
|
||||
t_images.update().\
|
||||
where(t_images.c.id == old_id).\
|
||||
values(id=new_id).execute()
|
||||
t_images.update().where(
|
||||
t_images.c.id == old_id).values(id=new_id).execute()
|
||||
|
||||
t_image_members.update().\
|
||||
where(t_image_members.c.image_id == old_id).\
|
||||
values(image_id=new_id).execute()
|
||||
t_image_members.update().where(
|
||||
t_image_members.c.image_id == old_id).values(
|
||||
image_id=new_id).execute()
|
||||
|
||||
t_image_properties.update().\
|
||||
where(t_image_properties.c.image_id == old_id).\
|
||||
values(image_id=new_id).execute()
|
||||
t_image_properties.update().where(
|
||||
t_image_properties.c.image_id == old_id).values(
|
||||
image_id=new_id).execute()
|
||||
|
||||
t_image_properties.update().\
|
||||
where(and_(or_(t_image_properties.c.name == 'kernel_id',
|
||||
t_image_properties.c.name == 'ramdisk_id'),
|
||||
t_image_properties.c.value == old_id)).\
|
||||
values(value=new_id).execute()
|
||||
t_image_properties.update().where(
|
||||
and_(or_(t_image_properties.c.name == 'kernel_id',
|
||||
t_image_properties.c.name == 'ramdisk_id'),
|
||||
t_image_properties.c.value == old_id)).values(
|
||||
value=new_id).execute()
|
||||
|
||||
|
||||
def _update_all_uuids_to_ids(t_images, t_image_members, t_image_properties):
|
||||
@ -563,22 +562,22 @@ def _update_all_uuids_to_ids(t_images, t_image_members, t_image_properties):
|
||||
for image in images:
|
||||
old_id = image["id"]
|
||||
|
||||
t_images.update().\
|
||||
where(t_images.c.id == old_id).\
|
||||
values(id=str(new_id)).execute()
|
||||
t_images.update().where(
|
||||
t_images.c.id == old_id).values(
|
||||
id=str(new_id)).execute()
|
||||
|
||||
t_image_members.update().\
|
||||
where(t_image_members.c.image_id == old_id).\
|
||||
values(image_id=str(new_id)).execute()
|
||||
t_image_members.update().where(
|
||||
t_image_members.c.image_id == old_id).values(
|
||||
image_id=str(new_id)).execute()
|
||||
|
||||
t_image_properties.update().\
|
||||
where(t_image_properties.c.image_id == old_id).\
|
||||
values(image_id=str(new_id)).execute()
|
||||
t_image_properties.update().where(
|
||||
t_image_properties.c.image_id == old_id).values(
|
||||
image_id=str(new_id)).execute()
|
||||
|
||||
t_image_properties.update().\
|
||||
where(and_(or_(t_image_properties.c.name == 'kernel_id',
|
||||
t_image_properties.c.name == 'ramdisk_id'),
|
||||
t_image_properties.c.value == old_id)).\
|
||||
values(value=str(new_id)).execute()
|
||||
t_image_properties.update().where(
|
||||
and_(or_(t_image_properties.c.name == 'kernel_id',
|
||||
t_image_properties.c.name == 'ramdisk_id'),
|
||||
t_image_properties.c.value == old_id)).values(
|
||||
value=str(new_id)).execute()
|
||||
|
||||
new_id += 1
|
||||
|
@ -54,9 +54,9 @@ def migrate_location_credentials(migrate_engine, to_quoted):
|
||||
for image in images:
|
||||
try:
|
||||
fixed_uri = legacy_parse_uri(image['location'], to_quoted)
|
||||
images_table.update()\
|
||||
.where(images_table.c.id == image['id'])\
|
||||
.values(location=fixed_uri).execute()
|
||||
images_table.update().where(
|
||||
images_table.c.id == image['id']).values(
|
||||
location=fixed_uri).execute()
|
||||
except exception.BadStoreUri as e:
|
||||
reason = utils.exception_to_str(e)
|
||||
msg = _LE("Invalid store uri for image: %(image_id)s. "
|
||||
|
@ -28,7 +28,7 @@ Fixes bug #1081043
|
||||
"""
|
||||
import types # noqa
|
||||
|
||||
#NOTE(flaper87): This is bad but there ain't better way to do it.
|
||||
# NOTE(flaper87): This is bad but there ain't better way to do it.
|
||||
from glance_store._drivers import swift # noqa
|
||||
from oslo.config import cfg
|
||||
import six.moves.urllib.parse as urlparse
|
||||
@ -84,9 +84,9 @@ def migrate_location_credentials(migrate_engine, to_quoted):
|
||||
for image in images:
|
||||
try:
|
||||
fixed_uri = fix_uri_credentials(image['location'], to_quoted)
|
||||
images_table.update()\
|
||||
.where(images_table.c.id == image['id'])\
|
||||
.values(location=fixed_uri).execute()
|
||||
images_table.update().where(
|
||||
images_table.c.id == image['id']).values(
|
||||
location=fixed_uri).execute()
|
||||
except exception.Invalid:
|
||||
msg = _LW("Failed to decrypt location value for image"
|
||||
" %(image_id)s") % {'image_id': image['id']}
|
||||
@ -125,7 +125,7 @@ def fix_uri_credentials(uri, to_quoted):
|
||||
return
|
||||
try:
|
||||
decrypted_uri = decrypt_location(uri)
|
||||
#NOTE (ameade): If a uri is not encrypted or incorrectly encoded then we
|
||||
# NOTE (ameade): If a uri is not encrypted or incorrectly encoded then we
|
||||
# we raise an exception.
|
||||
except (TypeError, ValueError) as e:
|
||||
raise exception.Invalid(str(e))
|
||||
|
@ -21,7 +21,7 @@ from glance.db.sqlalchemy.migrate_repo import schema
|
||||
def upgrade(migrate_engine):
|
||||
meta = sqlalchemy.schema.MetaData(migrate_engine)
|
||||
|
||||
#NOTE(bcwaldon): load the images table for the ForeignKey below
|
||||
# NOTE(bcwaldon): load the images table for the ForeignKey below
|
||||
sqlalchemy.Table('images', meta, autoload=True)
|
||||
|
||||
image_locations_table = sqlalchemy.Table(
|
||||
|
@ -53,6 +53,6 @@ def downgrade(migrate_engine):
|
||||
image_records = image_locations_table.select().execute().fetchall()
|
||||
|
||||
for image_location in image_records:
|
||||
images_table.update(values={'location': image_location.value})\
|
||||
.where(images_table.c.id == image_location.image_id)\
|
||||
.execute()
|
||||
images_table.update(
|
||||
values={'location': image_location.value}).where(
|
||||
images_table.c.id == image_location.image_id).execute()
|
||||
|
@ -29,8 +29,8 @@ def upgrade(migrate_engine):
|
||||
new_meta_data.create(image_locations)
|
||||
|
||||
noe = pickle.dumps({})
|
||||
s = sqlalchemy.sql.select([image_locations]).\
|
||||
where(image_locations.c.meta_data != noe)
|
||||
s = sqlalchemy.sql.select([image_locations]).where(
|
||||
image_locations.c.meta_data != noe)
|
||||
conn = migrate_engine.connect()
|
||||
res = conn.execute(s)
|
||||
|
||||
@ -38,9 +38,8 @@ def upgrade(migrate_engine):
|
||||
meta_data = row['meta_data']
|
||||
x = pickle.loads(meta_data)
|
||||
if x != {}:
|
||||
stmt = image_locations.update().\
|
||||
where(image_locations.c.id == row['id']).\
|
||||
values(storage_meta_data=x)
|
||||
stmt = image_locations.update().where(
|
||||
image_locations.c.id == row['id']).values(storage_meta_data=x)
|
||||
conn.execute(stmt)
|
||||
conn.close()
|
||||
image_locations.columns['meta_data'].drop()
|
||||
@ -55,8 +54,8 @@ def downgrade(migrate_engine):
|
||||
old_meta_data.create(image_locations)
|
||||
|
||||
noj = json.dumps({})
|
||||
s = sqlalchemy.sql.select([image_locations]).\
|
||||
where(image_locations.c.meta_data != noj)
|
||||
s = sqlalchemy.sql.select([image_locations]).where(
|
||||
image_locations.c.meta_data != noj)
|
||||
conn = migrate_engine.connect()
|
||||
res = conn.execute(s)
|
||||
|
||||
@ -64,9 +63,9 @@ def downgrade(migrate_engine):
|
||||
x = row['meta_data']
|
||||
meta_data = json.loads(x)
|
||||
if meta_data != {}:
|
||||
stmt = image_locations.update().\
|
||||
where(image_locations.c.id == row['id']).\
|
||||
values(old_meta_data=meta_data)
|
||||
stmt = image_locations.update().where(
image_locations.c.id == row['id']).values(
old_meta_data=meta_data)
conn.execute(stmt)
conn.close()
image_locations.columns['meta_data'].drop()

@ -25,7 +25,7 @@ TASKS_MIGRATE_COLUMNS = ['input', 'message', 'result']

def define_task_info_table(meta):
Table('tasks', meta, autoload=True)
#NOTE(nikhil): input and result are stored as text in the DB.
# NOTE(nikhil): input and result are stored as text in the DB.
# SQLAlchemy marshals the data to/from JSON using custom type
# JSONEncodedDict. It uses simplejson underneath.
task_info = Table('task_info',
@ -86,9 +86,7 @@ def downgrade(migrate_engine):
'message': task_info.message
}

tasks_table\
.update(values=values)\
.where(tasks_table.c.id == task_info.task_id)\
.execute()
tasks_table.update(values=values).where(
tasks_table.c.id == task_info.task_id).execute()

drop_tables([task_info_table])

@ -35,11 +35,10 @@ def upgrade(migrate_engine):
mapping = {'active': 'active', 'pending_delete': 'pending_delete',
'deleted': 'deleted', 'killed': 'deleted'}
for src, dst in six.iteritems(mapping):
subq = sqlalchemy.sql.select([images_table.c.id])\
.where(images_table.c.status == src)
image_locations_table.update(values={'status': dst})\
.where(image_locations_table.c.image_id.in_(subq))\
.execute()
subq = sqlalchemy.sql.select([images_table.c.id]).where(
images_table.c.status == src)
image_locations_table.update(values={'status': dst}).where(
image_locations_table.c.image_id.in_(subq)).execute()

def downgrade(migrate_engine):

@ -1 +0,0 @@
# template repository default versions module
@ -238,7 +238,7 @@ class TaskInfo(BASE, models.ModelBase):

task = relationship(Task, backref=backref('info', uselist=False))

#NOTE(nikhil): input and result are stored as text in the DB.
# NOTE(nikhil): input and result are stored as text in the DB.
# SQLAlchemy marshals the data to/from JSON using custom type
# JSONEncodedDict. It uses simplejson underneath.
input = Column(JSONEncodedDict())

@ -221,7 +221,7 @@ class TaskFactory(object):
return self.task_helper.proxy(t)

#Metadef Namespace classes
# Metadef Namespace classes
class MetadefNamespaceRepo(object):
def __init__(self, base,
namespace_proxy_class=None, namespace_proxy_kwargs=None):
@ -294,7 +294,7 @@ class MetadefNamespaceFactory(object):
return self.meta_namespace_helper.proxy(t)

#Metadef object classes
# Metadef object classes
class MetadefObjectRepo(object):
def __init__(self, base,
object_proxy_class=None, object_proxy_kwargs=None):
@ -355,7 +355,7 @@ class MetadefObjectFactory(object):
return self.meta_object_helper.proxy(t)

#Metadef ResourceType classes
# Metadef ResourceType classes
class MetadefResourceTypeRepo(object):
def __init__(self, base, resource_type_proxy_class=None,
resource_type_proxy_kwargs=None):
@ -406,7 +406,7 @@ class MetadefResourceTypeFactory(object):
return self.resource_type_helper.proxy(t)

#Metadef namespace property classes
# Metadef namespace property classes
class MetadefPropertyRepo(object):
def __init__(self, base,
property_proxy_class=None, property_proxy_kwargs=None):

@ -13,6 +13,7 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import glance_store

from glance.api import authorization
from glance.api import policy
@ -24,7 +25,6 @@ import glance.domain
import glance.location
import glance.notifier
import glance.quota
import glance_store

class Gateway(object):
@ -48,11 +48,10 @@ class Gateway(object):
policy_image_factory, context, self.notifier)
if property_utils.is_property_protection_enabled():
property_rules = property_utils.PropertyRules(self.policy)
protected_image_factory = property_protections.\
ProtectedImageFactoryProxy(notifier_image_factory, context,
property_rules)
pif = property_protections.ProtectedImageFactoryProxy(
notifier_image_factory, context, property_rules)
authorized_image_factory = authorization.ImageFactoryProxy(
protected_image_factory, context)
pif, context)
else:
authorized_image_factory = authorization.ImageFactoryProxy(
notifier_image_factory, context)
@ -80,11 +79,10 @@ class Gateway(object):
policy_image_repo, context, self.notifier)
if property_utils.is_property_protection_enabled():
property_rules = property_utils.PropertyRules(self.policy)
protected_image_repo = property_protections.\
ProtectedImageRepoProxy(notifier_image_repo, context,
property_rules)
pir = property_protections.ProtectedImageRepoProxy(
notifier_image_repo, context, property_rules)
authorized_image_repo = authorization.ImageRepoProxy(
protected_image_repo, context)
pir, context)
else:
authorized_image_repo = authorization.ImageRepoProxy(
notifier_image_repo, context)
@ -165,20 +163,16 @@ class Gateway(object):
resource_type_factory = glance.domain.MetadefResourceTypeFactory()
policy_resource_type_factory = policy.MetadefResourceTypeFactoryProxy(
resource_type_factory, context, self.policy)
authorized_resource_type_factory = \
authorization.MetadefResourceTypeFactoryProxy(
policy_resource_type_factory, context)
return authorized_resource_type_factory
return authorization.MetadefResourceTypeFactoryProxy(
policy_resource_type_factory, context)

def get_metadef_resource_type_repo(self, context):
resource_type_repo = glance.db.MetadefResourceTypeRepo(
context, self.db_api)
policy_object_repo = policy.MetadefResourceTypeRepoProxy(
resource_type_repo, context, self.policy)
authorized_resource_type_repo = \
authorization.MetadefResourceTypeRepoProxy(policy_object_repo,
context)
return authorized_resource_type_repo
return authorization.MetadefResourceTypeRepoProxy(policy_object_repo,
context)

def get_metadef_property_factory(self, context):
prop_factory = glance.domain.MetadefPropertyFactory()

@ -20,13 +20,13 @@ Cache driver that uses SQLite to store information about cached images
from __future__ import absolute_import
from contextlib import contextmanager
import os
import sqlite3
import stat
import time

from eventlet import sleep
from eventlet import timeout
from oslo.config import cfg
import sqlite3

from glance.common import exception
from glance.image_cache.drivers import base
@ -473,7 +473,7 @@ class Driver(base.Driver):
items.append((mtime, os.path.basename(path)))

items.sort()
return [image_id for (mtime, image_id) in items]
return [image_id for (modtime, image_id) in items]

def get_cache_files(self, basepath):
"""

@ -277,7 +277,7 @@ class Driver(base.Driver):
LOG.debug("Fetch finished, moving "
"'%(incomplete_path)s' to '%(final_path)s'",
dict(incomplete_path=incomplete_path,
final_path=final_path))
final_path=final_path))
os.rename(incomplete_path, final_path)

# Make sure that we "pop" the image from the queue...
@ -376,7 +376,7 @@ class Driver(base.Driver):
items.append((mtime, os.path.basename(path)))

items.sort()
return [image_id for (mtime, image_id) in items]
return [image_id for (modtime, image_id) in items]

def _reap_old_files(self, dirpath, entry_type, grace=None):
now = time.time()

@ -16,9 +16,7 @@
"""
Prefetches images into the Image Cache
"""

import eventlet

import glance_store

from glance.common import exception

@ -211,8 +211,8 @@ class ImageProxy(glance.domain.proxy.Image):
{'image_id': self.image.image_id,
'error': utils.exception_to_str(e)})
self.notifier.error('image.upload', msg)
raise webob.exc.HTTPBadRequest(explanation=
utils.exception_to_str(e))
raise webob.exc.HTTPBadRequest(
explanation=utils.exception_to_str(e))
except exception.Duplicate as e:
msg = (_("Unable to upload duplicate image data for image"
"%(image_id)s: %(error)s") %
@ -260,10 +260,9 @@ class TaskRepoProxy(glance.domain.proxy.TaskRepo):
self.context = context
self.notifier = notifier
proxy_kwargs = {'context': self.context, 'notifier': self.notifier}
super(TaskRepoProxy, self) \
.__init__(task_repo,
task_proxy_class=TaskProxy,
task_proxy_kwargs=proxy_kwargs)
super(TaskRepoProxy, self).__init__(task_repo,
task_proxy_class=TaskProxy,
task_proxy_kwargs=proxy_kwargs)

def add(self, task):
self.notifier.info('task.create',
@ -285,10 +284,10 @@ class TaskStubRepoProxy(glance.domain.proxy.TaskStubRepo):
self.context = context
self.notifier = notifier
proxy_kwargs = {'context': self.context, 'notifier': self.notifier}
super(TaskStubRepoProxy, self) \
.__init__(task_stub_repo,
task_stub_proxy_class=TaskStubProxy,
task_stub_proxy_kwargs=proxy_kwargs)
super(TaskStubRepoProxy, self).__init__(
task_stub_repo,
task_stub_proxy_class=TaskStubProxy,
task_stub_proxy_kwargs=proxy_kwargs)

class TaskFactoryProxy(glance.domain.proxy.TaskFactory):

@ -23,12 +23,6 @@ __all__ = [
import copy
import itertools

from glance.openstack.common import gettextutils

# TODO(zhiyan): Remove translation from in-line
# help message of option definition code.
gettextutils.install('glance', lazy=False)

import glance.api.middleware.context
import glance.api.policy
import glance.common.config
@ -40,6 +34,7 @@ import glance.common.wsgi
import glance.image_cache
import glance.image_cache.drivers.sqlite
import glance.notifier
from glance.openstack.common import gettextutils
import glance.openstack.common.lockutils
import glance.openstack.common.log
import glance.registry
@ -47,6 +42,10 @@ import glance.registry.client
import glance.registry.client.v1.api
import glance.scrubber

# TODO(zhiyan): Remove translation from in-line
# help message of option definition code.
gettextutils.install('glance', lazy=False)

_global_opt_lists = [
glance.openstack.common.log.common_cli_opts,

@ -14,10 +14,9 @@

import copy

import six

import glance_store as store
from oslo.config import cfg
import six

import glance.api.common
import glance.common.exception as exception

@ -15,10 +15,10 @@

import abc
import calendar
import eventlet
import os
import time

import eventlet
from oslo.config import cfg
import six

@ -125,6 +125,7 @@ class ScrubQueue(object):
@abc.abstractmethod
def has_image(self, image_id):
"""Returns whether the queue contains an image or not.

:param image_id: The opaque image identifier

:retval a boolean value to inform including or not

@ -549,8 +549,7 @@ class FunctionalTest(test_utils.BaseTestCase):

self.api_protocol = 'http'
self.api_port, api_sock = test_utils.get_unused_port_and_socket()
self.registry_port, registry_sock = \
test_utils.get_unused_port_and_socket()
self.registry_port, reg_sock = test_utils.get_unused_port_and_socket()

conf_dir = os.path.join(self.test_dir, 'etc')
utils.safe_mkdirs(conf_dir)
@ -572,7 +571,7 @@ class FunctionalTest(test_utils.BaseTestCase):

self.registry_server = RegistryServer(self.test_dir,
self.registry_port,
sock=registry_sock)
sock=reg_sock)

self.scrubber_daemon = ScrubberDaemon(self.test_dir)

@ -160,7 +160,7 @@ class DriverTests(object):
# Image IDs aren't predictable, but they should be populated
self.assertTrue(uuid.UUID(image['id']))

#NOTE(bcwaldon): the tags attribute should not be returned as a part
# NOTE(bcwaldon): the tags attribute should not be returned as a part
# of a core image entity
self.assertFalse('tags' in image)

@ -435,7 +435,7 @@ class DriverTests(object):
"""Specify a deleted image as a marker if showing deleted images."""
self.db_api.image_destroy(self.adm_context, UUID3)
images = self.db_api.image_get_all(self.adm_context, marker=UUID3)
#NOTE(bcwaldon): an admin should see all images (deleted or not)
# NOTE(bcwaldon): an admin should see all images (deleted or not)
self.assertEqual(2, len(images))

def test_image_get_all_marker_deleted_showing_deleted(self):
@ -894,7 +894,8 @@ class DriverTests(object):
auth_tok='user:%s:user' % TENANT2,
owner_is_tenant=False)
UUIDX = str(uuid.uuid4())
#we need private image and context.owner should not match image owner
# We need private image and context.owner should not match image
# owner
self.db_api.image_create(ctxt1, {'id': UUIDX,
'status': 'queued',
'is_public': False,
@ -946,7 +947,8 @@ class DriverTests(object):
auth_tok='user:%s:user' % TENANT2,
owner_is_tenant=False)
UUIDX = str(uuid.uuid4())
#we need private image and context.owner should not match image owner
# We need private image and context.owner should not match image
# owner
image = self.db_api.image_create(ctxt1, {'id': UUIDX,
'status': 'queued',
'is_public': False,
@ -976,7 +978,7 @@ class DriverTests(object):
self.db_api.image_tag_set_all(self.context, UUID1, ['ping', 'pong'])

tags = self.db_api.image_tag_get_all(self.context, UUID1)
#NOTE(bcwaldon): tag ordering should match exactly what was provided
# NOTE(bcwaldon): tag ordering should match exactly what was provided
self.assertEqual(['ping', 'pong'], tags)

def test_image_tag_get_all(self):
@ -1699,7 +1701,7 @@ class TestVisibility(test_utils.BaseTestCase):
'is_public': is_public,
}
fixtures.append(fixture)
return [build_image_fixture(**fixture) for fixture in fixtures]
return [build_image_fixture(**f) for f in fixtures]

def create_images(self, images):
for fixture in images:

@ -14,11 +14,10 @@

import copy

from glance import context
import glance.tests.functional.db as db_tests

from glance.common import config
from glance.common import exception
from glance import context
import glance.tests.functional.db as db_tests
from glance.tests import utils as test_utils

@ -21,7 +21,7 @@ Utility methods to set testcases up for Swift and/or S3 tests.
from __future__ import print_function

import BaseHTTPServer
import thread
import threading

from glance.openstack.common import units

@ -76,7 +76,7 @@ def setup_http(test):
def serve_requests(httpd):
httpd.serve_forever()

thread.start_new_thread(serve_requests, (remote_server,))
threading.Thread(target=serve_requests, args=(remote_server,)).start()
test.http_server = remote_server
test.http_ip = remote_ip
test.http_port = remote_port

@ -27,7 +27,7 @@ class TestRootApi(functional.FunctionalTest):
def test_version_configurations(self):
"""Test that versioning is handled properly through all channels"""

#v1 and v2 api enabled
# v1 and v2 api enabled
self.cleanup()
self.start_servers(**self.__dict__.copy())

@ -69,7 +69,7 @@ class TestRootApi(functional.FunctionalTest):
self.assertEqual(content, versions_json)
self.stop_servers()

#v2 api enabled
# v2 api enabled
self.cleanup()
self.api_server.enable_v1_api = False
self.api_server.enable_v2_api = True
@ -103,7 +103,7 @@ class TestRootApi(functional.FunctionalTest):
self.assertEqual(content, versions_json)
self.stop_servers()

#v1 api enabled
# v1 api enabled
self.cleanup()
self.api_server.enable_v1_api = True
self.api_server.enable_v2_api = False

@ -17,10 +17,10 @@

import datetime
import hashlib
import httplib2
import os
import sys

import httplib2
from six.moves import xrange

from glance.openstack.common import jsonutils

@ -32,14 +32,13 @@ from six.moves import xrange
from glance.openstack.common import jsonutils
from glance.openstack.common import units
from glance.tests import functional
from glance.tests.functional.store_utils import get_http_uri
from glance.tests.functional.store_utils import setup_http
from glance.tests.utils import execute
from glance.tests.utils import minimal_headers
from glance.tests.utils import skip_if_disabled
from glance.tests.utils import xattr_writes_supported

from glance.tests.functional.store_utils import get_http_uri
from glance.tests.functional.store_utils import setup_http

FIVE_KB = 5 * units.Ki

@ -56,7 +56,7 @@ class TestGlanceManage(functional.FunctionalTest):
self.assertTrue('CREATE TABLE image_tags' in out)
self.assertTrue('CREATE TABLE image_locations' in out)

#NOTE(bcwaldon): For some reason we need double-quotes around
# NOTE(bcwaldon): For some reason we need double-quotes around
# these two table names
# NOTE(vsergeyev): There are some cases when we have no double-quotes
self.assertTrue(

@ -15,10 +15,11 @@

"""Functional test case that tests logging output"""

import httplib2
import os
import stat

import httplib2

from glance.tests import functional

@ -433,8 +433,8 @@ class TestApi(functional.FunctionalTest):

# 21. GET /images with filter on user-defined property 'distro'.
# Verify both images are returned
path = "http://%s:%d/v1/images?property-distro=Ubuntu" % \
("127.0.0.1", self.api_port)
path = "http://%s:%d/v1/images?property-distro=Ubuntu" % (
"127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
@ -445,8 +445,8 @@ class TestApi(functional.FunctionalTest):

# 22. GET /images with filter on user-defined property 'distro' but
# with non-existent value. Verify no images are returned
path = "http://%s:%d/v1/images?property-distro=fedora" % \
("127.0.0.1", self.api_port)
path = "http://%s:%d/v1/images?property-distro=fedora" % (
"127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)

@ -20,10 +20,10 @@ based storage backend.
"""

import hashlib
import httplib2
import tempfile
import time

import httplib2
from six.moves import xrange

from glance.openstack.common import jsonutils

@ -12,11 +12,11 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import hashlib
import httplib2
import os

import httplib2

from glance.openstack.common import jsonutils
from glance.openstack.common import units
from glance.tests import functional

@ -27,18 +27,17 @@ test accounts only.
If a connection cannot be established, all the test cases are
skipped.
"""

import datetime
import hashlib
import httplib2
import os
import tempfile
import uuid

import httplib2

from glance.openstack.common import jsonutils
from glance.openstack.common import timeutils
from glance.openstack.common import units

from glance.tests import functional
from glance.tests.utils import minimal_headers
from glance.tests.utils import skip_if_disabled

@ -738,7 +738,7 @@ class TestImages(functional.FunctionalTest):
image = jsonutils.loads(response.text)
image_id = image['id']

#try to populate it with oversized data
# try to populate it with oversized data
path = self._url('/v2/images/%s/file' % image_id)
headers = self._headers({'Content-Type': 'application/octet-stream'})

@ -887,7 +887,7 @@ class TestImages(functional.FunctionalTest):
images = jsonutils.loads(response.text)['images']
self.assertEqual(0, len(images))

## Create an image for role member with extra props
# Create an image for role member with extra props
# Raises 403 since user is not allowed to set 'foo'
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
@ -899,7 +899,7 @@ class TestImages(functional.FunctionalTest):
response = requests.post(path, headers=headers, data=data)
self.assertEqual(403, response.status_code)

## Create an image for role member without 'foo'
# Create an image for role member without 'foo'
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'member'})
@ -1037,7 +1037,7 @@ class TestImages(functional.FunctionalTest):
images = jsonutils.loads(response.text)['images']
self.assertEqual(0, len(images))

## Create an image for role member with extra props
# Create an image for role member with extra props
# Raises 403 since user is not allowed to set 'foo'
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
@ -1049,7 +1049,7 @@ class TestImages(functional.FunctionalTest):
response = requests.post(path, headers=headers, data=data)
self.assertEqual(403, response.status_code)

## Create an image for role member without 'foo'
# Create an image for role member without 'foo'
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'member'})

@ -159,7 +159,7 @@ class TestMetadefObjects(functional.FunctionalTest):
"schema": "v2/schemas/metadefs/object"
}

#Simple key values
# Simple key values
checked_values = set([
u'name',
u'description',
@ -167,7 +167,7 @@ class TestMetadefObjects(functional.FunctionalTest):
for key, value in expected_metadata_object.items():
if(key in checked_values):
self.assertEqual(metadata_object[key], value, key)
#Complex key values - properties
# Complex key values - properties
for key, value in \
expected_metadata_object["properties"]['property2'].items():
self.assertEqual(

@ -154,8 +154,8 @@ class RequestDeserializer(wsgi.JSONRequestDeserializer):
for key in cls._disallowed_properties:
if key in image:
msg = _("Attribute '%s' is read-only.") % key
raise webob.exc.HTTPForbidden(explanation=
utils.exception_to_str(msg))
raise webob.exc.HTTPForbidden(
explanation=utils.exception_to_str(msg))

def create(self, request):
body = self._get_request_body(request)

@ -14,6 +14,7 @@ import datetime
import hashlib
import os
import tempfile

import testtools

from glance.openstack.common import jsonutils

@ -157,7 +157,7 @@ class TestTasksApi(base.ApiTest):
data = json.loads(content)
self.assertIsNotNone(data)
self.assertEqual(1, len(data['tasks']))
#NOTE(venkatesh) find a way to get expected_keys from tasks controller
# NOTE(venkatesh) find a way to get expected_keys from tasks controller
expected_keys = set(['id', 'type', 'owner', 'status',
'created_at', 'updated_at', 'self', 'schema'])
task = data['tasks'][0]

@ -78,9 +78,9 @@ class FakeRegistryConnection(object):

def stub_out_registry_and_store_server(stubs, base_dir, **kwargs):
"""
Mocks calls to 127.0.0.1 on 9191 and 9292 for testing so
that a real Glance server does not need to be up and
"""Mocks calls to 127.0.0.1 on 9191 and 9292 for testing.

Done so that a real Glance server does not need to be up and
running
"""

@ -117,7 +117,7 @@ def stub_out_registry_and_store_server(stubs, base_dir, **kwargs):
return True

def _clean_url(self, url):
#TODO(bcwaldon): Fix the hack that strips off v1
# TODO(bcwaldon): Fix the hack that strips off v1
return url.replace('/v1', '', 1) if url.startswith('/v1') else url

def putrequest(self, method, url):
@ -132,9 +132,10 @@ def stub_out_registry_and_store_server(stubs, base_dir, **kwargs):

def endheaders(self):
hl = [i.lower() for i in self.req.headers.keys()]
assert not ('content-length' in hl and
'transfer-encoding' in hl), \
'Content-Length and Transfer-Encoding are mutually exclusive'
assert(not ('content-length' in hl and
'transfer-encoding' in hl),
'Content-Length and Transfer-Encoding '
'are mutually exclusive')

def send(self, data):
# send() is called during chunked-transfer encoding, and
@ -163,9 +164,7 @@ def stub_out_registry_and_store_server(stubs, base_dir, **kwargs):
return res

def fake_get_connection_type(client):
"""
Returns the proper connection type
"""
"""Returns the proper connection type."""
DEFAULT_REGISTRY_PORT = 9191
DEFAULT_API_PORT = 9292

@ -199,15 +198,13 @@ def stub_out_registry_and_store_server(stubs, base_dir, **kwargs):

def stub_out_registry_server(stubs, **kwargs):
"""
Mocks calls to 127.0.0.1 on 9191 for testing so
that a real Glance Registry server does not need to be up and
running
"""Mocks calls to 127.0.0.1 on 9191 for testing.

Done so that a real Glance Registry server does not need to be up and
running.
"""
def fake_get_connection_type(client):
"""
Returns the proper connection type
"""
"""Returns the proper connection type."""
DEFAULT_REGISTRY_PORT = 9191

if (client.port == DEFAULT_REGISTRY_PORT and

@ -41,7 +41,7 @@ class TestCacheManageFilter(test_utils.BaseTestCase):
# call
resource = self.cache_manage_filter.process_request(bogus_request)

#check
# check
self.assertIsNone(resource)

@mock.patch.object(cached_images.Controller, "get_cached_images")

@ -9,11 +9,10 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import mock
import sys

import glance_store as store
import mock
from oslo.config import cfg
import six

@ -90,7 +90,7 @@ class TestGlanceCmdManage(test_utils.BaseTestCase):
@mock.patch.object(glance.cmd.cache_manage, 'get_client')
def test_queue_image_not_forced_not_confirmed(self,
mock_client, mock_confirm):
#options.forced set to False and queue confirmation set to False.
# options.forced set to False and queue confirmation set to False.

mock_confirm.return_value = False
mock_options = mock.Mock()
@ -102,7 +102,7 @@ class TestGlanceCmdManage(test_utils.BaseTestCase):
@mock.patch.object(glance.cmd.cache_manage, 'user_confirm')
@mock.patch.object(glance.cmd.cache_manage, 'get_client')
def test_queue_image_not_forced_confirmed(self, mock_client, mock_confirm):
#options.forced set to False and queue confirmation set to True.
# options.forced set to False and queue confirmation set to True.

mock_confirm.return_value = True
mock_options = mock.Mock()
@ -127,7 +127,7 @@ class TestGlanceCmdManage(test_utils.BaseTestCase):
def test_delete_cached_image_not_forced_not_confirmed(self,
mock_client,
mock_confirm):
#options.forced set to False and delete confirmation set to False.
# options.forced set to False and delete confirmation set to False.

mock_confirm.return_value = False
mock_options = mock.Mock()
@ -141,7 +141,7 @@ class TestGlanceCmdManage(test_utils.BaseTestCase):
@mock.patch.object(glance.cmd.cache_manage, 'get_client')
def test_delete_cached_image_not_forced_confirmed(self, mock_client,
mock_confirm):
#options.forced set to False and delete confirmation set to True.
# options.forced set to False and delete confirmation set to True.

mock_confirm.return_value = True
mock_options = mock.Mock()
@ -163,7 +163,7 @@ class TestGlanceCmdManage(test_utils.BaseTestCase):
def test_delete_cached_images_not_forced_not_confirmed(self,
mock_client,
mock_confirm):
#options.forced set to False and delete confirmation set to False.
# options.forced set to False and delete confirmation set to False.

mock_confirm.return_value = False
mock_options = mock.Mock()
@ -177,7 +177,7 @@ class TestGlanceCmdManage(test_utils.BaseTestCase):
@mock.patch.object(glance.cmd.cache_manage, 'get_client')
def test_delete_cached_images_not_forced_confirmed(self, mock_client,
mock_confirm):
#options.forced set to False and delete confirmation set to True.
# options.forced set to False and delete confirmation set to True.

mock_confirm.return_value = True
mock_options = mock.Mock()
@ -203,7 +203,7 @@ class TestGlanceCmdManage(test_utils.BaseTestCase):
def test_delete_queued_image_not_forced_not_confirmed(self,
mock_client,
mock_confirm):
#options.forced set to False and delete confirmation set to False.
# options.forced set to False and delete confirmation set to False.

mock_confirm.return_value = False
mock_options = mock.Mock()
@ -217,7 +217,7 @@ class TestGlanceCmdManage(test_utils.BaseTestCase):
@mock.patch.object(glance.cmd.cache_manage, 'get_client')
def test_delete_queued_image_not_forced_confirmed(self, mock_client,
mock_confirm):
#options.forced set to False and delete confirmation set to True.
# options.forced set to False and delete confirmation set to True.

mock_confirm.return_value = True
mock_options = mock.Mock()
@ -239,7 +239,7 @@ class TestGlanceCmdManage(test_utils.BaseTestCase):
def test_delete_queued_images_not_forced_not_confirmed(self,
mock_client,
mock_confirm):
#options.forced set to False and delete confirmation set to False.
# options.forced set to False and delete confirmation set to False.

mock_confirm.return_value = False
mock_options = mock.Mock()
@ -253,7 +253,7 @@ class TestGlanceCmdManage(test_utils.BaseTestCase):
@mock.patch.object(glance.cmd.cache_manage, 'get_client')
def test_delete_queued_images_not_forced_confirmed(self, mock_client,
mock_confirm):
#options.forced set to False and delete confirmation set to True.
# options.forced set to False and delete confirmation set to True.
mock_confirm.return_value = True
mock_options = mock.Mock()
mock_options.force = False

@ -14,7 +14,6 @@
# under the License.

import testtools

import webob

import glance.api.common

@ -12,9 +12,9 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import urllib2

import mock
import urllib2

from glance.common.scripts.image_import import main as image_import_script
import glance.tests.utils as test_utils

@ -12,9 +12,9 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import urllib2

import mock
import urllib2

from glance.common import exception
from glance.common.scripts import utils as script_utils

@ -15,12 +15,12 @@
# under the License.

import datetime
import gettext
import socket

from babel import localedata
import eventlet.patcher
import fixtures
import gettext
import mock
import routes
import six

@ -29,7 +29,6 @@ from glance.db.sqlalchemy import api
import glance.tests.unit.utils as unit_test_utils
import glance.tests.utils as test_utils

CONF = cfg.CONF
CONF.import_opt('metadata_encryption_key', 'glance.common.config')

@ -547,8 +546,8 @@ class TestImageMemberRepo(test_utils.BaseTestCase):
def test_remove_image_member_does_not_exist(self):
fake_uuid = str(uuid.uuid4())
image = self.image_repo.get(UUID2)
fake_member = glance.domain.ImageMemberFactory()\
.new_image_member(image, TENANT4)
fake_member = glance.domain.ImageMemberFactory().new_image_member(
image, TENANT4)
fake_member.id = fake_uuid
exc = self.assertRaises(exception.NotFound,
self.image_member_repo.remove,

@ -423,6 +423,6 @@ class TestMetadefRepo(test_utils.BaseTestCase):
object)

def test_list_resource_type(self):
resource_type = self.resource_type_repo.list(filters=
{'namespace': NAMESPACE1})
resource_type = self.resource_type_repo.list(
filters={'namespace': NAMESPACE1})
self.assertEqual(len(resource_type), 0)

@ -195,9 +195,9 @@ class TestImageMember(test_utils.BaseTestCase):
self.image_member_factory = domain.ImageMemberFactory()
self.image_factory = domain.ImageFactory()
self.image = self.image_factory.new_image()
self.image_member = self.image_member_factory\
.new_image_member(image=self.image,
member_id=TENANT1)
self.image_member = self.image_member_factory.new_image_member(
image=self.image,
member_id=TENANT1)

def test_status_enumerated(self):
self.image_member.status = 'pending'
@ -377,7 +377,7 @@ class TestTask(test_utils.BaseTestCase):
self.assertEqual(self.task.status, 'failure')

def test_invalid_status_transitions_from_pending(self):
#test do not allow transition from pending to success
# test do not allow transition from pending to success
self.assertRaises(
exception.InvalidTaskStatusTransition,
self.task.succeed,
@ -385,14 +385,14 @@ class TestTask(test_utils.BaseTestCase):
)

def test_invalid_status_transitions_from_success(self):
#test do not allow transition from success to processing
# test do not allow transition from success to processing
self.task.begin_processing()
self.task.succeed('')
self.assertRaises(
exception.InvalidTaskStatusTransition,
self.task.begin_processing
)
#test do not allow transition from success to failure
# test do not allow transition from success to failure
self.assertRaises(
exception.InvalidTaskStatusTransition,
self.task.fail,
@ -400,14 +400,14 @@ class TestTask(test_utils.BaseTestCase):
)

def test_invalid_status_transitions_from_failure(self):
#test do not allow transition from failure to processing
# test do not allow transition from failure to processing
self.task.begin_processing()
self.task.fail('')
self.assertRaises(
exception.InvalidTaskStatusTransition,
self.task.begin_processing
)
#test do not allow transition from failure to success
# test do not allow transition from failure to success
self.assertRaises(
exception.InvalidTaskStatusTransition,
self.task.succeed,

@ -215,8 +215,8 @@ class ImageServiceTestCase(test_utils.BaseTestCase):
image_meta_with_proto['Content-Length'] = len(image_body)

for key in IMG_RESPONSE_ACTIVE:
image_meta_with_proto['x-image-meta-%s' % key] = \
IMG_RESPONSE_ACTIVE[key]
image_meta_with_proto[
'x-image-meta-%s' % key] = IMG_RESPONSE_ACTIVE[key]

c.conn.prime_request('POST', 'v1/images',
image_body, image_meta_with_proto,
@ -230,8 +230,8 @@ class ImageServiceTestCase(test_utils.BaseTestCase):
c = glance_replicator.ImageService(FakeHTTPConnection(), 'noauth')

image_meta = {'id': '5dcddce0-cba5-4f18-9cf4-9853c7b207a6'}
image_meta_headers = \
glance_replicator.ImageService._dict_to_headers(image_meta)
image_meta_headers = glance_replicator.ImageService._dict_to_headers(
image_meta)
image_meta_headers['x-auth-token'] = 'noauth'
image_meta_headers['Content-Type'] = 'application/octet-stream'
c.conn.prime_request('PUT', 'v1/images/%s' % image_meta['id'],

@ -27,7 +27,7 @@ import stubout
from glance.common import exception
from glance import image_cache
from glance.openstack.common import units
#NOTE(bcwaldon): This is imported to load the registry config options
# NOTE(bcwaldon): This is imported to load the registry config options
import glance.registry # noqa
from glance.tests import utils as test_utils
from glance.tests.utils import skip_if_disabled
@ -152,8 +152,8 @@ class ImageCacheTestCase(object):
incomplete_file.close()

mtime = os.path.getmtime(incomplete_file_path_1)
pastday = datetime.datetime.fromtimestamp(mtime) - \
datetime.timedelta(days=1)
pastday = (datetime.datetime.fromtimestamp(mtime) -
datetime.timedelta(days=1))
atime = int(time.mktime(pastday.timetuple()))
mtime = atime
os.utime(incomplete_file_path_1, (atime, mtime))

@ -12,9 +12,9 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os

import mock
import os

from glance.common import exception
from glance.image_cache import client
@ -35,22 +35,22 @@ class CacheClientTestCase(utils.BaseTestCase):

def test_get_cached_images(self):
expected_data = '{"cached_images": "some_images"}'
self.client.do_request.return_value = \
utils.FakeHTTPResponse(data=expected_data)
self.client.do_request.return_value = utils.FakeHTTPResponse(
data=expected_data)
self.assertEqual(self.client.get_cached_images(), "some_images")
self.client.do_request.assert_called_with("GET", "/cached_images")

def test_get_queued_images(self):
expected_data = '{"queued_images": "some_images"}'
self.client.do_request.return_value = \
utils.FakeHTTPResponse(data=expected_data)
self.client.do_request.return_value = utils.FakeHTTPResponse(
data=expected_data)
self.assertEqual(self.client.get_queued_images(), "some_images")
self.client.do_request.assert_called_with("GET", "/queued_images")

def test_delete_all_cached_images(self):
expected_data = '{"num_deleted": 4}'
self.client.do_request.return_value = \
utils.FakeHTTPResponse(data=expected_data)
self.client.do_request.return_value = utils.FakeHTTPResponse(
data=expected_data)
self.assertEqual(self.client.delete_all_cached_images(), 4)
self.client.do_request.assert_called_with("DELETE", "/cached_images")

@ -68,8 +68,8 @@ class CacheClientTestCase(utils.BaseTestCase):

def test_delete_all_queued_images(self):
expected_data = '{"num_deleted": 4}'
self.client.do_request.return_value = \
utils.FakeHTTPResponse(data=expected_data)
self.client.do_request.return_value = utils.FakeHTTPResponse(
data=expected_data)
self.assertEqual(self.client.delete_all_queued_images(), 4)
self.client.do_request.assert_called_with("DELETE", "/queued_images")

@ -49,6 +49,7 @@ from glance.db.sqlalchemy import models
from glance.openstack.common import jsonutils
from glance.openstack.common import timeutils

CONF = cfg.CONF
CONF.import_opt('metadata_encryption_key', 'glance.common.config')

@ -118,7 +119,7 @@ class MigrationsMixin(test_migrations.WalkVersionsMixin):
self._create_unversioned_001_db(self.migrate_engine)

old_version = migration.INIT_VERSION
#we must start from version 1
# we must start from version 1
migration.INIT_VERSION = 1
self.addCleanup(setattr, migration, 'INIT_VERSION', old_version)

@ -319,9 +320,8 @@ class MigrationsMixin(test_migrations.WalkVersionsMixin):
uuids = {}
for name in ('kernel', 'ramdisk', 'normal'):
image_name = '%s migration 012 test' % name
rows = images.select()\
.where(images.c.name == image_name)\
.execute().fetchall()
rows = images.select().where(
images.c.name == image_name).execute().fetchall()

self.assertEqual(len(rows), 1)

@ -331,19 +331,16 @@ class MigrationsMixin(test_migrations.WalkVersionsMixin):
uuids[name] = row['id']

# Find all image_members to ensure image_id has been updated
results = image_members.select()\
.where(image_members.c.image_id ==
uuids['normal'])\
.execute().fetchall()
results = image_members.select().where(
image_members.c.image_id == uuids['normal']).execute().fetchall()
self.assertEqual(len(results), 1)

# Find all image_properties to ensure image_id has been updated
# as well as ensure kernel_id and ramdisk_id values have been
# updated too
results = image_properties.select()\
.where(image_properties.c.image_id ==
uuids['normal'])\
.execute().fetchall()
results = image_properties.select().where(
image_properties.c.image_id == uuids['normal']
).execute().fetchall()
self.assertEqual(len(results), 2)
for row in results:
self.assertIn(row['name'], ('kernel_id', 'ramdisk_id'))
@ -363,9 +360,8 @@ class MigrationsMixin(test_migrations.WalkVersionsMixin):
ids = {}
for name in ('kernel', 'ramdisk', 'normal'):
image_name = '%s migration 012 test' % name
rows = images.select()\
.where(images.c.name == image_name)\
.execute().fetchall()
rows = images.select().where(
images.c.name == image_name).execute().fetchall()
self.assertEqual(len(rows), 1)

row = rows[0]
@ -374,19 +370,15 @@ class MigrationsMixin(test_migrations.WalkVersionsMixin):
ids[name] = row['id']

# Find all image_members to ensure image_id has been updated
results = image_members.select()\
.where(image_members.c.image_id ==
ids['normal'])\
.execute().fetchall()
results = image_members.select().where(
image_members.c.image_id == ids['normal']).execute().fetchall()
self.assertEqual(len(results), 1)

# Find all image_properties to ensure image_id has been updated
# as well as ensure kernel_id and ramdisk_id values have been
# updated too
results = image_properties.select()\
.where(image_properties.c.image_id ==
ids['normal'])\
.execute().fetchall()
results = image_properties.select().where(
image_properties.c.image_id == ids['normal']).execute().fetchall()
self.assertEqual(len(results), 2)
for row in results:
self.assertIn(row['name'], ('kernel_id', 'ramdisk_id'))
@ -622,7 +614,7 @@ class MigrationsMixin(test_migrations.WalkVersionsMixin):
}
data = [
{'id': 'fake-19-1', 'location': 'http://glance.example.com'},
#NOTE(bcwaldon): images with a location of None should
# NOTE(bcwaldon): images with a location of None should
# not be migrated
{'id': 'fake-19-2', 'location': None},
]
@ -670,8 +662,8 @@ class MigrationsMixin(test_migrations.WalkVersionsMixin):

def _check_026(self, engine, data):
image_locations = db_utils.get_table(engine, 'image_locations')
results = image_locations.select()\
.where(image_locations.c.image_id == data).execute()
results = image_locations.select().where(
image_locations.c.image_id == data).execute()

r = list(results)
self.assertEqual(len(r), 1)
@ -756,8 +748,8 @@ class MigrationsMixin(test_migrations.WalkVersionsMixin):
image_id = data[1]
image_locations = db_utils.get_table(engine, 'image_locations')

records = image_locations.select().\
where(image_locations.c.image_id == image_id).execute().fetchall()
records = image_locations.select().where(
image_locations.c.image_id == image_id).execute().fetchall()

for r in records:
d = jsonutils.loads(r['meta_data'])
@ -768,8 +760,8 @@ class MigrationsMixin(test_migrations.WalkVersionsMixin):

image_locations = db_utils.get_table(engine, 'image_locations')

records = image_locations.select().\
where(image_locations.c.image_id == image_id).execute().fetchall()
records = image_locations.select().where(
image_locations.c.image_id == image_id).execute().fetchall()

for r in records:
md = r['meta_data']

@ -855,9 +847,8 @@ class MigrationsMixin(test_migrations.WalkVersionsMixin):

def _check_031(self, engine, image_id):
locations_table = db_utils.get_table(engine, 'image_locations')
result = locations_table.select()\
.where(locations_table.c.image_id == image_id)\
.execute().fetchall()
result = locations_table.select().where(
locations_table.c.image_id == image_id).execute().fetchall()

locations = set([(x['value'], x['meta_data']) for x in result])
actual_locations = set([
@ -980,8 +971,8 @@ class MigrationsMixin(test_migrations.WalkVersionsMixin):
'deleted', 'pending_delete', 'deleted']

for (idx, image_id) in enumerate(data):
results = image_locations.select()\
.where(image_locations.c.image_id == image_id).execute()
results = image_locations.select().where(
image_locations.c.image_id == image_id).execute()
r = list(results)
self.assertEqual(len(r), 1)
self.assertIn('status', r[0])
@ -1074,7 +1065,7 @@ class MigrationsMixin(test_migrations.WalkVersionsMixin):
col_data = [col.name for col in table.columns]
self.assertEqual(expected_cols, col_data)

# metadef_properties
# metadef_properties
table = sqlalchemy.Table("metadef_properties", meta, autoload=True)
index_namespace_id_name = (
'ix_metadef_properties_namespace_id_name',
@ -1092,7 +1083,7 @@ class MigrationsMixin(test_migrations.WalkVersionsMixin):
col_data = [col.name for col in table.columns]
self.assertEqual(expected_cols, col_data)

# metadef_resource_types
# metadef_resource_types
table = sqlalchemy.Table(
"metadef_resource_types", meta, autoload=True)
index_resource_types_name = (
@ -1109,7 +1100,7 @@ class MigrationsMixin(test_migrations.WalkVersionsMixin):
col_data = [col.name for col in table.columns]
self.assertEqual(expected_cols, col_data)

# metadef_namespace_resource_types
# metadef_namespace_resource_types
table = sqlalchemy.Table(
"metadef_namespace_resource_types", meta, autoload=True)
index_ns_res_types_res_type_id_ns_id = (

@ -12,11 +12,10 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid

import mock
from mock import patch
import uuid

import six

from glance.common import exception
@ -145,7 +144,7 @@ class TestImageQuota(test_utils.BaseTestCase):
def test_quota_exceeded_no_size(self):
quota = 10
data = '*' * (quota + 1)
#NOTE(jbresnah) When the image size is None it means that it is
# NOTE(jbresnah) When the image size is None it means that it is
# not known. In this case the only time we will raise an
# exception is when there is no room left at all, thus we know
# it will not fit.
@ -592,16 +591,16 @@ class TestImageMemberQuotas(test_utils.BaseTestCase):

self.image_member_factory.new_image_member(self.image,
'fake_id')
self.base_image_member_factory.new_image_member\
.assert_called_once_with(self.image.base, 'fake_id')
nim = self.base_image_member_factory.new_image_member
nim .assert_called_once_with(self.image.base, 'fake_id')

def test_new_image_member_unlimited_members(self):
self.config(image_member_quota=-1)

self.image_member_factory.new_image_member(self.image,
'fake_id')
self.base_image_member_factory.new_image_member\
.assert_called_once_with(self.image.base, 'fake_id')
nim = self.base_image_member_factory.new_image_member
nim.assert_called_once_with(self.image.base, 'fake_id')

def test_new_image_member_too_many_members(self):
self.config(image_member_quota=0)

@ -12,9 +12,8 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mox

import glance_store
import mox

from glance.common import exception
import glance.location
@ -141,7 +140,7 @@ class TestStoreImage(utils.BaseTestCase):
self.store_api, self.store_utils)
image.set_data('YYYY', 4)
self.assertEqual(image.size, 4)
#NOTE(markwash): FakeStore returns image_id for location
# NOTE(markwash): FakeStore returns image_id for location
self.assertEqual(image.locations[0]['url'], UUID2)
self.assertEqual(image.checksum, 'Z')
self.assertEqual(image.status, 'active')
@ -174,7 +173,7 @@ class TestStoreImage(utils.BaseTestCase):
self.store_api, self.store_utils)
image.set_data('YYYY', None)
self.assertEqual(image.size, 4)
#NOTE(markwash): FakeStore returns image_id for location
# NOTE(markwash): FakeStore returns image_id for location
self.assertEqual(image.locations[0]['url'], UUID2)
self.assertEqual(image.checksum, 'Z')
self.assertEqual(image.status, 'active')
@ -190,7 +189,7 @@ class TestStoreImage(utils.BaseTestCase):
self.store_api, self.store_utils)
image.set_data(data, len)
self.assertEqual(image.size, len)
#NOTE(markwash): FakeStore returns image_id for location
# NOTE(markwash): FakeStore returns image_id for location
location = {'url': image_id, 'metadata': {}, 'status': 'active'}
self.assertEqual(image.locations, [location])
self.assertEqual(image_stub.locations, [location])

@ -12,10 +12,8 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import mock

import glance_store
import mock

import glance.location
from glance.tests.unit import base

@ -43,7 +43,7 @@ BASE_URI = 'http://storeurl.com/container'

def sort_url_by_qs_keys(url):
#NOTE(kragniz): this only sorts the keys of the query string of a url.
# NOTE(kragniz): this only sorts the keys of the query string of a url.
# For example, an input of '/v2/tasks?sort_key=id&sort_dir=asc&limit=10'
# returns '/v2/tasks?limit=10&sort_dir=asc&sort_key=id'. This is to prevent
# non-deterministic ordering of the query string causing problems with unit

@ -38,7 +38,6 @@ from glance.db.sqlalchemy import api as db_api
from glance.db.sqlalchemy import models as db_models
from glance.openstack.common import jsonutils
from glance.openstack.common import timeutils

import glance.registry.client.v1.api as registry
from glance.tests.unit import base
import glance.tests.unit.utils as unit_test_utils
@ -1406,20 +1405,19 @@ class TestGlanceAPI(base.IsolatedUnitTest):

req = webob.Request.blank("/images/%s" % image_id)
req.method = 'PUT'
req.headers['Content-Type'] = \
'application/octet-stream'
req.headers['Content-Type'] = 'application/octet-stream'
req.body = "chunk00000remainder"

with mock.patch.object(upload_utils, 'initiate_deletion') as \
mock_init_del:
with mock.patch.object(
upload_utils, 'initiate_deletion') as mock_init_del:
mock_init_del.side_effect = mock_initiate_deletion
with mock.patch.object(registry, 'get_image_metadata') as \
mock_get_meta:
with mock.patch.object(
registry, 'get_image_metadata') as mock_get_meta:
mock_get_meta.side_effect = mock_get_image_metadata
with mock.patch.object(db_api, '_image_get') as mock_db_get:
mock_db_get.side_effect = mock_image_get
with mock.patch.object(db_api, '_image_update') as \
mock_db_update:
with mock.patch.object(
db_api, '_image_update') as mock_db_update:
mock_db_update.side_effect = mock_image_update

# Expect a 409 Conflict.
@ -3354,7 +3352,7 @@ class TestImageSerializer(base.IsolatedUnitTest):

self.serializer.show(response, self.FIXTURE)

#just make sure the app_iter is called
# just make sure the app_iter is called
for chunk in response.app_iter:
pass

@ -3412,7 +3410,7 @@ class TestImageSerializer(base.IsolatedUnitTest):

self.stubs.Set(self.serializer.notifier, 'error', fake_error)

#expected and actually sent bytes differ
# expected and actually sent bytes differ
glance.api.common.image_send_notification(17, 19, image_meta, req,
self.serializer.notifier)

@ -32,7 +32,6 @@ from glance.db.sqlalchemy import api as db_api
from glance.db.sqlalchemy import models as db_models
from glance.openstack.common import jsonutils
from glance.openstack.common import timeutils

from glance.registry.api import v1 as rserver
from glance.tests.unit import base
from glance.tests import utils as test_utils
@ -1287,8 +1286,8 @@ class TestRegistryAPI(base.IsolatedUnitTest, test_utils.RegistryAPIMixIn):
self.log_image_id = False

def fake_log_info(msg):
if 'Successfully created image ' \
'0564c64c-3545-4e34-abfb-9d18e5f2f2f9' in msg:
if ('Successfully created image '
'0564c64c-3545-4e34-abfb-9d18e5f2f2f9' in msg):
self.log_image_id = True
self.stubs.Set(rserver.images.LOG, 'info', fake_log_info)

@ -1940,7 +1939,8 @@ class TestSharability(test_utils.BaseTestCase):
auth_tok='user:%s:admin' % TENANT2,
owner_is_tenant=False)
UUIDX = str(uuid.uuid4())
#we need private image and context.owner should not match image owner
# We need private image and context.owner should not match image
# owner
image = db_api.image_create(ctxt1, {'id': UUIDX,
'status': 'queued',
'is_public': False,
@ -1955,7 +1955,8 @@ class TestSharability(test_utils.BaseTestCase):
auth_tok='user:%s:user' % TENANT1,
owner_is_tenant=True)
UUIDX = str(uuid.uuid4())
#we need private image and context.owner should not match image owner
# We need private image and context.owner should not match image
# owner
image = db_api.image_create(ctxt1, {'id': UUIDX,
'status': 'queued',
'is_public': False,
@ -1974,7 +1975,8 @@ class TestSharability(test_utils.BaseTestCase):
auth_tok='user:%s:user' % TENANT2,
owner_is_tenant=False)
UUIDX = str(uuid.uuid4())
#we need private image and context.owner should not match image owner
# We need private image and context.owner should not match image
# owner
image = db_api.image_create(ctxt1, {'id': UUIDX,
'status': 'queued',
'is_public': False,
@ -1993,7 +1995,8 @@ class TestSharability(test_utils.BaseTestCase):
auth_tok='user:%s:user' % TENANT2,
owner_is_tenant=False)
UUIDX = str(uuid.uuid4())
#we need private image and context.owner should not match image owner
# We need private image and context.owner should not match image
# owner
image = db_api.image_create(ctxt1, {'id': UUIDX,
'status': 'queued',
'is_public': False,
@ -2018,7 +2021,8 @@ class TestSharability(test_utils.BaseTestCase):
auth_tok='user:%s:user' % TENANT2,
owner_is_tenant=False)
UUIDX = str(uuid.uuid4())
#we need private image and context.owner should not match image owner
# We need private image and context.owner should not match image
# owner
image = db_api.image_create(ctxt1, {'id': UUIDX,
'status': 'queued',
'is_public': False,
@ -2042,7 +2046,8 @@ class TestSharability(test_utils.BaseTestCase):
auth_tok='user:%s:user' % TENANT1,
owner_is_tenant=True)
UUIDX = str(uuid.uuid4())
#we need private image and context.owner should not match image owner
# We need private image and context.owner should not match image
# owner
image = db_api.image_create(ctxt1, {'id': UUIDX,
'status': 'queued',
'is_public': False,

@ -39,7 +39,7 @@ _gen_uuid = lambda: str(uuid.uuid4())
UUID1 = _gen_uuid()
UUID2 = _gen_uuid()

#NOTE(bcwaldon): needed to init config_dir cli opt
# NOTE(bcwaldon): needed to init config_dir cli opt
config.parse_args(args=[])

@ -14,10 +14,10 @@
# under the License.

from contextlib import contextmanager
import mock
from mock import patch

import glance_store
import mock
from mock import patch
import webob.exc

from glance.api.v1 import upload_utils

@ -12,18 +12,16 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import mock
import uuid

import glance_store
import mock
import six
import webob

import glance.api.v2.image_data
from glance.common import exception
from glance.common import wsgi

from glance.tests.unit import base
import glance.tests.unit.utils as unit_test_utils
import glance.tests.utils as test_utils

@ -24,7 +24,6 @@ from glance.openstack.common import jsonutils
import glance.tests.unit.utils as unit_test_utils
import glance.tests.utils as test_utils

DATETIME = datetime.datetime(2012, 5, 16, 15, 27, 36, 325355)
ISOTIME = '2012-05-16T15:27:36Z'

@ -97,11 +96,11 @@ class TestImageMembersController(test_utils.BaseTestCase):
self.notifier = unit_test_utils.FakeNotifier()
self._create_images()
self._create_image_members()
self.controller = glance.api.v2.image_members\
.ImageMembersController(self.db,
self.policy,
self.notifier,
self.store)
self.controller = glance.api.v2.image_members.ImageMembersController(
self.db,
self.policy,
self.notifier,
self.store)
glance_store.create_stores()

def _create_images(self):
Some files were not shown because too many files have changed in this diff.
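The hunks above repeat a small set of mechanical conversions required by the stricter style checks: block comments gain a space after '#' (the pep8 E265 rule), trailing-backslash line continuations are replaced by parenthesized wrapping, and module imports are regrouped (standard library, then third-party, then project code). A minimal sketch of the target style follows; the function and variable names in it are made up for illustration and do not come from the Glance tree:

import os

import six


def build_image_filter_url(host, port):
    # Block comments start with '# ', and long expressions are wrapped
    # with parentheses instead of a trailing backslash.
    return "http://%s:%d/v1/images?property-distro=%s" % (
        host, port, six.text_type('Ubuntu'))


def sorted_image_paths(basepath, items):
    # Tuple unpacking keeps descriptive loop variable names
    # (modtime, image_id) rather than reusing an outer name.
    items.sort()
    return [os.path.join(basepath, image_id)
            for (modtime, image_id) in items]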