diff --git a/etc/glance-api-paste.ini b/etc/glance-api-paste.ini index 86a4cdb1..7e1184b3 100644 --- a/etc/glance-api-paste.ini +++ b/etc/glance-api-paste.ini @@ -39,6 +39,7 @@ paste.composite_factory = glance.api:root_app_factory /: apiversions /v1: apiv1app /v2: apiv2app +/v3: apiv3app [app:apiversions] paste.app_factory = glance.api.versions:create_resource @@ -49,6 +50,9 @@ paste.app_factory = glance.api.v1.router:API.factory [app:apiv2app] paste.app_factory = glance.api.v2.router:API.factory +[app:apiv3app] +paste.app_factory = glance.api.v3.router:API.factory + [filter:versionnegotiation] paste.filter_factory = glance.api.middleware.version_negotiation:VersionNegotiationFilter.factory diff --git a/glance/api/__init__.py b/glance/api/__init__.py index eaeec572..af69df76 100644 --- a/glance/api/__init__.py +++ b/glance/api/__init__.py @@ -24,4 +24,6 @@ def root_app_factory(loader, global_conf, **local_conf): del local_conf['/v1'] if not CONF.enable_v2_api: del local_conf['/v2'] + if not CONF.enable_v3_api: + del local_conf['/v3'] return paste.urlmap.urlmap_factory(loader, global_conf, **local_conf) diff --git a/glance/api/middleware/version_negotiation.py b/glance/api/middleware/version_negotiation.py index 4a57f09e..67a61388 100644 --- a/glance/api/middleware/version_negotiation.py +++ b/glance/api/middleware/version_negotiation.py @@ -86,6 +86,8 @@ class VersionNegotiationFilter(wsgi.Middleware): major_version = 1 elif subject in ('v2', 'v2.0', 'v2.1', 'v2.2') and CONF.enable_v2_api: major_version = 2 + elif subject in ('v3', 'v3.0') and CONF.enable_v3_api: + major_version = 3 else: raise ValueError() diff --git a/glance/api/v3/__init__.py b/glance/api/v3/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/glance/api/v3/artifacts.py b/glance/api/v3/artifacts.py new file mode 100644 index 00000000..b6067c58 --- /dev/null +++ b/glance/api/v3/artifacts.py @@ -0,0 +1,701 @@ +# Copyright (c) 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import os +import sys + +import glance_store +import jsonschema +from oslo.config import cfg +from oslo.serialization import jsonutils as json +from oslo.utils import excutils +import semantic_version +import six +import webob.exc + +from glance.artifacts import gateway +from glance.artifacts import Showlevel +from glance.common.artifacts import loader +from glance.common.artifacts import serialization +from glance.common import exception +from glance.common import jsonpatchvalidator +from glance.common import utils +from glance.common import wsgi +import glance.db +from glance import i18n +from oslo_log import log as logging + +LOG = logging.getLogger(__name__) +_LE = i18n._LE +_ = i18n._ + +possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]), + os.pardir, + os.pardir)) +if os.path.exists(os.path.join(possible_topdir, 'glance', '__init__.py')): + sys.path.insert(0, possible_topdir) + +CONF = cfg.CONF +CONF.import_group("profiler", "glance.common.wsgi") + + +class ArtifactsController(object): + def __init__(self, db_api=None, store_api=None, plugins=None): + self.db_api = db_api or glance.db.get_api() + self.store_api = store_api or glance_store + self.plugins = plugins or loader.ArtifactsPluginLoader( + 'glance.artifacts.types') + self.gateway = gateway.Gateway(self.db_api, + self.store_api, self.plugins) + + @staticmethod + def _do_update_op(artifact, change): + """Call corresponding method of the updater proxy. + + Here 'change' is a typical jsonpatch request dict: + * 'path' - a json-pointer string; + * 'op' - one of the allowed operation types; + * 'value' - value to set (omitted when op = remove) + """ + update_op = getattr(artifact, change['op']) + update_op(change['path'], change.get('value')) + return artifact + + @staticmethod + def _get_artifact_with_dependencies(repo, art_id, + type_name=None, type_version=None): + """Retrieves an artifact with dependencies from db by its id. + + Show level is direct (only direct dependencies are shown). 
+ """ + return repo.get(art_id, show_level=Showlevel.DIRECT, + type_name=type_name, type_version=type_version) + + def show(self, req, type_name, type_version, + show_level=Showlevel.TRANSITIVE, **kwargs): + """Retrieves one artifact by id with its dependencies""" + artifact_repo = self.gateway.get_artifact_repo(req.context) + try: + art_id = kwargs.get('id') + artifact = artifact_repo.get(art_id, type_name=type_name, + type_version=type_version, + show_level=show_level) + return artifact + except exception.ArtifactNotFound as e: + raise webob.exc.HTTPNotFound(explanation=e.msg) + + def list(self, req, type_name, type_version, state, **kwargs): + """Retrieves a list of artifacts that match some params""" + artifact_repo = self.gateway.get_artifact_repo(req.context) + filters = kwargs.pop('filters', {}) + + filters.update(type_name={'value': type_name}, + state={'value': state}) + if type_version is not None: + filters['type_version'] = {'value': type_version} + if 'version' in filters and filters['version']['value'] == 'latest': + if 'name' in filters: + filters['version']['value'] = self._get_latest_version( + req, filters['name']['value'], type_name, type_version) + else: + raise webob.exc.HTTPBadRequest( + 'Filtering by version without specifying a name is not' + ' supported.') + + return artifact_repo.list(filters=filters, + show_level=Showlevel.BASIC, + **kwargs) + + def _get_latest_version(self, req, name, type_name, type_version=None, + state='creating'): + artifact_repo = self.gateway.get_artifact_repo(req.context) + filters = dict(name={"value": name}, + type_name={"value": type_name}, + state={"value": state}) + if type_version is not None: + filters["type_version"] = {"value": type_version} + result = artifact_repo.list(filters=filters, + show_level=Showlevel.NONE, + sort_keys=['version']) + if len(result): + return result[0].version + + msg = "No artifacts have been found" + raise exception.ArtifactNotFound(message=msg) + + @utils.mutating + def create(self, req, artifact_type, artifact_data, **kwargs): + try: + artifact_factory = self.gateway.get_artifact_type_factory( + req.context, artifact_type) + new_artifact = artifact_factory.new_artifact(**artifact_data) + artifact_repo = self.gateway.get_artifact_repo(req.context) + artifact_repo.add(new_artifact) + # retrieve artifact from db + return self._get_artifact_with_dependencies(artifact_repo, + new_artifact.id) + except TypeError as e: + raise webob.exc.HTTPBadRequest(explanation=e) + except exception.ArtifactNotFound as e: + raise webob.exc.HTTPBadRequest(explanation=e.msg) + except exception.DuplicateLocation as dup: + raise webob.exc.HTTPBadRequest(explanation=dup.msg) + except exception.Forbidden as e: + raise webob.exc.HTTPForbidden(explanation=e.msg) + except exception.InvalidParameterValue as e: + raise webob.exc.HTTPBadRequest(explanation=e.msg) + except exception.LimitExceeded as e: + raise webob.exc.HTTPRequestEntityTooLarge( + explanation=e.msg, request=req, content_type='text/plain') + except exception.Duplicate as dupex: + raise webob.exc.HTTPConflict(explanation=dupex.msg) + except exception.Invalid as e: + raise webob.exc.HTTPBadRequest(explanation=e.msg) + + @utils.mutating + def update_property(self, req, id, type_name, type_version, path, data, + **kwargs): + """Updates a single property specified by request url.""" + artifact_repo = self.gateway.get_artifact_repo(req.context) + try: + artifact = self._get_artifact_with_dependencies(artifact_repo, id, + type_name, + type_version) + # use updater mixin to 
perform updates: generate update path + if req.method == "PUT": + # replaces existing value or creates a new one + if getattr(artifact, kwargs["attr"]): + artifact.replace(path=path, value=data) + else: + artifact.add(path=path, value=data) + else: + # append to an existing value or create a new one + artifact.add(path=path, value=data) + artifact_repo.save(artifact) + return self._get_artifact_with_dependencies(artifact_repo, id) + except (exception.InvalidArtifactPropertyValue, + exception.ArtifactInvalidProperty, + exception.InvalidJsonPatchPath) as e: + raise webob.exc.HTTPBadRequest(explanation=e.msg) + except exception.NotFound as e: + raise webob.exc.HTTPNotFound(explanation=e.msg) + + @utils.mutating + def update(self, req, id, type_name, type_version, changes, **kwargs): + """Performs an update via json patch request""" + artifact_repo = self.gateway.get_artifact_repo(req.context) + try: + artifact = self._get_artifact_with_dependencies(artifact_repo, id, + type_name, + type_version) + updated = artifact + for change in changes: + updated = self._do_update_op(updated, change) + artifact_repo.save(updated) + return self._get_artifact_with_dependencies(artifact_repo, id) + except (exception.InvalidArtifactPropertyValue, + exception.InvalidJsonPatchPath, + exception.InvalidParameterValue) as e: + raise webob.exc.HTTPBadRequest(explanation=e.msg) + except exception.NotFound as e: + raise webob.exc.HTTPNotFound(explanation=e.msg) + except exception.Forbidden as e: + raise webob.exc.HTTPForbidden(explanation=e.msg) + except exception.StorageQuotaFull as e: + msg = (_("Denying attempt to upload artifact because it exceeds " + "the quota: %s") % utils.exception_to_str(e)) + raise webob.exc.HTTPRequestEntityTooLarge( + explanation=msg, request=req, content_type='text/plain') + except exception.Invalid as e: + raise webob.exc.HTTPBadRequest(explanation=e.msg) + except exception.LimitExceeded as e: + raise webob.exc.HTTPRequestEntityTooLarge( + explanation=e.msg, request=req, content_type='text/plain') + + @utils.mutating + def delete(self, req, id, type_name, type_version, **kwargs): + artifact_repo = self.gateway.get_artifact_repo(req.context) + try: + artifact = self._get_artifact_with_dependencies( + artifact_repo, id, type_name=type_name, + type_version=type_version) + artifact_repo.remove(artifact) + except exception.Invalid as e: + raise webob.exc.HTTPBadRequest(explanation=e.msg) + except exception.Forbidden as e: + raise webob.exc.HTTPForbidden(explanation=e.msg) + except exception.NotFound as e: + msg = (_("Failed to find artifact %(artifact_id)s to delete") % + {'artifact_id': id}) + raise webob.exc.HTTPNotFound(explanation=msg) + except exception.InUseByStore as e: + msg = (_("Artifact %s could not be deleted " + "because it is in use: %s") % (id, e.msg)) # noqa + raise webob.exc.HTTPConflict(explanation=msg) + + @utils.mutating + def publish(self, req, id, type_name, type_version, **kwargs): + artifact_repo = self.gateway.get_artifact_repo(req.context) + try: + artifact = self._get_artifact_with_dependencies( + artifact_repo, id, type_name=type_name, + type_version=type_version) + return artifact_repo.publish(artifact, context=req.context) + except exception.Forbidden as e: + raise webob.exc.HTTPForbidden(explanation=e.msg) + except exception.NotFound as e: + raise webob.exc.HTTPNotFound(explanation=e.msg) + except exception.Invalid as e: + raise webob.exc.HTTPBadRequest(explanation=e.msg) + + def _upload_list_property(self, method, blob_list, index, data, size): + if method == 
'PUT' and not index and len(blob_list) > 0:
+            # PUT replaces everything, so PUT to non-empty collection is
+            # forbidden
+            raise webob.exc.HTTPMethodNotAllowed(
+                explanation=_("Unable to PUT to non-empty collection"))
+        if index is not None and index > len(blob_list):
+            raise webob.exc.HTTPBadRequest(
+                explanation=_("Index is out of range"))
+        if index is None:
+            # both POST and PUT create a new blob list
+            blob_list.append((data, size))
+        elif method == 'POST':
+            blob_list.insert(index, (data, size))
+        else:
+            blob_list[index] = (data, size)
+
+    @utils.mutating
+    def upload(self, req, id, type_name, type_version, attr, size, data,
+               index, **kwargs):
+        artifact_repo = self.gateway.get_artifact_repo(req.context)
+        try:
+            artifact = self._get_artifact_with_dependencies(artifact_repo,
+                                                            id,
+                                                            type_name,
+                                                            type_version)
+            blob_prop = artifact.metadata.attributes.blobs.get(attr)
+            if blob_prop is None:
+                raise webob.exc.HTTPBadRequest(
+                    explanation=_("Not a blob property '%s'") % attr)
+            if isinstance(blob_prop, list):
+                blob_list = getattr(artifact, attr)
+                self._upload_list_property(req.method, blob_list,
+                                           index, data, size)
+            else:
+                if index is not None:
+                    raise webob.exc.HTTPBadRequest(
+                        explanation=_("Not a list property '%s'") % attr)
+                setattr(artifact, attr, (data, size))
+            artifact_repo.save(artifact)
+            return artifact
+
+        except exception.Forbidden as e:
+            raise webob.exc.HTTPForbidden(explanation=e.msg)
+        except exception.NotFound as e:
+            raise webob.exc.HTTPNotFound(explanation=e.msg)
+        except exception.Invalid as e:
+            raise webob.exc.HTTPBadRequest(explanation=e.msg)
+        except Exception as e:
+            # TODO(mfedosin): add more exception handlers here
+            with excutils.save_and_reraise_exception():
+                LOG.exception(_LE("Failed to upload artifact data due to "
+                                  "internal error"))
+                self._restore(artifact_repo, artifact)
+
+    def download(self, req, id, type_name, type_version, attr, index,
+                 **kwargs):
+        artifact_repo = self.gateway.get_artifact_repo(req.context)
+        try:
+            artifact = artifact_repo.get(id, type_name, type_version)
+            if attr in artifact.metadata.attributes.blobs:
+                if isinstance(artifact.metadata.attributes.blobs[attr], list):
+                    if index is None:
+                        raise webob.exc.HTTPBadRequest(
+                            explanation=_("Index is required"))
+                    blob_list = getattr(artifact, attr)
+                    try:
+                        return blob_list[index]
+                    except IndexError as e:
+                        raise webob.exc.HTTPBadRequest(explanation=e.message)
+                else:
+                    if index is not None:
+                        raise webob.exc.HTTPBadRequest(
+                            explanation=_("Not a list property"))
+                    return getattr(artifact, attr)
+            else:
+                message = _("Not a downloadable entity")
+                raise webob.exc.HTTPBadRequest(explanation=message)
+        except exception.Forbidden as e:
+            raise webob.exc.HTTPForbidden(explanation=e.msg)
+        except exception.NotFound as e:
+            raise webob.exc.HTTPNotFound(explanation=e.msg)
+        except exception.Invalid as e:
+            raise webob.exc.HTTPBadRequest(explanation=e.msg)
+
+    def _restore(self, artifact_repo, artifact):
+        """Restore the artifact to 'creating' state.
+ + :param artifact_repo: The instance of ArtifactRepo + :param artifact: The artifact will be restored + """ + try: + if artifact_repo and artifact: + artifact.state = 'creating' + artifact_repo.save(artifact) + except Exception as e: + msg = (_LE("Unable to restore artifact %(artifact_id)s: %(e)s") % + {'artifact_id': artifact.id, + 'e': utils.exception_to_str(e)}) + LOG.exception(msg) + + +class RequestDeserializer(wsgi.JSONRequestDeserializer, + jsonpatchvalidator.JsonPatchValidatorMixin): + _available_sort_keys = ('name', 'status', 'container_format', + 'disk_format', 'size', 'id', 'created_at', + 'updated_at', 'version') + _default_sort_dir = 'desc' + + _max_limit_number = 1000 + + def __init__(self, schema=None, plugins=None): + super(RequestDeserializer, self).__init__( + methods_allowed=["replace", "remove", "add"]) + self.plugins = plugins or loader.ArtifactsPluginLoader( + 'glance.artifacts.types') + + def _validate_show_level(self, show_level): + try: + return Showlevel.from_str(show_level.strip().lower()) + except exception.ArtifactUnsupportedShowLevel as e: + raise webob.exc.HTTPBadRequest(explanation=e.message) + + def show(self, req): + res = self._process_type_from_request(req, True) + params = req.params.copy() + show_level = params.pop('show_level', None) + if show_level is not None: + res['show_level'] = self._validate_show_level(show_level) + return res + + def _get_request_body(self, req): + output = super(RequestDeserializer, self).default(req) + if 'body' not in output: + msg = _('Body expected in request.') + raise webob.exc.HTTPBadRequest(explanation=msg) + return output['body'] + + def validate_body(self, request): + try: + body = self._get_request_body(request) + return super(RequestDeserializer, self).validate_body(body) + except exception.JsonPatchException as e: + raise webob.exc.HTTPBadRequest(explanation=e) + + def default(self, request): + return self._process_type_from_request(request) + + def _check_type_version(self, type_version): + try: + semantic_version.Version(type_version, partial=True) + except ValueError as e: + raise webob.exc.HTTPBadRequest(explanation=e) + + def _process_type_from_request(self, req, + allow_implicit_version=False): + try: + type_name = req.urlvars.get('type_name') + type_version = req.urlvars.get('type_version') + if type_version is not None: + self._check_type_version(type_version) + # Even if the type_version is not specified and + # 'allow_implicit_version' is False, this call is still needed to + # ensure that at least one version of this type exists. 
+ artifact_type = self.plugins.get_class_by_endpoint(type_name, + type_version) + res = { + 'type_name': artifact_type.metadata.type_name, + 'type_version': + artifact_type.metadata.type_version + if type_version is not None else None + } + if allow_implicit_version: + res['artifact_type'] = artifact_type + return res + + except exception.ArtifactPluginNotFound as e: + raise webob.exc.HTTPBadRequest(explanation=e.msg) + + def create(self, req): + res = self._process_type_from_request(req, True) + res["artifact_data"] = self._get_request_body(req) + return res + + def update(self, req): + res = self._process_type_from_request(req) + res["changes"] = self.validate_body(req) + return res + + def update_property(self, req): + """Data is expected in form {'data': ...}""" + res = self._process_type_from_request(req) + data_schema = { + "type": "object", + "properties": {"data": {}}, + "required": ["data"], + "$schema": "http://json-schema.org/draft-04/schema#"} + try: + json_body = json.loads(req.body) + jsonschema.validate(json_body, data_schema) + # TODO(ivasilevskaya): + # by now the deepest nesting level == 1 (ex. some_list/3), + # has to be fixed for dict properties + attr = req.urlvars["attr"] + path_left = req.urlvars["path_left"] + path = (attr if not path_left + else "%(attr)s/%(path_left)s" % {'attr': attr, + 'path_left': path_left}) + res.update(data=json_body["data"], path=path) + return res + except (ValueError, jsonschema.ValidationError) as e: + msg = _("Invalid json body: %s") % e.message + raise webob.exc.HTTPBadRequest(explanation=msg) + + def upload(self, req): + res = self._process_type_from_request(req) + index = req.urlvars.get('path_left') + try: + # for blobs only one level of indexing is supported + # (ex. bloblist/0) + if index is not None: + index = int(index) + except ValueError: + msg = _("Only list indexes are allowed for blob lists") + raise webob.exc.HTTPBadRequest(explanation=msg) + artifact_size = req.content_length or None + res.update(size=artifact_size, data=req.body_file, + index=index) + return res + + def download(self, req): + res = self._process_type_from_request(req) + index = req.urlvars.get('index') + if index is not None: + index = int(index) + res.update(index=index) + return res + + def _validate_limit(self, limit): + if limit is None: + return self._max_limit_number + try: + limit = int(limit) + except ValueError: + msg = _("Limit param must be an integer") + raise webob.exc.HTTPBadRequest(explanation=msg) + + if limit < 0: + msg = _("Limit param must be positive") + raise webob.exc.HTTPBadRequest(explanation=msg) + + if limit > self._max_limit_number: + msg = _("Limit param" + " must not be higher than %d") % self._max_limit_number + raise webob.exc.HTTPBadRequest(explanation=msg) + + return limit + + def _validate_sort_key(self, sort_key, artifact_type, type_version=None): + if sort_key in self._available_sort_keys: + return sort_key, None + elif type_version is None: + msg = _('Invalid sort key: %(sort_key)s. ' + 'If type version is not set it must be one of' + ' the following: %(available)s.') % \ + {'sort_key': sort_key, + 'available': ', '.join(self._available_sort_keys)} + raise webob.exc.HTTPBadRequest(explanation=msg) + prop_type = artifact_type.metadata.attributes.all.get(sort_key) + if prop_type is None or prop_type.DB_TYPE not in ['string', + 'numeric', + 'int', + 'bool']: + msg = _('Invalid sort key: %(sort_key)s. 
'
+                  'You cannot sort by this property') % \
+                {'sort_key': sort_key}
+            raise webob.exc.HTTPBadRequest(explanation=msg)
+
+        return sort_key, prop_type.DB_TYPE
+
+    def _validate_sort_dir(self, sort_dir):
+        if sort_dir not in ['asc', 'desc']:
+            msg = _('Invalid sort direction: %s') % sort_dir
+            raise webob.exc.HTTPBadRequest(explanation=msg)
+
+        return sort_dir
+
+    def _get_sorting_params(self, params, artifact_type, type_version=None):
+
+        sort_keys = []
+        sort_dirs = []
+
+        if 'sort' in params:
+            for sort_param in params.pop('sort').strip().split(','):
+                key, _sep, dir = sort_param.partition(':')
+                if not dir:
+                    dir = self._default_sort_dir
+                sort_keys.append(self._validate_sort_key(key.strip(),
+                                                         artifact_type,
+                                                         type_version))
+                sort_dirs.append(self._validate_sort_dir(dir.strip()))
+
+        if not sort_keys:
+            sort_keys = [('created_at', None)]
+        if not sort_dirs:
+            sort_dirs = [self._default_sort_dir]
+
+        return sort_keys, sort_dirs
+
+    def _bring_to_type(self, type_name, value):
+        mapper = {'int': int,
+                  'string': str,
+                  'text': str,
+                  'bool': bool,
+                  'numeric': float}
+        return mapper[type_name](value)
+
+    def _get_filters(self, artifact_type, params):
+        filters = dict()
+        for filter, value in params.items():
+            value = value.strip()
+            prop_type = artifact_type.metadata.attributes.all.get(filter)
+            if prop_type.DB_TYPE is not None:
+                str_type = prop_type.DB_TYPE
+            elif isinstance(prop_type, list):
+                if not isinstance(prop_type.item_type, list):
+                    str_type = prop_type.item_type.DB_TYPE
+                else:
+                    raise webob.exc.HTTPBadRequest('Filtering by tuple-like'
+                                                   ' fields is not supported')
+            elif isinstance(prop_type, dict):
+                filters['name'] = filter + '.' + value
+                continue
+            else:
+                raise webob.exc.HTTPBadRequest('Filtering by this property '
+                                               'is not supported')
+            substr1, _sep, substr2 = value.partition(':')
+            if not _sep:
+                op = 'IN' if isinstance(prop_type, list) else 'EQ'
+                filters[filter] = dict(operator=op,
+                                       value=self._bring_to_type(str_type,
+                                                                 substr1),
+                                       type=str_type)
+            else:
+                op = substr1.strip().upper()
+                filters[filter] = dict(operator=op,
+                                       value=self._bring_to_type(str_type,
+                                                                 substr2),
+                                       type=str_type)
+        return filters
+
+    def list(self, req):
+        res = self._process_type_from_request(req, True)
+        params = req.params.copy()
+        show_level = params.pop('show_level', None)
+        if show_level is not None:
+            res['show_level'] = self._validate_show_level(show_level.strip())
+
+        limit = params.pop('limit', None)
+        marker = params.pop('marker', None)
+
+        tags = []
+        while 'tag' in params:
+            tags.append(params.pop('tag').strip())
+
+        query_params = dict()
+
+        query_params['sort_keys'], query_params['sort_dirs'] = \
+            self._get_sorting_params(params, res['artifact_type'],
+                                     res['type_version'])
+
+        if marker is not None:
+            query_params['marker'] = marker
+
+        query_params['limit'] = self._validate_limit(limit)
+
+        query_params['filters'] = self._get_filters(res['artifact_type'],
+                                                    params)
+
+        if tags:
+            query_params['filters']['tags'] = {'value': tags}
+
+        query_params['type_name'] = res['artifact_type'].metadata.type_name
+
+        return query_params
+
+
+class ResponseSerializer(wsgi.JSONResponseSerializer):
+    # TODO(ivasilevskaya): ideally this should be autogenerated/loaded
+    ARTIFACTS_ENDPOINT = '/v3/artifacts'
+    fields = ['id', 'name', 'version', 'type_name', 'type_version',
+              'visibility', 'state', 'owner', 'scope', 'created_at',
+              'updated_at', 'tags', 'dependencies', 'blobs', 'properties']
+
+    def __init__(self, schema=None):
+        super(ResponseSerializer, self).__init__()
+
+    def default(self, response, res):
+        artifact = serialization.serialize_for_client(
+            res, show_level=Showlevel.DIRECT)
+        body = json.dumps(artifact, ensure_ascii=False)
+        response.unicode_body = six.text_type(body)
+        response.content_type = 'application/json'
+
+    def create(self, response, artifact):
+        response.status_int = 201
+        self.default(response, artifact)
+        response.location = (
+            '%(root_url)s/%(type_name)s/v%(type_version)s/%(id)s' % dict(
+                root_url=ResponseSerializer.ARTIFACTS_ENDPOINT,
+                type_name=artifact.metadata.endpoint,
+                type_version=artifact.metadata.type_version,
+                id=artifact.id))
+
+    def list(self, response, res):
+        artifacts_list = [
+            serialization.serialize_for_client(a, show_level=Showlevel.NONE)
+            for a in res]
+        body = json.dumps(artifacts_list, ensure_ascii=False)
+        response.unicode_body = six.text_type(body)
+        response.content_type = 'application/json'
+
+    def delete(self, response, result):
+        response.status_int = 204
+
+    def download(self, response, blob):
+        response.headers['Content-Type'] = 'application/octet-stream'
+        response.app_iter = iter(blob.data_stream)
+        if blob.checksum:
+            response.headers['Content-MD5'] = blob.checksum
+        response.headers['Content-Length'] = str(blob.size)
+
+
+def create_resource():
+    """Artifacts resource factory method"""
+    plugins = loader.ArtifactsPluginLoader('glance.artifacts.types')
+    deserializer = RequestDeserializer(plugins=plugins)
+    serializer = ResponseSerializer()
+    controller = ArtifactsController(plugins=plugins)
+    return wsgi.Resource(controller, deserializer, serializer)
diff --git a/glance/api/v3/router.py b/glance/api/v3/router.py
new file mode 100644
index 00000000..75dd782b
--- /dev/null
+++ b/glance/api/v3/router.py
@@ -0,0 +1,87 @@
+# Copyright (c) 2015 Mirantis, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+ +from glance.api.v3 import artifacts +from glance.common import wsgi + + +UUID_REGEX = ( + R'[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}') + + +class API(wsgi.Router): + + def _get_artifacts_resource(self): + if not self.artifacts_resource: + self.artifacts_resource = artifacts.create_resource() + return self.artifacts_resource + + def __init__(self, mapper): + self.artifacts_resource = None + artifacts_resource = self._get_artifacts_resource() + + def _check_json_content_type(environ, result): + return "application/json" in environ["CONTENT_TYPE"] + + def _check_octet_stream_content_type(environ, result): + return "application/octet-stream" in environ["CONTENT_TYPE"] + + def connect_routes(m, read_only): + with m.submapper(resource_name="artifact_operations", + path_prefix="/{id}", + requirements={'id': UUID_REGEX}) as art: + art.show() + if not read_only: + art.delete() + art.action('update', method='PATCH') + art.link('publish', method='POST') + + def connect_attr_action(attr): + if not read_only: + attr.action("upload", conditions={ + 'method': ["POST", "PUT"], + 'function': _check_octet_stream_content_type}) + attr.action("update_property", + conditions={ + 'method': ["POST", "PUT"], + 'function': _check_json_content_type}) + attr.link("download", method="GET") + + attr_map = art.submapper(resource_name="attr_operations", + path_prefix="/{attr}", path_left=None) + attr_items = art.submapper( + resource_name="attr_item_ops", + path_prefix="/{attr}/{path_left:.*}") + connect_attr_action(attr_map) + connect_attr_action(attr_items) + + m.connect("", action='list', conditions={'method': 'GET'}, + state='active') + m.connect("/drafts", action='list', conditions={'method': 'GET'}, + state='creating') + if not read_only: + m.connect("/drafts", action='create', + conditions={'method': 'POST'}) + + versioned = mapper.submapper(path_prefix='/artifacts/{type_name}/' + 'v{type_version}', + controller=artifacts_resource) + + non_versioned = mapper.submapper(path_prefix='/artifacts/{type_name}', + type_version=None, + controller=artifacts_resource) + connect_routes(versioned, False) + connect_routes(non_versioned, True) + + super(API, self).__init__(mapper) diff --git a/glance/api/versions.py b/glance/api/versions.py index 772378d9..453d41f7 100644 --- a/glance/api/versions.py +++ b/glance/api/versions.py @@ -56,6 +56,9 @@ class Controller(object): } version_objs = [] + if CONF.enable_v3_api: + version_objs.append( + build_version_object(3.0, 'v3', 'EXPERIMENTAL')) if CONF.enable_v2_api: version_objs.extend([ build_version_object(2.3, 'v2', 'CURRENT'), diff --git a/glance/common/artifacts/serialization.py b/glance/common/artifacts/serialization.py index 384cbc6c..8a7d118b 100644 --- a/glance/common/artifacts/serialization.py +++ b/glance/common/artifacts/serialization.py @@ -12,8 +12,11 @@ # License for the specific language governing permissions and limitations # under the License. +import collections + import six +from glance import artifacts as ga from glance.common.artifacts import declarative from glance.common.artifacts import definitions from glance.common import exception @@ -262,3 +265,66 @@ def deserialize_from_db(db_dict, plugins): artifact_properties, plugins) return artifact_type(**artifact_properties) + + +def _process_blobs_for_client(artifact, result): + """Processes artifact's blobs: adds download links and pretty-printed data. + + The result is stored in 'result' dict. 
+ """ + def build_uri(blob_attr, position=None): + """A helper func to build download uri""" + template = "/artifacts/%(type)s/v%(version)s/%(id)s/%(prop)s/download" + format_dict = { + "type": artifact.metadata.endpoint, + "version": artifact.type_version, + "id": artifact.id, + "prop": blob_attr.name + } + if position is not None: + template = "/artifacts/%(type)s/v%(version)s/" \ + "%(id)s/%(prop)s/%(position)s/download" + format_dict["position"] = position + + return template % format_dict + + for blob_attr in artifact.metadata.attributes.blobs.values(): + value = blob_attr.get_value(artifact) + if value is None: + result[blob_attr.name] = None + elif isinstance(value, collections.Iterable): + res_list = [] + for pos, blob in enumerate(value): + blob_dict = blob.to_dict() + blob_dict["download_link"] = build_uri(blob_attr, pos) + res_list.append(blob_dict) + result[blob_attr.name] = res_list + else: + result[blob_attr.name] = value.to_dict() + result[blob_attr.name]["download_link"] = build_uri(blob_attr) + + +def serialize_for_client(artifact, show_level=ga.Showlevel.NONE): + # use serialize_for_db and modify some fields + # (like properties, show only value, not type) + result = {} + + for prop in artifact.metadata.attributes.properties.values(): + result[prop.name] = prop.get_value(artifact) + + if show_level > ga.Showlevel.NONE: + for dep in artifact.metadata.attributes.dependencies.values(): + inner_show_level = (ga.Showlevel.DIRECT + if show_level == ga.Showlevel.DIRECT + else ga.Showlevel.NONE) + value = dep.get_value(artifact) + if value is None: + result[dep.name] = None + elif isinstance(value, list): + result[dep.name] = [serialize_for_client(v, inner_show_level) + for v in value] + else: + result[dep.name] = serialize_for_client(value, + inner_show_level) + _process_blobs_for_client(artifact, result) + return result diff --git a/glance/common/config.py b/glance/common/config.py index b8b96364..6f2982ff 100644 --- a/glance/common/config.py +++ b/glance/common/config.py @@ -150,6 +150,8 @@ common_opts = [ help=_("Deploy the v1 OpenStack Images API.")), cfg.BoolOpt('enable_v2_api', default=True, help=_("Deploy the v2 OpenStack Images API.")), + cfg.BoolOpt('enable_v3_api', default=True, + help=_("Deploy the v3 OpenStack Objects API.")), cfg.BoolOpt('enable_v1_registry', default=True, help=_("Deploy the v1 OpenStack Registry API.")), cfg.BoolOpt('enable_v2_registry', default=True, diff --git a/glance/tests/functional/__init__.py b/glance/tests/functional/__init__.py index 70ad44ff..c7699baa 100644 --- a/glance/tests/functional/__init__.py +++ b/glance/tests/functional/__init__.py @@ -75,6 +75,7 @@ class Server(object): self.property_protection_file = '' self.enable_v1_api = True self.enable_v2_api = True + self.enable_v3_api = True self.enable_v1_registry = True self.enable_v2_registry = True self.needs_database = False @@ -340,6 +341,7 @@ show_multiple_locations = %(show_multiple_locations)s user_storage_quota = %(user_storage_quota)s enable_v1_api = %(enable_v1_api)s enable_v2_api = %(enable_v2_api)s +enable_v3_api = %(enable_v3_api)s lock_path = %(lock_path)s property_protection_file = %(property_protection_file)s property_protection_rule_format = %(property_protection_rule_format)s @@ -386,6 +388,7 @@ paste.composite_factory = glance.api:root_app_factory /: apiversions /v1: apiv1app /v2: apiv2app +/v3: apiv3app [app:apiversions] paste.app_factory = glance.api.versions:create_resource @@ -396,6 +399,9 @@ paste.app_factory = glance.api.v1.router:API.factory [app:apiv2app] 
paste.app_factory = glance.api.v2.router:API.factory +[app:apiv3app] +paste.app_factory = glance.api.v3.router:API.factory + [filter:versionnegotiation] paste.filter_factory = glance.api.middleware.version_negotiation:VersionNegotiationFilter.factory diff --git a/glance/tests/functional/artifacts/__init__.py b/glance/tests/functional/artifacts/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/glance/tests/functional/artifacts/test_artifacts.py b/glance/tests/functional/artifacts/test_artifacts.py new file mode 100644 index 00000000..6a4de631 --- /dev/null +++ b/glance/tests/functional/artifacts/test_artifacts.py @@ -0,0 +1,1116 @@ +# Copyright (c) 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import unittest +import uuid + +import mock +from oslo.serialization import jsonutils +import pkg_resources +import requests + +from glance.api.v3 import artifacts +from glance.api.v3 import router +from glance.common.artifacts import definitions +from glance.common.artifacts import loader +from glance.common import wsgi +from glance.tests import functional + +TENANT1 = str(uuid.uuid4()) + + +class Artifact(definitions.ArtifactType): + __type_name__ = "WithProps" + prop1 = definitions.String() + prop2 = definitions.Integer() + prop_list = definitions.Array(item_type=definitions.Integer()) + tuple_prop = definitions.Array(item_type=[definitions.Integer(), + definitions.Boolean()]) + dict_prop = definitions.Dict(properties={ + "foo": definitions.String(), + "bar_list": definitions.Array(definitions.Integer())}) + dict_prop_strval = definitions.Dict(properties=definitions.String()) + depends_on = definitions.ArtifactReference() + depends_on_list = definitions.ArtifactReferenceList() + + +class ArtifactNoProps(definitions.ArtifactType): + __type_name__ = "NoProp" + + +class ArtifactNoProps1(definitions.ArtifactType): + __type_name__ = "NoProp" + __type_version__ = "0.5" + + +class ArtifactWithBlob(definitions.ArtifactType): + __type_name__ = "WithBlob" + blob1 = definitions.BinaryObject() + blob_list = definitions.BinaryObjectList() + + +def _create_resource(): + plugins = None + mock_this = 'stevedore.extension.ExtensionManager._find_entry_points' + with mock.patch(mock_this) as fep: + path = 'glance.tests.functional.artifacts.test_artifacts' + fep.return_value = [ + pkg_resources.EntryPoint.parse('WithProps=%s:Artifact' % path), + pkg_resources.EntryPoint.parse( + 'NoProp=%s:ArtifactNoProps' % path), + pkg_resources.EntryPoint.parse( + 'NoProp=%s:ArtifactNoProps1' % path), + pkg_resources.EntryPoint.parse( + 'WithBlob=%s:ArtifactWithBlob' % path) + ] + plugins = loader.ArtifactsPluginLoader('glance.artifacts.types') + deserializer = artifacts.RequestDeserializer(plugins=plugins) + serializer = artifacts.ResponseSerializer() + controller = artifacts.ArtifactsController(plugins=plugins) + return wsgi.Resource(controller, deserializer, serializer) + + +class TestRouter(router.API): + def _get_artifacts_resource(self): + return _create_resource() + + +class 
TestArtifacts(functional.FunctionalTest): + def setUp(self): + super(TestArtifacts, self).setUp() + self.start_servers(**self.__dict__.copy()) + + def tearDown(self): + self.stop_servers() + self._reset_database(self.api_server.sql_connection) + super(TestArtifacts, self).tearDown() + + def _url(self, path): + return 'http://127.0.0.1:%d/v3/artifacts%s' % (self.api_port, path) + + def _headers(self, custom_headers=None): + base_headers = { + 'X-Identity-Status': 'Confirmed', + 'X-Auth-Token': '932c5c84-02ac-4fe5-a9ba-620af0e2bb96', + 'X-User-Id': 'f9a41d13-0c13-47e9-bee2-ce4e8bfe958e', + 'X-Tenant-Id': TENANT1, + 'X-Roles': 'member', + } + base_headers.update(custom_headers or {}) + return base_headers + + def start_servers(self, **kwargs): + new_paste_conf_base = """[pipeline:glance-api] +pipeline = versionnegotiation gzip unauthenticated-context rootapp + +[pipeline:glance-api-caching] +pipeline = versionnegotiation gzip unauthenticated-context cache rootapp + +[pipeline:glance-api-cachemanagement] +pipeline = + versionnegotiation + gzip + unauthenticated-context + cache + cache_manage + rootapp + +[pipeline:glance-api-fakeauth] +pipeline = versionnegotiation gzip fakeauth context rootapp + +[pipeline:glance-api-noauth] +pipeline = versionnegotiation gzip context rootapp + +[composite:rootapp] +paste.composite_factory = glance.api:root_app_factory +/: apiversions +/v1: apiv1app +/v2: apiv2app +/v3: apiv3app + +[app:apiversions] +paste.app_factory = glance.api.versions:create_resource + +[app:apiv1app] +paste.app_factory = glance.api.v1.router:API.factory + +[app:apiv2app] +paste.app_factory = glance.api.v2.router:API.factory + +[app:apiv3app] +paste.app_factory = + glance.tests.functional.artifacts.test_artifacts:TestRouter.factory + +[filter:versionnegotiation] +paste.filter_factory = + glance.api.middleware.version_negotiation:VersionNegotiationFilter.factory + +[filter:gzip] +paste.filter_factory = glance.api.middleware.gzip:GzipMiddleware.factory + +[filter:cache] +paste.filter_factory = glance.api.middleware.cache:CacheFilter.factory + +[filter:cache_manage] +paste.filter_factory = + glance.api.middleware.cache_manage:CacheManageFilter.factory + +[filter:context] +paste.filter_factory = glance.api.middleware.context:ContextMiddleware.factory + +[filter:unauthenticated-context] +paste.filter_factory = + glance.api.middleware.context:UnauthenticatedContextMiddleware.factory + +[filter:fakeauth] +paste.filter_factory = glance.tests.utils:FakeAuthMiddleware.factory +""" + self.cleanup() + self.api_server.paste_conf_base = new_paste_conf_base + super(TestArtifacts, self).start_servers(**kwargs) + + def _create_artifact(self, type_name, type_version='1.0', data=None, + status=201): + # create an artifact first + artifact_data = data or {'name': 'artifact-1', + 'version': '12'} + return self._check_artifact_post('/%s/v%s/drafts' % (type_name, + type_version), + artifact_data, status=status) + + def _check_artifact_method(self, method, url, data=None, status=200, + headers=None): + if not headers: + headers = self._headers() + headers.setdefault("Content-Type", "application/json") + if 'application/json' in headers['Content-Type']: + data = jsonutils.dumps(data) + response = getattr(requests, method)(self._url(url), headers=headers, + data=data) + self.assertEqual(status, response.status_code) + if status >= 400: + return response.text + if "application/json" in response.headers["content-type"]: + return jsonutils.loads(response.text) + return response.text + + def 
_check_artifact_post(self, url, data, status=201, + headers={'Content-Type': 'application/json'}): + return self._check_artifact_method("post", url, data, status=status, + headers=headers) + + def _check_artifact_get(self, url, status=200): + return self._check_artifact_method("get", url, status=status) + + def _check_artifact_delete(self, url, status=204): + response = requests.delete(self._url(url), headers=self._headers()) + self.assertEqual(status, response.status_code) + return response.text + + def _check_artifact_patch(self, url, data, status=200): + return self._check_artifact_method("patch", url, data, status) + + def _check_artifact_put(self, url, data, status=200): + return self._check_artifact_method("put", url, data, status=status) + + def test_list_any_artifacts(self): + """Returns information about all draft artifacts with given endpoint""" + self._create_artifact('noprop') + artifacts = self._check_artifact_get('/noprop/drafts') + self.assertEqual(1, len(artifacts)) + + def test_list_last_version(self): + """/artifacts/endpoint == /artifacts/endpoint/all-versions""" + self._create_artifact('noprop') + artifacts = self._check_artifact_get('/noprop/drafts') + self.assertEqual(1, len(artifacts)) + # the same result can be achieved if asked for artifact with + # type_version=last version + artifacts_precise = self._check_artifact_get('/noprop/v1.0/drafts') + self.assertEqual(artifacts, artifacts_precise) + + def test_list_artifacts_by_state(self): + """Returns last version of artifacts with given state""" + self._create_artifact('noprop') + creating_state = self._check_artifact_get('/noprop/drafts') + self.assertEqual(1, len(creating_state)) + # no active [/type_name/active == /type_name] + active_state = self._check_artifact_get('/noprop') + self.assertEqual(0, len(active_state)) + + def test_list_artifacts_with_version(self): + """Supplying precise artifact version does not break anything""" + self._create_artifact('noprop') + list_creating = self._check_artifact_get('/noprop/v1.0/drafts') + self.assertEqual(1, len(list_creating)) + bad_version = self._check_artifact_get('/noprop/v1.0bad', + status=400) + self.assertIn("Invalid version string: u'1.0bad'", bad_version) + + def test_get_artifact_by_id_any_version(self): + data = self._create_artifact('noprop') + artifact_id = data['id'] + artifacts = self._check_artifact_get( + '/noprop/%s' % artifact_id) + self.assertEqual(artifact_id, artifacts['id']) + + def test_list_artifact_no_such_version(self): + """Version filtering should be applied for existing plugins. 
+
+        An attempt to retrieve an artifact for an existing plugin but with
+        a wrong version should result in
+        400 BadRequest 'No such plugin has been loaded'
+        """
+        msg = self._check_artifact_get('/noprop/v0.0.9', 400)
+        self.assertIn("No plugin for 'noprop v 0.0.9' has been loaded",
+                      msg)
+
+    def test_get_artifact_by_id(self):
+        data = self._create_artifact('noprop')
+        artifact_id = data['id']
+        artifacts = self._check_artifact_get(
+            '/noprop/%s' % artifact_id)
+        self.assertEqual(artifact_id, artifacts['id'])
+        # the same result can be achieved if asked for artifact with
+        # type_version=last version
+        artifacts_precise = self._check_artifact_get(
+            '/noprop/v1.0/%s' % artifact_id)
+        self.assertEqual(artifacts, artifacts_precise)
+
+    def test_get_artifact_basic_show_level(self):
+        no_prop_art = self._create_artifact('noprop')
+        art = self._create_artifact(
+            'withprops',
+            data={"name": "name", "version": "42",
+                  "depends_on": no_prop_art['id']})
+        self.assertEqual(no_prop_art['id'], art['depends_on']['id'])
+        self.assertEqual(no_prop_art['name'], art['depends_on']['name'])
+
+        artifact_id = art['id']
+        artifact = self._check_artifact_get(
+            '/withprops/%s?show_level=basic' % artifact_id)
+        self.assertEqual(artifact_id, artifact['id'])
+        self.assertIsNone(artifact['depends_on'])
+
+    def test_get_artifact_none_show_level(self):
+        """Create an artifact (with two deployer-defined properties)"""
+        artifact_data = {'name': 'artifact-1',
+                         'version': '12',
+                         'tags': ['gagaga', 'sesese'],
+                         'prop1': 'Arthur Dent',
+                         'prop2': 42}
+        art = self._check_artifact_post('/withprops/v1.0/drafts',
+                                        artifact_data)
+        expected_artifact = {
+            'state': 'creating',
+            'name': 'artifact-1',
+            'version': '12.0.0',
+            'tags': ['gagaga', 'sesese'],
+            'visibility': 'private',
+            'type_name': 'WithProps',
+            'type_version': '1.0',
+            'prop1': 'Arthur Dent',
+            'prop2': 42
+        }
+        for key, value in expected_artifact.items():
+            self.assertEqual(art[key], value, key)
+
+        artifact_id = art['id']
+        artifact = self._check_artifact_get(
+            '/withprops/%s?show_level=none' % artifact_id)
+        self.assertEqual(artifact_id, artifact['id'])
+        self.assertIsNone(artifact['prop1'])
+        self.assertIsNone(artifact['prop2'])
+
+    def test_get_artifact_invalid_show_level(self):
+        no_prop_art = self._create_artifact('noprop')
+        art = self._create_artifact(
+            'withprops',
+            data={"name": "name", "version": "42",
+                  "depends_on": no_prop_art['id']})
+        self.assertEqual(no_prop_art['id'], art['depends_on']['id'])
+        self.assertEqual(no_prop_art['name'], art['depends_on']['name'])
+
+        artifact_id = art['id']
+        # 'yoba' is an invalid show level
+        self._check_artifact_get(
+            '/noprop/%s?show_level=yoba' % artifact_id, status=400)
+
+    def test_get_artifact_no_such_id(self):
+        msg = self._check_artifact_get(
+            '/noprop/%s' % str(uuid.uuid4()), status=404)
+        self.assertIn('No artifact found with ID', msg)
+
+    def test_get_artifact_present_id_wrong_type(self):
+        artifact_data = {'name': 'artifact-1',
+                         'version': '12',
+                         'prop1': '12',
+                         'prop2': 12}
+        art1 = self._create_artifact('withprops', data=artifact_data)
+        art2 = self._create_artifact('noprop')
+        # ok id and type_name but bad type_version should result in 404
+        self._check_artifact_get('/noprop/v0.5/%s' % str(art2['id']),
+                                 status=404)
+        # try to access art2 by supplying art1.type and art2.id
+        self._check_artifact_get('/withprops/%s' % str(art2['id']),
+                                 status=404)
+        self._check_artifact_get('/noprop/%s' % str(art1['id']), status=404)
+
+    def test_delete_artifact(self):
+        artifact_data = {'name': 
'artifact-1', + 'version': '12', + 'prop1': '12', + 'prop2': 12} + art1 = self._create_artifact('withprops', data=artifact_data) + self._check_artifact_delete('/withprops/v1.0/%s' % art1['id']) + art1_deleted = self._check_artifact_get('/withprops/%s' % art1['id'], + status=404) + self.assertIn('No artifact found with ID', art1_deleted) + + def test_delete_artifact_no_such_id(self): + self._check_artifact_delete('/noprop/v1/%s' % str(uuid.uuid4()), + status=404) + + @unittest.skip("Test is unstable") + def test_delete_artifact_with_dependency(self): + # make sure that artifact can't be deleted if it has some dependencies + # still not deleted + art = self._create_artifact('withprops') + no_prop_art = self._create_artifact('noprop') + art_updated = self._check_artifact_patch( + '/withprops/v1/%s' % art['id'], + data=[{'value': no_prop_art['id'], + 'op': 'replace', + 'path': '/depends_on'}, + {'value': [no_prop_art['id']], + 'op': 'add', + 'path': '/depends_on_list'}]) + self.assertEqual(no_prop_art['id'], art_updated['depends_on']['id']) + self.assertEqual(1, len(art_updated['depends_on_list'])) + # try to delete an artifact prior to its dependency + res = self._check_artifact_delete('/withprops/v1/%s' % art['id'], + status=400) + self.assertIn( + "Dependency property 'depends_on' has to be deleted first", res) + # delete a dependency + art_updated = self._check_artifact_patch( + '/withprops/v1/%s' % art['id'], + data=[{'op': 'remove', 'path': '/depends_on'}]) + # try to delete prior to deleting artifact_list dependencies + res = self._check_artifact_delete('/withprops/v1/%s' % art['id'], + status=400) + self.assertIn( + "Dependency property 'depends_on_list' has to be deleted first", + res) + art_updated = self._check_artifact_patch( + '/withprops/v1/%s' % art['id'], + data=[{'op': 'remove', 'path': '/depends_on_list'}]) + # delete dependency list + self._check_artifact_delete('/withprops/v1/%s' % art['id']) + + def test_delete_artifact_with_blob(self): + # Upload some data to an artifact + art = self._create_artifact('withblob') + headers = self._headers({'Content-Type': 'application/octet-stream'}) + self._check_artifact_post('/withblob/v1/%s/blob1' % art['id'], + headers=headers, + data='ZZZZZ', status=200) + self._check_artifact_delete('/withblob/v1/%s' % art['id']) + + def test_update_array_property_by_replace_op(self): + art = self._create_artifact('withprops', data={'name': 'some art', + 'version': '4.2'}) + self.assertEqual('some art', art['name']) + data = [{'op': 'replace', 'value': [1, 2, 3], 'path': '/prop_list'}] + art_updated = self._check_artifact_patch('/withprops/v1/%s' % + art['id'], + data=data) + self.assertEqual([1, 2, 3], art_updated['prop_list']) + # now try to change first element of the list + data_change_first = [{'op': 'replace', 'value': 42, + 'path': '/prop_list/1'}] + art_updated = self._check_artifact_patch('/withprops/v1/%s' % + art['id'], + data=data_change_first) + self.assertEqual([1, 42, 3], art_updated['prop_list']) + # replace last element + data_change_last = [{'op': 'replace', 'value': 24, + 'path': '/prop_list/-'}] + art_updated = self._check_artifact_patch('/withprops/v1/%s' % + art['id'], + data=data_change_last) + self.assertEqual([1, 42, 24], art_updated['prop_list']) + + def test_update_dict_property_by_replace_op(self): + art = self._create_artifact( + 'withprops', + data={'name': 'some art', + 'version': '4.2', + 'dict_prop': {'foo': "Fenchurch", 'bar_list': [42, 42]}}) + self.assertEqual({'foo': "Fenchurch", 'bar_list': [42, 42]}, + 
art['dict_prop']) + data = [{'op': 'replace', 'value': 24, + 'path': '/dict_prop/bar_list/0'}, + {'op': 'replace', 'value': 'cello lesson', + 'path': '/dict_prop/foo'}] + art_updated = self._check_artifact_patch('/withprops/v1/%s' + % art['id'], + data=data) + self.assertEqual({'foo': 'cello lesson', 'bar_list': [24, 42]}, + art_updated['dict_prop']) + + def test_update_empty_dict_property_by_replace_op(self): + art = self._create_artifact('withprops') + self.assertIsNone(art['dict_prop']) + data = [{'op': 'replace', 'value': "don't panic", + 'path': '/dict_prop/foo'}] + art_updated = self._check_artifact_patch('/withprops/v1/%s' + % art['id'], + data=data, status=400) + self.assertIn("The provided path 'dict_prop/foo' is invalid", + art_updated) + + def test_update_empty_dict_property_by_remove_op(self): + art = self._create_artifact('withprops') + self.assertIsNone(art['dict_prop']) + data = [{'op': 'remove', 'path': '/dict_prop/bar_list'}] + art_updated = self._check_artifact_patch('/withprops/v1/%s' + % art['id'], + data=data, status=400) + self.assertIn("The provided path 'dict_prop/bar_list' is invalid", + art_updated) + + def test_update_dict_property_by_remove_op(self): + art = self._create_artifact( + 'withprops', + data={'name': 'some art', 'version': '4.2', + 'dict_prop': {'foo': "Fenchurch", 'bar_list': [42, 42]}}) + self.assertEqual({'foo': 'Fenchurch', 'bar_list': [42, 42]}, + art['dict_prop']) + data = [{'op': 'remove', 'path': '/dict_prop/foo'}, + {'op': 'remove', 'path': '/dict_prop/bar_list/1'}] + art_updated = self._check_artifact_patch('/withprops/v1/%s' + % art['id'], + data=data) + self.assertEqual({'bar_list': [42]}, art_updated['dict_prop']) + # now delete the whole dict + data = [{'op': 'remove', 'path': '/dict_prop'}] + art_updated = self._check_artifact_patch('/withprops/v1/%s' + % art['id'], + data=data) + self.assertIsNone(art_updated['dict_prop']) + + @unittest.skip("Skipping due to a know bug") + def test_update_dict_property_change_values(self): + art = self._create_artifact( + 'withprops', data={'name': 'some art', 'version': '4.2', + 'dict_prop_strval': + {'foo': 'Fenchurch', 'bar': 'no value'}}) + self.assertEqual({'foo': 'Fenchurch', 'bar': 'no value'}, + art['dict_prop_strval']) + new_data = [{'op': 'replace', 'path': '/dict_prop_strval', + 'value': {'new-foo': 'Arthur Dent'}}] + art_updated = self._check_artifact_patch('/withprops/v1/%s' + % art['id'], + data=new_data) + self.assertEqual({'new-foo': 'Arthur Dent'}, + art_updated['dict_prop_strval']) + + def test_update_array_property_by_remove_op(self): + art = self._create_artifact( + 'withprops', data={'name': 'some art', + 'version': '4.2', + 'prop_list': [1, 2, 3]}) + self.assertEqual([1, 2, 3], art['prop_list']) + data = [{'op': 'remove', 'path': '/prop_list/0'}] + art_updated = self._check_artifact_patch('/withprops/v1/%s' + % art['id'], + data=data) + self.assertEqual([2, 3], art_updated['prop_list']) + # remove last element + data = [{'op': 'remove', 'path': '/prop_list/-'}] + art_updated = self._check_artifact_patch('/withprops/v1/%s' + % art['id'], + data=data) + self.assertEqual([2], art_updated['prop_list']) + # now delete the whole array + data = [{'op': 'remove', 'path': '/prop_list'}] + art_updated = self._check_artifact_patch('/withprops/v1/%s' + % art['id'], + data=data) + self.assertIsNone(art_updated['prop_list']) + + def test_update_array_property_by_add_op(self): + art = self._create_artifact( + 'withprops', data={'name': 'some art', + 'version': '4.2'}) + 
self.assertIsNone(art['prop_list']) + data = [{'op': 'add', 'path': '/prop_list', 'value': [2, 12, 0, 6]}] + art_updated = self._check_artifact_patch('/withprops/v1/%s' + % art['id'], data=data) + self.assertEqual([2, 12, 0, 6], art_updated['prop_list']) + data = [{'op': 'add', 'path': '/prop_list/2', 'value': 85}] + art_updated = self._check_artifact_patch('/withprops/v1/%s' + % art['id'], data=data) + self.assertEqual([2, 12, 85, 0, 6], art_updated['prop_list']) + # add where path='/array/-' means append to the end + data = [{'op': 'add', 'path': '/prop_list/-', 'value': 7}] + art_updated = self._check_artifact_patch('/withprops/v1/%s' + % art['id'], data=data) + self.assertEqual([2, 12, 85, 0, 6, 7], art_updated['prop_list']) + # an attempt to add an element to unexistant position should result in + # 400 + self.assertEqual(6, len(art_updated['prop_list'])) + bad_index_data = [{'op': 'add', 'path': '/prop_list/11', + 'value': 42}] + art_updated = self._check_artifact_patch('/withprops/v1/%s' + % art['id'], + data=bad_index_data, + status=400) + self.assertIn("The provided path 'prop_list/11' is invalid", + art_updated) + + def test_update_dict_property_by_add_op(self): + art = self._create_artifact("withprops") + self.assertIsNone(art['dict_prop']) + data = [{'op': 'add', 'path': '/dict_prop/foo', 'value': "some value"}] + art_updated = self._check_artifact_patch('/withprops/v1/%s' + % art['id'], + data=data) + self.assertEqual({"foo": "some value"}, art_updated['dict_prop']) + + def test_update_empty_array_property_by_add_op(self): + """Test jsonpatch add. + + According to RFC 6902: + * if the array is empty, '/array/0' is a valid path + """ + create_data = {'name': 'new artifact', + 'version': '4.2'} + art = self._create_artifact('withprops', data=create_data) + self.assertIsNone(art['prop_list']) + data = [{'op': 'add', 'path': '/prop_list/0', 'value': 3}] + art_updated = self._check_artifact_patch('/withprops/v1/%s' + % art['id'], + data=data) + self.assertEqual([3], art_updated['prop_list']) + + def test_update_tuple_property_by_index(self): + art = self._create_artifact( + 'withprops', data={'name': 'some art', + 'version': '4.2', + 'tuple_prop': [1, False]}) + self.assertEqual([1, False], art['tuple_prop']) + data = [{'op': 'replace', 'value': True, + 'path': '/tuple_prop/1'}, + {'op': 'replace', 'value': 2, + 'path': '/tuple_prop/0'}] + art_updated = self._check_artifact_patch('/withprops/v1/%s' + % art['id'], + data=data) + self.assertEqual([2, True], art_updated['tuple_prop']) + + def test_update_artifact(self): + art = self._create_artifact('noprop') + self.assertEqual('artifact-1', art['name']) + art_updated = self._check_artifact_patch( + '/noprop/v1/%s' % art['id'], + data=[{'op': 'replace', 'value': '0.0.9', 'path': '/version'}]) + self.assertEqual('0.0.9', art_updated['version']) + + def test_update_artifact_properties(self): + art = self._create_artifact('withprops') + for prop in ['prop1', 'prop2']: + self.assertIsNone(art[prop]) + data = [{'op': 'replace', 'value': 'some value', + 'path': '/prop1'}] + art_updated = self._check_artifact_patch( + '/withprops/v1/%s' % art['id'], data=data) + self.assertEqual('some value', art_updated['prop1']) + + def test_update_artifact_remove_property(self): + artifact_data = {'name': 'artifact-1', + 'version': '12', + 'tags': ['gagaga', 'sesese'], + 'prop1': 'Arthur Dent', + 'prop2': 42} + art = self._create_artifact('withprops', data=artifact_data) + data = [{'op': 'remove', 'path': '/prop1'}] + art_updated = 
+        art_updated = self._check_artifact_patch('/withprops/v1/%s'
+                                                 % art['id'],
+                                                 data=data)
+        self.assertIsNone(art_updated['prop1'])
+        self.assertEqual(42, art_updated['prop2'])
+
+    def test_update_wrong_property_type(self):
+        art = self._create_artifact('withprops')
+        for prop in ['prop1', 'prop2']:
+            self.assertIsNone(art[prop])
+        data = [{'op': 'replace', 'value': 123, 'path': '/prop1'}]
+        art_updated = self._check_artifact_patch(
+            '/withprops/v1/%s' % art['id'], data=data, status=400)
+        self.assertIn("Property 'prop1' may not have value '123'", art_updated)
+
+    def test_update_multiple_properties(self):
+        with_prop_art = self._create_artifact('withprops')
+        data = [{'op': 'replace',
+                 'path': '/prop1',
+                 'value': 'some value'},
+                {'op': 'replace',
+                 'path': '/prop2',
+                 'value': 42}]
+        updated = self._check_artifact_patch(
+            '/withprops/v1/%s' % with_prop_art['id'], data=data)
+        self.assertEqual('some value', updated['prop1'])
+        self.assertEqual(42, updated['prop2'])
+
+    def test_create_artifact_with_dependency(self):
+        no_prop_art = self._create_artifact('noprop')
+        art = self._create_artifact(
+            'withprops',
+            data={"name": "name", "version": "42",
+                  "depends_on": no_prop_art['id']})
+        self.assertEqual(no_prop_art['id'], art['depends_on']['id'])
+        self.assertEqual(no_prop_art['name'], art['depends_on']['name'])
+
+    def test_create_artifact_dependency_list(self):
+        no_prop_art1 = self._create_artifact('noprop')
+        no_prop_art2 = self._create_artifact('noprop')
+        art = self._create_artifact(
+            'withprops',
+            data={"name": "name", "version": "42",
+                  "depends_on_list": [no_prop_art1['id'], no_prop_art2['id']]})
+        self.assertEqual(2, len(art['depends_on_list']))
+        self.assertEqual([no_prop_art1['id'], no_prop_art2['id']],
+                         map(lambda x: x['id'], art['depends_on_list']))
+
+    def test_create_dependency_list_same_id(self):
+        no_prop_art = self._create_artifact('noprop')
+        res = self._create_artifact(
+            'withprops',
+            data={"name": "name", "version": "42",
+                  "depends_on_list": [no_prop_art['id'],
+                                      no_prop_art['id']]}, status=400)
+        self.assertIn("Items have to be unique", res)
+
+    def test_create_artifact_bad_dependency_format(self):
+        """Invalid dependencies creation.
+ + Dependencies should be passed: + * as a list of ids if param is an ArtifactReferenceList + * as an id if param is an ArtifactReference + """ + no_prop_art = self._create_artifact('noprop') + art = self._check_artifact_post( + '/withprops/v1/drafts', + {"name": "name", "version": "42", + "depends_on": [no_prop_art['id']]}, status=400) + self.assertIn('Not a valid value type', art) + art = self._check_artifact_post( + '/withprops/v1.0/drafts', + {"name": "name", "version": "42", + "depends_on_list": no_prop_art['id']}, status=400) + self.assertIn('object is not iterable', art) + + def test_update_dependency(self): + no_prop_art = self._create_artifact('noprop') + no_prop_art1 = self._create_artifact('noprop') + with_prop_art = self._create_artifact('withprops') + data = [{'op': 'replace', + 'path': '/depends_on', + 'value': no_prop_art['id']}] + updated = self._check_artifact_patch( + '/withprops/v1/%s' % with_prop_art['id'], data=data) + self.assertEqual(no_prop_art['id'], updated['depends_on']['id']) + self.assertEqual(no_prop_art['name'], updated['depends_on']['name']) + data = [{'op': 'replace', + 'path': '/depends_on', + 'value': no_prop_art1['id']}] + # update again and make sure it changes + updated = self._check_artifact_patch( + '/withprops/v1/%s' % with_prop_art['id'], data=data) + self.assertEqual(no_prop_art1['id'], updated['depends_on']['id']) + self.assertEqual(no_prop_art1['name'], updated['depends_on']['name']) + + def test_update_dependency_circular_reference(self): + with_prop_art = self._create_artifact('withprops') + data = [{'op': 'replace', + 'path': '/depends_on', + 'value': [with_prop_art['id']]}] + not_updated = self._check_artifact_patch( + '/withprops/v1/%s' % with_prop_art['id'], data=data, status=400) + self.assertIn('Artifact with a circular dependency can not be created', + not_updated) + + def test_publish_artifact(self): + art = self._create_artifact('withprops') + # now create dependency + no_prop_art = self._create_artifact('noprop') + art_updated = self._check_artifact_patch( + '/withprops/v1/%s' % art['id'], + data=[{'value': no_prop_art['id'], + 'op': 'replace', + 'path': '/depends_on'}]) + self.assertTrue(art_updated['depends_on'] != []) + # artifact can't be published if any dependency is in non-active state + res = self._check_artifact_post( + '/withprops/v1/%s/publish' % art['id'], {}, status=400) + self.assertIn("Not all dependencies are in 'active' state", res) + # after you publish the dependency -> artifact can be published + dep_published = self._check_artifact_post( + '/noprop/v1/%s/publish' % no_prop_art['id'], {}, status=200) + self.assertEqual('active', dep_published['state']) + art_published = self._check_artifact_post( + '/withprops/v1.0/%s/publish' % art['id'], {}, status=200) + self.assertEqual('active', art_published['state']) + + def test_no_mutable_change_in_published_state(self): + art = self._create_artifact('withprops') + no_prop_art = self._create_artifact('noprop') + no_prop_other = self._create_artifact('noprop') + art_updated = self._check_artifact_patch( + '/withprops/v1/%s' % art['id'], + data=[{'value': no_prop_art['id'], + 'op': 'replace', + 'path': '/depends_on'}]) + self.assertEqual(no_prop_art['id'], art_updated['depends_on']['id']) + # now change dependency to some other artifact + art_updated = self._check_artifact_patch( + '/withprops/v1/%s' % art['id'], + data=[{'value': no_prop_other['id'], + 'op': 'replace', + 'path': '/depends_on'}]) + self.assertEqual(no_prop_other['id'], art_updated['depends_on']['id']) + # 
publish dependency + dep_published = self._check_artifact_post( + '/noprop/v1/%s/publish' % no_prop_other['id'], {}, status=200) + self.assertEqual('active', dep_published['state']) + # publish artifact + art_published = self._check_artifact_post( + '/withprops/v1.0/%s/publish' % art['id'], {}, status=200) + self.assertEqual('active', art_published['state']) + # try to change dependency, should fail as already published + res = self._check_artifact_patch( + '/withprops/v1/%s' % art_published['id'], + data=[{'op': 'remove', 'path': '/depends_on'}], status=400) + self.assertIn('Attempt to set value of immutable property', res) + + def test_create_artifact_empty_body(self): + self._check_artifact_post('/noprop/v1.0/drafts', {}, 400) + + def test_create_artifact_insufficient_arguments(self): + self._check_artifact_post('/noprop/v1.0/drafts', + {'name': 'some name, no version'}, + status=400) + + def test_create_artifact_no_such_version(self): + """Creation impossible without specifying a correct version. + + An attempt to create an artifact out of existing plugin but with + a wrong version should result in + 400 BadRequest 'No such plugin has been loaded' + """ + # make sure there is no such artifact noprop + self._check_artifact_get('/noprop/v0.0.9', 400) + artifact_data = {'name': 'artifact-1', + 'version': '12'} + msg = self._check_artifact_post('/noprop/v0.0.9/drafts', + artifact_data, + status=400) + self.assertIn("No plugin for 'noprop v 0.0.9' has been loaded", + msg) + + def test_create_artifact_no_type_version_specified(self): + """Creation impossible without specifying a version. + + It should not be possible to create an artifact out of existing plugin + without specifying any version + """ + artifact_data = {'name': 'artifact-1', + 'version': '12'} + self._check_artifact_post('/noprop/drafts', artifact_data, 404) + + def test_create_artifact_no_properties(self): + """Create an artifact with minimum parameters""" + artifact_data = {'name': 'artifact-1', + 'version': '12'} + artifact = self._check_artifact_post('/withprops/v1.0/drafts', + artifact_data) + # verify that all fields have the values expected + expected_artifact = { + 'state': 'creating', + 'name': 'artifact-1', + 'version': '12.0.0', + 'tags': [], + 'visibility': 'private', + 'type_name': 'WithProps', + 'type_version': '1.0', + 'prop1': None, + 'prop2': None + } + for key, value in expected_artifact.items(): + self.assertEqual(artifact[key], value, key) + + def test_create_artifact_with_properties(self): + """Create an artifact (with two deployer-defined properties)""" + artifact_data = {'name': 'artifact-1', + 'version': '12', + 'tags': ['gagaga', 'sesese'], + 'prop1': 'Arthur Dent', + 'prop2': 42} + artifact = self._check_artifact_post('/withprops/v1.0/drafts', + artifact_data) + expected_artifact = { + 'state': 'creating', + 'name': 'artifact-1', + 'version': '12.0.0', + 'tags': ['gagaga', 'sesese'], + 'visibility': 'private', + 'type_name': 'WithProps', + 'type_version': '1.0', + 'prop1': 'Arthur Dent', + 'prop2': 42 + } + for key, value in expected_artifact.items(): + self.assertEqual(artifact[key], value, key) + + def test_create_artifact_not_all_properties(self): + """Create artifact with minimal properties. 
+ + Checks that it is possible to create an artifact by passing all + required properties but omitting some not required + """ + artifact_data = {'name': 'artifact-1', + 'version': '12', + 'visibility': 'private', + 'tags': ['gagaga', 'sesese'], + 'prop1': 'i am ok'} + artifact = self._check_artifact_post('/withprops/v1.0/drafts', + artifact_data) + expected_artifact = { + 'state': 'creating', + 'name': 'artifact-1', + 'version': '12.0.0', + 'tags': ['gagaga', 'sesese'], + 'visibility': 'private', + 'type_name': 'WithProps', + 'type_version': '1.0', + 'prop1': 'i am ok', + 'prop2': None} + for key, value in expected_artifact.items(): + self.assertEqual(artifact[key], value, key) + # now check creation with no properties specified + for prop in ['prop1', 'prop2']: + artifact_data.pop(prop, '') + artifact = self._check_artifact_post('/withprops/v1.0/drafts', + artifact_data) + for prop in ['prop1', 'prop2']: + self.assertIsNone(artifact[prop]) + + def test_create_artifact_invalid_properties(self): + """Any attempt to pass invalid properties should result in 400""" + artifact_data = {'name': 'artifact-1', + 'version': '12', + 'prop1': 1} + res = self._check_artifact_post('/withprops/v1.0/drafts', + artifact_data, + status=400) + self.assertIn("Property 'prop1' may not have value '1'", res) + artifact_data.pop('prop1') + artifact_data['nosuchprop'] = "Random" + res = self._check_artifact_post('/withprops/v1.0/drafts', + artifact_data, + status=400) + self.assertIn("Artifact has no property nosuchprop", res) + + def test_upload_file(self): + # Upload some data to an artifact + art = self._create_artifact('withblob') + headers = self._headers({'Content-Type': 'application/octet-stream'}) + self._check_artifact_post('/withblob/v1/%s/blob1' % art['id'], + headers=headers, + data='ZZZZZ', status=200) + + def test_upload_list_files(self): + art = self._create_artifact('withblob') + headers = self._headers({'Content-Type': 'application/octet-stream'}) + self._check_artifact_post('/withblob/v1/%s/blob_list' % art['id'], + headers=headers, + data='ZZZZZ', status=200) + self._check_artifact_post('/withblob/v1/%s/blob_list' % art['id'], + headers=headers, + data='YYYYY', status=200) + + def test_download_file(self): + # Download some data from an artifact + art = self._create_artifact('withblob') + artifact_id = art['id'] + headers = self._headers({'Content-Type': 'application/octet-stream'}) + self._check_artifact_post('/withblob/v1/%s/blob1' % art['id'], + headers=headers, + data='ZZZZZ', status=200) + + art = self._check_artifact_get('/withblob/%s' % artifact_id) + self.assertEqual(artifact_id, art['id']) + self.assertIn('download_link', art['blob1']) + + data = self._check_artifact_get( + '/withblob/%s/blob1/download' % art['id']) + self.assertEqual('ZZZZZ', data) + + def test_limit(self): + artifact_data = {'name': 'artifact-1', + 'version': '12'} + self._check_artifact_post('/withprops/v1/drafts', + artifact_data) + artifact_data = {'name': 'artifact-1', + 'version': '13'} + self._check_artifact_post('/withprops/v1/drafts', + artifact_data) + result = self._check_artifact_get('/withprops/v1/drafts') + self.assertEqual(2, len(result)) + + result = self._check_artifact_get('/withprops/v1/drafts?limit=1') + self.assertEqual(1, len(result)) + + def _check_sorting_order(self, expected, actual): + for e, a in zip(expected, actual): + self.assertEqual(e['name'], a['name']) + self.assertEqual(e['version'], a['version']) + self.assertEqual(e['prop1'], a['prop1']) + + def test_sort(self): + artifact_data = 
{'name': 'artifact-1',
+                         'version': '12',
+                         'prop1': 'lala'}
+        art1 = self._check_artifact_post('/withprops/v1.0/drafts',
+                                         artifact_data)
+        artifact_data = {'name': 'artifact-2',
+                         'version': '13',
+                         'prop1': 'lala'}
+        art2 = self._check_artifact_post('/withprops/v1.0/drafts',
+                                         artifact_data)
+        artifact_data = {'name': 'artifact-3',
+                         'version': '13',
+                         'prop1': 'tutu'}
+        art3 = self._check_artifact_post('/withprops/v1.0/drafts',
+                                         artifact_data)
+        artifact_data = {'name': 'artifact-4',
+                         'version': '13',
+                         'prop1': 'hyhy'}
+        art4 = self._check_artifact_post('/withprops/v1.0/drafts',
+                                         artifact_data)
+        artifact_data = {'name': 'artifact-5',
+                         'version': '13',
+                         'prop1': 'bebe'}
+        art5 = self._check_artifact_post('/withprops/v1.0/drafts',
+                                         artifact_data)
+
+        result = self._check_artifact_get(
+            '/withprops/v1.0/drafts?sort=name')
+        self.assertEqual(5, len(result))
+
+        # default direction is 'desc'
+        expected = [art5, art4, art3, art2, art1]
+        self._check_sorting_order(expected, result)
+
+        result = self._check_artifact_get(
+            '/withprops/v1.0/drafts?sort=name:asc')
+        self.assertEqual(5, len(result))
+
+        expected = [art1, art2, art3, art4, art5]
+        self._check_sorting_order(expected, result)
+
+        result = self._check_artifact_get(
+            '/withprops/v1.0/drafts?sort=version:asc,prop1')
+        self.assertEqual(5, len(result))
+
+        expected = [art1, art3, art2, art4, art5]
+        self._check_sorting_order(expected, result)
+
+    def test_update_property(self):
+        data = {'name': 'an artifact',
+                'version': '42'}
+        art = self._create_artifact('withprops', data=data)
+        # update single integer property via PUT
+        upd = self._check_artifact_put('/withprops/v1.0/%s/prop2' % art['id'],
+                                       data={'data': 15})
+        self.assertEqual(15, upd['prop2'])
+        # create list property via PUT
+        upd = self._check_artifact_put(
+            '/withprops/v1.0/%s/tuple_prop' % art['id'],
+            data={'data': [42, True]})
+        self.assertEqual([42, True], upd['tuple_prop'])
+        # change list property via PUT
+        upd = self._check_artifact_put(
+            '/withprops/v1.0/%s/tuple_prop/0' % art['id'], data={'data': 24})
+        self.assertEqual([24, True], upd['tuple_prop'])
+        # append to list property via POST
+        upd = self._check_artifact_post(
+            '/withprops/v1.0/%s/prop_list' % art['id'], data={'data': [11]},
+            status=200)
+        self.assertEqual([11], upd['prop_list'])
+        # append to list property via POST
+        upd = self._check_artifact_post(
+            '/withprops/v1.0/%s/prop_list/-' % art['id'],
+            status=200, data={'data': 10})
+        self.assertEqual([11, 10], upd['prop_list'])
+
+    def test_bad_update_property(self):
+        data = {'name': 'an artifact',
+                'version': '42'}
+        art = self._create_artifact('withprops', data=data)
+        # try to update a nonexistent property
+        upd = self._check_artifact_put(
+            '/withprops/v1.0/%s/nosuchprop' % art['id'],
+            data={'data': 'wont be set'}, status=400)
+        self.assertIn('Artifact has no property nosuchprop', upd)
+        # try to pass wrong property value
+        upd = self._check_artifact_put(
+            '/withprops/v1.0/%s/tuple_prop' % art['id'],
+            data={'data': ['should be an int', False]}, status=400)
+        self.assertIn("Property 'tuple_prop[0]' may not have value", upd)
+        # try to pass bad body (not a valid json)
+        upd = self._check_artifact_put(
+            '/withprops/v1.0/%s/tuple_prop' % art['id'], data="not a json",
+            status=400)
+        self.assertIn("Invalid json body", upd)
+        # try to pass json body invalid under schema
+        upd = self._check_artifact_put(
+            '/withprops/v1.0/%s/tuple_prop' % art['id'],
+            data={"bad": "schema"}, status=400)
+        self.assertIn("Invalid json body", upd)
+
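The PUT/POST property endpoints used above can also be driven directly over HTTP. A minimal sketch, assuming a local glance-api with the v3 API enabled; the host/port, token value, artifact id and the `requests` dependency below are illustrative placeholders, not values defined by this patch:

    # Per-property update endpoints mirrored from test_update_property; all
    # concrete values and the `requests` dependency are illustrative only.
    import json
    import requests

    base = 'http://127.0.0.1:9292/v3/artifacts/withprops/v1.0'
    headers = {'X-Auth-Token': '<token>', 'Content-Type': 'application/json'}
    art_id = '<artifact id returned when the draft was created>'

    # replace a single property value
    requests.put('%s/%s/prop2' % (base, art_id),
                 headers=headers, data=json.dumps({'data': 15}))
    # append to a list property ('-' addresses the slot after the last item)
    requests.post('%s/%s/prop_list/-' % (base, art_id),
                  headers=headers, data=json.dumps({'data': 10}))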
+    def test_update_different_depths_levels(self):
+        data = {'name': 'an artifact',
+                'version': '42'}
+        art = self._create_artifact('withprops', data=data)
+        upd = self._check_artifact_post(
+            '/withprops/v1.0/%s/dict_prop' % art['id'],
+            data={'data': {'foo': 'some value'}}, status=200)
+        self.assertEqual({'foo': 'some value'}, upd['dict_prop'])
+        upd = self._check_artifact_post(
+            '/withprops/v1.0/%s/dict_prop/bar_list' % art['id'],
+            data={'data': [5]}, status=200)
+        self.assertEqual({'foo': 'some value', 'bar_list': [5]},
+                         upd['dict_prop'])
+        upd = self._check_artifact_post(
+            '/withprops/v1.0/%s/dict_prop/bar_list/0' % art['id'],
+            data={'data': 15}, status=200)
+        self.assertEqual({'foo': 'some value', 'bar_list': [5, 15]},
+                         upd['dict_prop'])
+        # try to update dict_prop via a nonexistent path
+        upd = self._check_artifact_post(
+            '/withprops/v1.0/%s/dict_prop/bar_list/nosuchkey' % art['id'],
+            data={'data': 15}, status=400)
diff --git a/glance/tests/functional/test_api.py b/glance/tests/functional/test_api.py index 2c8e875f..471db472 100644 --- a/glance/tests/functional/test_api.py +++ b/glance/tests/functional/test_api.py @@ -31,6 +31,11 @@ class TestApiVersions(functional.FunctionalTest): url = 'http://127.0.0.1:%d/v%%s/' % self.api_port versions = {'versions': [ + { + 'status': 'EXPERIMENTAL', + 'id': 'v3.0', + 'links': [{'href': url % '3', "rel": "self"}], + }, { 'id': 'v2.3', 'status': 'CURRENT', @@ -78,6 +83,11 @@ class TestApiVersions(functional.FunctionalTest): url = 'http://127.0.0.1:%d/v%%s/' % self.api_port versions = {'versions': [ + { + 'status': 'EXPERIMENTAL', + 'id': 'v3.0', + 'links': [{'href': url % '3', "rel": "self"}], + }, { 'id': 'v2.3', 'status': 'CURRENT', @@ -111,6 +121,7 @@ class TestApiVersions(functional.FunctionalTest): def test_v1_api_configuration(self): self.api_server.enable_v1_api = True self.api_server.enable_v2_api = False + self.api_server.enable_v3_api = False self.start_servers(**self.__dict__.copy()) url = 'http://127.0.0.1:%d/v%%s/' % self.api_port @@ -143,6 +154,11 @@ class TestApiPaths(functional.FunctionalTest): url = 'http://127.0.0.1:%d/v%%s/' % self.api_port versions = {'versions': [ + { + 'status': 'EXPERIMENTAL', + 'id': 'v3.0', + 'links': [{'href': url % '3', "rel": "self"}], + }, { 'id': 'v2.3', 'status': 'CURRENT', diff --git a/glance/tests/unit/test_opts.py b/glance/tests/unit/test_opts.py index 1e76b7e8..67e1ea48 100644 --- a/glance/tests/unit/test_opts.py +++ b/glance/tests/unit/test_opts.py @@ -78,6 +78,7 @@ class OptsTestCase(utils.BaseTestCase): 'user_storage_quota', 'enable_v1_api', 'enable_v2_api', + 'enable_v3_api', 'enable_v1_registry', 'enable_v2_registry', 'pydev_worker_debug_host', @@ -166,6 +167,7 @@ class OptsTestCase(utils.BaseTestCase): 'user_storage_quota', 'enable_v1_api', 'enable_v2_api', + 'enable_v3_api', 'enable_v1_registry', 'enable_v2_registry', 'pydev_worker_debug_host', @@ -211,6 +213,7 @@ class OptsTestCase(utils.BaseTestCase): 'user_storage_quota', 'enable_v1_api', 'enable_v2_api', + 'enable_v3_api', 'enable_v1_registry', 'enable_v2_registry', 'pydev_worker_debug_host', @@ -260,6 +263,7 @@ class OptsTestCase(utils.BaseTestCase): 'user_storage_quota', 'enable_v1_api', 'enable_v2_api', + 'enable_v3_api', 'enable_v1_registry', 'enable_v2_registry', 'pydev_worker_debug_host', diff --git a/glance/tests/unit/test_versions.py b/glance/tests/unit/test_versions.py index 08f52996..8be83a39 100644 --- a/glance/tests/unit/test_versions.py +++ b/glance/tests/unit/test_versions.py @@ -34,6 +34,12 @@ class 
VersionsTest(base.IsolatedUnitTest): self.assertEqual('application/json', res.content_type) results = jsonutils.loads(res.body)['versions'] expected = [ + { + 'status': 'EXPERIMENTAL', + 'id': 'v3.0', + 'links': [{'href': 'http://127.0.0.1:9292/v3/', + 'rel': 'self'}], + }, { 'id': 'v2.3', 'status': 'CURRENT', @@ -83,6 +89,12 @@ class VersionsTest(base.IsolatedUnitTest): self.assertEqual('application/json', res.content_type) results = jsonutils.loads(res.body)['versions'] expected = [ + { + 'status': 'EXPERIMENTAL', + 'id': 'v3.0', + 'links': [{'href': 'https://example.com:9292/v3/', + 'rel': 'self'}], + }, { 'id': 'v2.3', 'status': 'CURRENT', @@ -170,12 +182,22 @@ class VersionNegotiationTest(base.IsolatedUnitTest): self.middleware.process_request(request) self.assertEqual('/v2/images', request.path_info) + def test_request_url_v3(self): + request = webob.Request.blank('/v3/artifacts') + self.middleware.process_request(request) + self.assertEqual('/v3/artifacts', request.path_info) + + def test_request_url_v3_0(self): + request = webob.Request.blank('/v3.0/artifacts') + self.middleware.process_request(request) + self.assertEqual('/v3/artifacts', request.path_info) + def test_request_url_v2_3_unsupported(self): request = webob.Request.blank('/v2.3/images') resp = self.middleware.process_request(request) self.assertIsInstance(resp, versions.Controller) - def test_request_url_v3_unsupported(self): - request = webob.Request.blank('/v3/images') + def test_request_url_v4_unsupported(self): + request = webob.Request.blank('/v4/images') resp = self.middleware.process_request(request) self.assertIsInstance(resp, versions.Controller) diff --git a/setup.cfg b/setup.cfg index ae139395..3ddf132f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -20,6 +20,7 @@ classifier = [entry_points] console_scripts = glance-api = glance.cmd.api:main + glance-artifacts = glance.cmd.artifacts:main glance-cache-prefetcher = glance.cmd.cache_prefetcher:main glance-cache-pruner = glance.cmd.cache_pruner:main glance-cache-manage = glance.cmd.cache_manage:main @@ -47,6 +48,8 @@ glance.database.metadata_backend = glance.search.index_backend = image = glance.search.plugins.images:ImageIndex metadef = glance.search.plugins.metadefs:MetadefIndex +glance.artifacts.types = + MyArtifact = glance.contrib.plugins.artifacts_sample:MY_ARTIFACT glance.flows = import = glance.async.flows.base_import:get_flow @@ -61,7 +64,7 @@ build-dir = doc/build source-dir = doc/source [egg_info] -tag_build = +tag_build = tag_date = 0 tag_svn_revision = 0
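The hunks above register v3.0 as an EXPERIMENTAL entry in the versions document and gate it behind the new enable_v3_api option, so a client can probe for the artifacts API before using it. A rough sketch, with the endpoint host/port and the `requests` dependency assumed for illustration only:

    # Check whether the experimental v3 (artifacts) API is deployed; the
    # host/port and the `requests` dependency are illustrative assumptions.
    import requests

    resp = requests.get('http://127.0.0.1:9292/')
    entries = resp.json()['versions']
    v3 = [v for v in entries if v['id'].startswith('v3')]
    if v3 and v3[0]['status'] == 'EXPERIMENTAL':
        print('artifacts API at %s' % v3[0]['links'][0]['href'])
    else:
        print('v3 not available (enable_v3_api is False or not deployed)')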