Catalog Index Service

Implements: blueprint catalog-index-service

* Glance Index and Search API Implementation
* Tool for indexing metadata definition (metadef) resources and images from
the Glance database into an Elasticsearch index

Change-Id: I6c27d032dea094c7bf5a30b02100170e265588d9
Co-Authored-By: Lakshmi N Sampath <lakshmi.sampath@hp.com>
Co-Authored-By: Kamil Rykowski <kamil.rykowski@intel.com>
Co-Authored-By: Travis Tripp <travis.tripp@hp.com>
Co-Authored-By: Wayne Okuma <wayne.okuma@hp.com>
Co-Authored-By: Steve McLellan <steve.mclellan@hp.com>
Lakshmi N Sampath 2014-12-01 22:43:00 -08:00
parent 835a1b87c4
commit 9911f962a4
21 changed files with 2903 additions and 1 deletion
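For reference, a minimal sketch of exercising the new endpoints once the service is running, assuming the default unauthenticated pipeline on the default port 9393 (the requests library is used here purely for illustration):

import json

import requests

# POST a match_all query to the v0.1 search endpoint; index/type default to
# everything the registered plugins expose.
body = {"query": {"match_all": {}}, "limit": 10}
resp = requests.post("http://127.0.0.1:9393/v0.1/search",
                     data=json.dumps(body),
                     headers={"Content-Type": "application/json"})
print(resp.json())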

etc/glance-search-paste.ini (new executable file, 23 lines added)

@@ -0,0 +1,23 @@
# Use this pipeline for no auth - DEFAULT
[pipeline:glance-search]
pipeline = unauthenticated-context rootapp
[pipeline:glance-search-keystone]
pipeline = authtoken context rootapp
[composite:rootapp]
paste.composite_factory = glance.api:root_app_factory
/v0.1: apiv0_1app
[app:apiv0_1app]
paste.app_factory = glance.search.api.v0_1.router:API.factory
[filter:unauthenticated-context]
paste.filter_factory = glance.api.middleware.context:UnauthenticatedContextMiddleware.factory
[filter:authtoken]
paste.filter_factory = keystonemiddleware.auth_token:filter_factory
delay_auth_decision = true
[filter:context]
paste.filter_factory = glance.api.middleware.context:ContextMiddleware.factory

etc/glance-search.conf (new executable file, 116 lines added)

@@ -0,0 +1,116 @@
[DEFAULT]
# Show more verbose log output (sets INFO log level output)
#verbose = False
# Show debugging output in logs (sets DEBUG log level output)
debug = True
# Address to bind the search server
bind_host = 0.0.0.0
# Port to bind the server to
bind_port = 9393
# Log to this file. Make sure you do not set the same log file for both the API
# and registry servers!
#
# If `log_file` is omitted and `use_syslog` is false, then log messages are
# sent to stdout as a fallback.
log_file = /var/log/glance/search.log
# Backlog requests when creating socket
backlog = 4096
# TCP_KEEPIDLE value in seconds when creating socket.
# Not supported on OS X.
#tcp_keepidle = 600
# Property Protections config file
# This file contains the rules for property protections and the roles/policies
# associated with it.
# If this config value is not specified, by default, property protections
# won't be enforced.
# If a value is specified and the file is not found, then the service
# will not start.
#property_protection_file =
# Specify whether 'roles' or 'policies' are used in the
# property_protection_file.
# The default value for property_protection_rule_format is 'roles'.
#property_protection_rule_format = roles
# http_keepalive option. If False, the server will return the header
# "Connection: close"; if True, the server will return "Connection: Keep-Alive"
# in its responses. To close the client socket connection explicitly after
# the response is sent and read successfully by the client, set this option
# to False when you create a wsgi server.
#http_keepalive = True
# ================= Syslog Options ============================
# Send logs to syslog (/dev/log) instead of to file specified
# by `log_file`
#use_syslog = False
# Facility to use. If unset defaults to LOG_USER.
#syslog_log_facility = LOG_LOCAL0
# ================= SSL Options ===============================
# Certificate file to use when starting API server securely
#cert_file = /path/to/certfile
# Private key file to use when starting API server securely
#key_file = /path/to/keyfile
# CA certificate file to use to verify connecting clients
#ca_file = /path/to/cafile
# =============== Policy Options ==================================
# The JSON file that defines policies.
policy_file = search-policy.json
# Default rule. Enforced when a requested rule is not found.
#policy_default_rule = default
# Directories where policy configuration files are stored.
# They can be relative to any directory in the search path
# defined by the config_dir option, or absolute paths.
# The file defined by policy_file must exist for these
# directories to be searched.
#policy_dirs = policy.d
[paste_deploy]
# Name of the paste configuration file that defines the available pipelines
# config_file = glance-search-paste.ini
# Partial name of a pipeline in your paste configuration file with the
# service name removed. For example, if your paste section name is
# [pipeline:glance-registry-keystone], you would configure the flavor below
# as 'keystone'.
#flavor=
#
[database]
# The SQLAlchemy connection string used to connect to the
# database (string value)
# Deprecated group/name - [DEFAULT]/sql_connection
# Deprecated group/name - [DATABASE]/sql_connection
# Deprecated group/name - [sql]/connection
#connection = <None>
[keystone_authtoken]
identity_uri = http://127.0.0.1:35357
admin_tenant_name = %SERVICE_TENANT_NAME%
admin_user = %SERVICE_USER%
admin_password = %SERVICE_PASSWORD%
revocation_cache_time = 10
# =============== ElasticSearch Options =======================
[elasticsearch]
# List of nodes where Elasticsearch instances are running. A single node
# should be defined as an IP address and port number.
# The default is ['127.0.0.1:9200']
#hosts = ['127.0.0.1:9200']

etc/search-policy.json (new file, 7 lines added)

@@ -0,0 +1,7 @@
{
"context_is_admin": "role:admin",
"default": "",
"catalog_index": "role:admin",
"catalog_search": ""
}

@@ -676,3 +676,20 @@ class MetadefTagFactoryProxy(glance.domain.proxy.MetadefTagFactory):
meta_tag_factory,
meta_tag_proxy_class=MetadefTagProxy,
meta_tag_proxy_kwargs=proxy_kwargs)
# Catalog Search classes
class CatalogSearchRepoProxy(object):
def __init__(self, search_repo, context, search_policy):
self.context = context
self.policy = search_policy
self.search_repo = search_repo
def search(self, *args, **kwargs):
self.policy.enforce(self.context, 'catalog_search', {})
return self.search_repo.search(*args, **kwargs)
def index(self, *args, **kwargs):
self.policy.enforce(self.context, 'catalog_index', {})
return self.search_repo.index(*args, **kwargs)

glance/cmd/index.py (new file, 52 lines added)

@@ -0,0 +1,52 @@
# Copyright 2015 Intel Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
from oslo_config import cfg
from oslo_log import log as logging
import stevedore
from glance.common import config
from glance import i18n
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
_LE = i18n._LE
def main():
try:
logging.register_options(CONF)
cfg_files = cfg.find_config_files(project='glance',
prog='glance-api')
cfg_files.extend(cfg.find_config_files(project='glance',
prog='glance-search'))
config.parse_args(default_config_files=cfg_files)
logging.setup(CONF, 'glance')
namespace = 'glance.search.index_backend'
ext_manager = stevedore.extension.ExtensionManager(
namespace, invoke_on_load=True)
for ext in ext_manager.extensions:
try:
ext.obj.setup()
except Exception as e:
LOG.error(_LE("Failed to setup index extension "
"%(ext)s: %(e)s") % {'ext': ext.name,
'e': e})
except RuntimeError as e:
sys.exit("ERROR: %s" % e)
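A minimal sketch of invoking the indexing tool programmatically (in the tree this is expected to be wired up as a console script; the direct import below is purely illustrative and assumes the glance-api/glance-search config files are discoverable on the default search path):

from glance.cmd import index

if __name__ == '__main__':
    index.main()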

glance/cmd/search.py (new executable file, 93 lines added)

@@ -0,0 +1,93 @@
#!/usr/bin/env python
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Glance Catalog Search Server
"""
import os
import sys
import eventlet
from glance.common import utils
# Monkey patch socket, time, select, threads
eventlet.patcher.monkey_patch(socket=True, time=True, select=True,
thread=True, os=True)
# If ../glance/__init__.py exists, add ../ to Python search path, so that
# it will override what happens to be installed in /usr/(local/)lib/python...
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir,
os.pardir))
if os.path.exists(os.path.join(possible_topdir, 'glance', '__init__.py')):
sys.path.insert(0, possible_topdir)
from oslo.config import cfg
from oslo_log import log as logging
import osprofiler.notifier
import osprofiler.web
from glance.common import config
from glance.common import exception
from glance.common import wsgi
from glance import notifier
CONF = cfg.CONF
CONF.import_group("profiler", "glance.common.wsgi")
logging.register_options(CONF)
KNOWN_EXCEPTIONS = (RuntimeError,
exception.WorkerCreationFailure)
def fail(e):
global KNOWN_EXCEPTIONS
return_code = KNOWN_EXCEPTIONS.index(type(e)) + 1
sys.stderr.write("ERROR: %s\n" % utils.exception_to_str(e))
sys.exit(return_code)
def main():
try:
config.parse_args()
wsgi.set_eventlet_hub()
logging.setup(CONF, 'glance')
if cfg.CONF.profiler.enabled:
_notifier = osprofiler.notifier.create("Messaging",
notifier.messaging, {},
notifier.get_transport(),
"glance", "search",
cfg.CONF.bind_host)
osprofiler.notifier.set(_notifier)
else:
osprofiler.web.disable()
server = wsgi.Server()
server.start(config.load_paste_app('glance-search'),
default_port=9393)
server.wait()
except KNOWN_EXCEPTIONS as e:
fail(e)
if __name__ == '__main__':
main()
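A development launch sketch; since config.parse_args() falls back to the process arguments, the server can also be started from Python against the sample configuration (the config path below is illustrative):

import sys

from glance.cmd import search

# Point the server at the sample configuration shipped in etc/.
sys.argv = ['glance-search', '--config-file', 'etc/glance-search.conf']
search.main()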

@@ -25,16 +25,18 @@ import glance.domain
import glance.location
import glance.notifier
import glance.quota
import glance.search
class Gateway(object):
def __init__(self, db_api=None, store_api=None, notifier=None,
policy_enforcer=None):
policy_enforcer=None, es_api=None):
self.db_api = db_api or glance.db.get_api()
self.store_api = store_api or glance_store
self.store_utils = store_utils
self.notifier = notifier or glance.notifier.Notifier()
self.policy = policy_enforcer or policy.Enforcer()
self.es_api = es_api or glance.search.get_api()
def get_image_factory(self, context):
image_factory = glance.domain.ImageFactory()
@@ -231,3 +233,9 @@ class Gateway(object):
authorized_tag_repo = authorization.MetadefTagRepoProxy(
notifier_tag_repo, context)
return authorized_tag_repo
def get_catalog_search_repo(self, context):
search_repo = glance.search.CatalogSearchRepo(context, self.es_api)
policy_search_repo = policy.CatalogSearchRepoProxy(
search_repo, context, self.policy)
return policy_search_repo
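For orientation, a rough sketch of how a caller obtains the policy-wrapped search repository from the gateway (the admin context is illustrative, and a configured Glance environment plus a reachable Elasticsearch node are assumed):

import glance.context
import glance.gateway

ctxt = glance.context.RequestContext(is_admin=True)

gateway = glance.gateway.Gateway()                   # es_api defaults to glance.search.get_api()
search_repo = gateway.get_catalog_search_repo(ctxt)  # CatalogSearchRepoProxy wrapping CatalogSearchRepo
results = search_repo.search('glance', 'image', {'query': {'match_all': {}}},
                             fields=[], offset=0, limit=10)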

glance/search/__init__.py (new file, 60 lines added)

@@ -0,0 +1,60 @@
# Copyright 2014 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import elasticsearch
from elasticsearch import helpers
from oslo_config import cfg
search_opts = [
cfg.ListOpt('hosts', default=['127.0.0.1:9200'],
help='List of nodes where Elasticsearch instances are '
'running. A single node should be defined as an IP '
'address and port number.'),
]
CONF = cfg.CONF
CONF.register_opts(search_opts, group='elasticsearch')
def get_api():
es_hosts = CONF.elasticsearch.hosts
es_api = elasticsearch.Elasticsearch(hosts=es_hosts)
return es_api
class CatalogSearchRepo(object):
def __init__(self, context, es_api):
self.context = context
self.es_api = es_api
def search(self, index, doc_type, query, fields, offset, limit,
ignore_unavailable=True):
return self.es_api.search(
index=index,
doc_type=doc_type,
body=query,
_source_include=fields,
from_=offset,
size=limit,
ignore_unavailable=ignore_unavailable)
def index(self, default_index, default_type, actions):
return helpers.bulk(
client=self.es_api,
index=default_index,
doc_type=default_type,
actions=actions)
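A small sketch of the repository's bulk indexing path on its own, bypassing the policy proxy (assumes an Elasticsearch node at the configured default of 127.0.0.1:9200; the document content is invented for illustration):

import glance.search

es_api = glance.search.get_api()
repo = glance.search.CatalogSearchRepo(context=None, es_api=es_api)

# Bulk-index a single document into the 'glance' index as doc type 'image'.
actions = [{'_op_type': 'index', '_id': 'doc-1',
            '_source': {'name': 'cirros', 'visibility': 'public'}}]
success, errors = repo.index('glance', 'image', actions)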

glance/search/api/__init__.py (new executable file, 20 lines added)

@@ -0,0 +1,20 @@
# Copyright 2014 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import paste.urlmap
def root_app_factory(loader, global_conf, **local_conf):
return paste.urlmap.urlmap_factory(loader, global_conf, **local_conf)

@@ -0,0 +1,55 @@
# Copyright 2014 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from glance.common import wsgi
from glance.search.api.v0_1 import search
class API(wsgi.Router):
"""WSGI router for Glance Catalog Search v0_1 API requests."""
def __init__(self, mapper):
reject_method_resource = wsgi.Resource(wsgi.RejectMethodController())
search_catalog_resource = search.create_resource()
mapper.connect('/search',
controller=search_catalog_resource,
action='search',
conditions={'method': ['GET']})
mapper.connect('/search',
controller=search_catalog_resource,
action='search',
conditions={'method': ['POST']})
mapper.connect('/search',
controller=reject_method_resource,
action='reject',
allowed_methods='GET, POST',
conditions={'method': ['PUT', 'DELETE',
'PATCH', 'HEAD']})
mapper.connect('/index',
controller=search_catalog_resource,
action='index',
conditions={'method': ['POST']})
mapper.connect('/index',
controller=reject_method_resource,
action='reject',
allowed_methods='POST',
conditions={'method': ['GET', 'PUT', 'DELETE',
'PATCH', 'HEAD']})
super(API, self).__init__(mapper)
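# Routes exposed by this router once the paste composite mounts it at /v0.1
# (informational summary):
#   GET  /v0.1/search  -> SearchController.search
#   POST /v0.1/search  -> SearchController.search
#   POST /v0.1/index   -> SearchController.index
# Other methods on these paths hit the reject controller, which responds with
# the allowed methods.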

glance/search/api/v0_1/search.py (new executable file, 373 lines added)

@@ -0,0 +1,373 @@
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from oslo.config import cfg
from oslo_log import log as logging
import six
import stevedore
import webob.exc
from glance.api import policy
from glance.common import exception
from glance.common import utils
from glance.common import wsgi
import glance.db
import glance.gateway
from glance import i18n
import glance.notifier
import glance.schema
LOG = logging.getLogger(__name__)
_ = i18n._
_LE = i18n._LE
CONF = cfg.CONF
class SearchController(object):
def __init__(self, plugins=None, es_api=None, policy_enforcer=None):
self.es_api = es_api or glance.search.get_api()
self.policy = policy_enforcer or policy.Enforcer()
self.gateway = glance.gateway.Gateway(
es_api=self.es_api,
policy_enforcer=self.policy)
self.plugins = plugins or []
def search(self, req, query, index, doc_type=None, fields=None, offset=0,
limit=10):
if fields is None:
fields = []
try:
search_repo = self.gateway.get_catalog_search_repo(req.context)
result = search_repo.search(index,
doc_type,
query,
fields,
offset,
limit,
True)
for plugin in self.plugins:
result = plugin.obj.filter_result(result, req.context)
return result
except exception.Forbidden as e:
raise webob.exc.HTTPForbidden(explanation=e.msg)
except exception.NotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.msg)
except exception.Duplicate as e:
raise webob.exc.HTTPConflict(explanation=e.msg)
except Exception as e:
LOG.error(utils.exception_to_str(e))
raise webob.exc.HTTPInternalServerError()
def index(self, req, actions, default_index=None, default_type=None):
try:
search_repo = self.gateway.get_catalog_search_repo(req.context)
success, errors = search_repo.index(
default_index,
default_type,
actions)
return {
'success': success,
'failed': len(errors),
'errors': errors,
}
except exception.Forbidden as e:
raise webob.exc.HTTPForbidden(explanation=e.msg)
except exception.NotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.msg)
except exception.Duplicate as e:
raise webob.exc.HTTPConflict(explanation=e.msg)
except Exception as e:
LOG.error(utils.exception_to_str(e))
raise webob.exc.HTTPInternalServerError()
class RequestDeserializer(wsgi.JSONRequestDeserializer):
_disallowed_properties = ['self', 'schema']
def __init__(self, plugins, schema=None):
super(RequestDeserializer, self).__init__()
self.plugins = plugins
def _get_request_body(self, request):
output = super(RequestDeserializer, self).default(request)
if 'body' not in output:
msg = _('Body expected in request.')
raise webob.exc.HTTPBadRequest(explanation=msg)
return output['body']
@classmethod
def _check_allowed(cls, query):
for key in cls._disallowed_properties:
if key in query:
msg = _("Attribute '%s' is read-only.") % key
raise webob.exc.HTTPForbidden(explanation=msg)
def _get_available_indices(self):
return list(set([p.obj.get_index_name() for p in self.plugins]))
def _get_available_types(self):
return list(set([p.obj.get_document_type() for p in self.plugins]))
def _validate_index(self, index):
available_indices = self._get_available_indices()
if index not in available_indices:
msg = _("Index '%s' is not supported.") % index
raise webob.exc.HTTPBadRequest(explanation=msg)
return index
def _validate_doc_type(self, doc_type):
available_types = self._get_available_types()
if doc_type not in available_types:
msg = _("Document type '%s' is not supported.") % doc_type
raise webob.exc.HTTPBadRequest(explanation=msg)
return doc_type
def _validate_offset(self, offset):
try:
offset = int(offset)
except ValueError:
msg = _("offset param must be an integer")
raise webob.exc.HTTPBadRequest(explanation=msg)
if offset < 0:
msg = _("offset param must be positive")
raise webob.exc.HTTPBadRequest(explanation=msg)
return offset
def _validate_limit(self, limit):
try:
limit = int(limit)
except ValueError:
msg = _("limit param must be an integer")
raise webob.exc.HTTPBadRequest(explanation=msg)
if limit < 1:
msg = _("limit param must be positive")
raise webob.exc.HTTPBadRequest(explanation=msg)
return limit
def _validate_actions(self, actions):
if not actions:
msg = _("actions param cannot be empty")
raise webob.exc.HTTPBadRequest(explanation=msg)
output = []
allowed_action_types = ['create', 'update', 'delete', 'index']
for action in actions:
action_type = action.get('action', 'index')
document_id = action.get('id')
document_type = action.get('type')
index_name = action.get('index')
data = action.get('data', {})
script = action.get('script')
if index_name is not None:
index_name = self._validate_index(index_name)
if document_type is not None:
document_type = self._validate_doc_type(document_type)
if action_type not in allowed_action_types:
msg = _("Invalid action type: '%s'") % action_type
raise webob.exc.HTTPBadRequest(explanation=msg)
elif (action_type in ['create', 'update', 'index'] and
not any([data, script])):
msg = (_("Action type '%s' requires data or script param.") %
action_type)
raise webob.exc.HTTPBadRequest(explanation=msg)
elif action_type in ['update', 'delete'] and not document_id:
msg = (_("Action type '%s' requires ID of the document.") %
action_type)
raise webob.exc.HTTPBadRequest(explanation=msg)
bulk_action = {
'_op_type': action_type,
'_id': document_id,
'_index': index_name,
'_type': document_type,
}
if script:
data_field = 'params'
bulk_action['script'] = script
elif action_type == 'update':
data_field = 'doc'
else:
data_field = '_source'
bulk_action[data_field] = data
output.append(bulk_action)
return output
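# Example (illustrative): a request action such as
#   {'action': 'update', 'id': '1', 'index': 'glance', 'type': 'image',
#    'data': {'name': 'new-name'}}
# is rewritten into the bulk-helper form
#   {'_op_type': 'update', '_id': '1', '_index': 'glance',
#    '_type': 'image', 'doc': {'name': 'new-name'}}
# while a 'delete' action only needs its '_id' (its data ends up in
# '_source', which the bulk helpers ignore for deletes).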
def _get_query(self, context, query, doc_types):
is_admin = context.is_admin
if is_admin:
query_params = {
'query': {
'query': query
}
}
else:
filtered_query_list = []
for plugin in self.plugins:
try:
doc_type = plugin.obj.get_document_type()
rbac_filter = plugin.obj.get_rbac_filter(context)
except Exception as e:
LOG.error(_LE("Failed to retrieve RBAC filters "
"from search plugin "
"%(ext)s: %(e)s") % {'ext': plugin.name, 'e': e})
# Skip this plugin; doc_type and rbac_filter would otherwise be
# undefined (or stale) below.
continue
if doc_type in doc_types:
filter_query = {
"query": query,
"filter": rbac_filter
}
filtered_query = {
'filtered': filter_query
}
filtered_query_list.append(filtered_query)
query_params = {
'query': {
'query': {
"bool": {
"should": filtered_query_list
},
}
}
}
return query_params
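# Example (illustrative): for a non-admin context the body sent to
# Elasticsearch wraps each plugin's RBAC filter around the user query:
#   {'query': {'query': {'bool': {'should': [
#       {'filtered': {'query': <user query>,
#                     'filter': <rbac filter from one plugin>}},
#       ... one entry per requested doc_type ...
#   ]}}}}
# An admin context skips the filters and sends {'query': {'query': <query>}}.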
def search(self, request):
body = self._get_request_body(request)
self._check_allowed(body)
query = body.pop('query', None)
indices = body.pop('index', None)
doc_types = body.pop('type', None)
fields = body.pop('fields', None)
offset = body.pop('offset', None)
limit = body.pop('limit', None)
highlight = body.pop('highlight', None)
if not indices:
indices = self._get_available_indices()
elif not isinstance(indices, (list, tuple)):
indices = [indices]
if not doc_types:
doc_types = self._get_available_types()
elif not isinstance(doc_types, (list, tuple)):
doc_types = [doc_types]
query_params = self._get_query(request.context, query, doc_types)
query_params['index'] = [self._validate_index(index)
for index in indices]
query_params['doc_type'] = [self._validate_doc_type(doc_type)
for doc_type in doc_types]
if fields is not None:
query_params['fields'] = fields
if offset is not None:
query_params['offset'] = self._validate_offset(offset)
if limit is not None:
query_params['limit'] = self._validate_limit(limit)
if highlight is not None:
query_params['query']['highlight'] = highlight
return query_params
def index(self, request):
body = self._get_request_body(request)
self._check_allowed(body)
default_index = body.pop('default_index', None)
if default_index is not None:
default_index = self._validate_index(default_index)
default_type = body.pop('default_type', None)
if default_type is not None:
default_type = self._validate_doc_type(default_type)
actions = self._validate_actions(body.pop('actions', None))
if not all([default_index, default_type]):
for action in actions:
if not any([action['_index'], default_index]):
msg = (_("Action index is missing and no default "
"index has been set."))
raise webob.exc.HTTPBadRequest(explanation=msg)
if not any([action['_type'], default_type]):
msg = (_("Action document type is missing and no default "
"type has been set."))
raise webob.exc.HTTPBadRequest(explanation=msg)
query_params = {
'default_index': default_index,
'default_type': default_type,
'actions': actions,
}
return query_params
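# Example (illustrative) request body accepted by this deserializer:
#   {
#       "default_index": "glance",
#       "default_type": "image",
#       "actions": [
#           {"action": "index", "id": "1", "data": {"name": "image-1"}},
#           {"action": "delete", "id": "2"}
#       ]
#   }
# Each action may also carry its own "index"/"type" to override the defaults.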
class ResponseSerializer(wsgi.JSONResponseSerializer):
def __init__(self, schema=None):
super(ResponseSerializer, self).__init__()
self.schema = schema
def search(self, response, query_result):
body = json.dumps(query_result, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.content_type = 'application/json'
def index(self, response, query_result):
body = json.dumps(query_result, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.content_type = 'application/json'
def get_plugins():
namespace = 'glance.search.index_backend'
ext_manager = stevedore.extension.ExtensionManager(
namespace, invoke_on_load=True)
return ext_manager.extensions
def create_resource():
"""Search resource factory method"""
plugins = get_plugins()
deserializer = RequestDeserializer(plugins)
serializer = ResponseSerializer()
controller = SearchController(plugins)
return wsgi.Resource(controller, deserializer, serializer)

@@ -0,0 +1,119 @@
# Copyright 2015 Intel Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
from elasticsearch import helpers
import six
import glance.search
@six.add_metaclass(abc.ABCMeta)
class IndexBase(object):
chunk_size = 200
def __init__(self):
self.engine = glance.search.get_api()
self.index_name = self.get_index_name()
self.document_type = self.get_document_type()
def setup(self):
"""Comprehensively install search engine index and put data into it."""
self.setup_index()
self.setup_mapping()
self.setup_data()
def setup_index(self):
"""Create the index if it doesn't exist and update its settings."""
index_exists = self.engine.indices.exists(self.index_name)
if not index_exists:
self.engine.indices.create(index=self.index_name)
index_settings = self.get_settings()
if index_settings:
self.engine.indices.put_settings(index=self.index_name,
body=index_settings)
return index_exists
def setup_mapping(self):
"""Update index document mapping."""
index_mapping = self.get_mapping()
if index_mapping:
self.engine.indices.put_mapping(index=self.index_name,
doc_type=self.document_type,
body=index_mapping)
def setup_data(self):
"""Insert all objects from database into search engine."""
object_list = self.get_objects()
documents = []
for obj in object_list:
document = self.serialize(obj)
documents.append(document)
self.save_documents(documents)
def save_documents(self, documents, id_field='id'):
"""Send list of serialized documents into search engine."""
actions = []
for document in documents:
action = {
'_id': document.get(id_field),
'_source': document,
}
actions.append(action)
helpers.bulk(
client=self.engine,
index=self.index_name,
doc_type=self.document_type,
chunk_size=self.chunk_size,
actions=actions)
@abc.abstractmethod
def get_objects(self):
"""Get list of all objects which will be indexed into search engine."""
@abc.abstractmethod
def serialize(self, obj):
"""Serialize database object into valid search engine document."""
@abc.abstractmethod
def get_index_name(self):
"""Get name of the index."""
@abc.abstractmethod
def get_document_type(self):
"""Get name of the document type."""
@abc.abstractmethod
def get_rbac_filter(self, request_context):
"""Get rbac filter as es json filter dsl."""
def filter_result(self, result, request_context):
"""Filter the outgoing search result."""
return result
def get_settings(self):
"""Get the index settings."""
return {}
def get_mapping(self):
"""Get the index mapping."""
return {}
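Additional resource types can be indexed by subclassing IndexBase and registering the subclass under the glance.search.index_backend entry-point namespace used by glance/cmd/index.py and get_plugins(). A minimal, hypothetical sketch (the NoteIndex name and its static data are invented for illustration):

from glance.search.plugins import base


class NoteIndex(base.IndexBase):
    """Hypothetical plugin indexing simple note documents."""

    def get_index_name(self):
        return 'glance'

    def get_document_type(self):
        return 'note'

    def get_mapping(self):
        return {'properties': {
            'id': {'type': 'string', 'index': 'not_analyzed'},
            'text': {'type': 'string'}}}

    def get_objects(self):
        # Would normally query the database; static data keeps the sketch short.
        return [{'id': '1', 'text': 'hello'}]

    def serialize(self, obj):
        return {'id': obj['id'], 'text': obj['text']}

    def get_rbac_filter(self, request_context):
        # Everyone may see notes in this sketch; only restrict by doc type.
        return [{'type': {'value': self.get_document_type()}}]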

@@ -0,0 +1,152 @@
# Copyright 2015 Intel Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy.orm import joinedload
from oslo_utils import timeutils
from glance.api import policy
from glance.common import property_utils
import glance.db
from glance.db.sqlalchemy import models
from glance.search.plugins import base
class ImageIndex(base.IndexBase):
def __init__(self, db_api=None, policy_enforcer=None):
super(ImageIndex, self).__init__()
self.db_api = db_api or glance.db.get_api()
self.policy = policy_enforcer or policy.Enforcer()
if property_utils.is_property_protection_enabled():
self.property_rules = property_utils.PropertyRules(self.policy)
self._image_base_properties = [
'checksum', 'created_at', 'container_format', 'disk_format', 'id',
'min_disk', 'min_ram', 'name', 'size', 'virtual_size', 'status',
'tags', 'updated_at', 'visibility', 'protected', 'owner',
'members']
def get_index_name(self):
return 'glance'
def get_document_type(self):
return 'image'
def get_mapping(self):
return {
'dynamic': True,
'properties': {
'id': {'type': 'string', 'index': 'not_analyzed'},
'name': {'type': 'string'},
'description': {'type': 'string'},
'tags': {'type': 'string'},
'disk_format': {'type': 'string'},
'container_format': {'type': 'string'},
'size': {'type': 'long'},
'virtual_size': {'type': 'long'},
'status': {'type': 'string'},
'visibility': {'type': 'string'},
'checksum': {'type': 'string'},
'min_disk': {'type': 'long'},
'min_ram': {'type': 'long'},
'owner': {'type': 'string', 'index': 'not_analyzed'},
'protected': {'type': 'boolean'},
'members': {'type': 'string', 'index': 'not_analyzed'},
"created_at": {'type': 'date'},
"updated_at": {'type': 'date'}
},
}
def get_rbac_filter(self, request_context):
return [
{
"and": [
{
'or': [
{
'term': {
'owner': request_context.owner
}
},
{
'term': {
'visibility': 'public'
}
},
{
'term': {
'members': request_context.tenant
}
}
]
},
{
'type': {
'value': self.get_document_type()
}
}
]
}
]
def filter_result(self, result, request_context):
if property_utils.is_property_protection_enabled():
hits = result['hits']['hits']
for hit in hits:
if hit['_type'] == self.get_document_type():
source = hit['_source']
for key in source.keys():
if key not in self._image_base_properties:
if not self.property_rules.check_property_rules(
key, 'read', request_context):
del hit['_source'][key]
return result
def get_objects(self):
session = self.db_api.get_session()
images = session.query(models.Image).options(
joinedload('properties'), joinedload('members'), joinedload('tags')
).filter_by(deleted=False)
return images
def serialize(self, obj):
visibility = 'public' if obj.is_public else 'private'
members = []
for member in obj.members:
if member.status == 'accepted' and member.deleted == 0:
members.append(member.member)
document = {
'id': obj.id,
'name': obj.name,
'tags': obj.tags,
'disk_format': obj.disk_format,
'container_format': obj.container_format,
'size': obj.size,
'virtual_size': obj.virtual_size,
'status': obj.status,
'visibility': visibility,
'checksum': obj.checksum,
'min_disk': obj.min_disk,
'min_ram': obj.min_ram,
'owner': obj.owner,
'protected': obj.protected,
'members': members,
'created_at': timeutils.isotime(obj.created_at),
'updated_at': timeutils.isotime(obj.updated_at)
}
for image_property in obj.properties:
document[image_property.name] = image_property.value
return document

@@ -0,0 +1,230 @@
# Copyright 2015 Intel Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import six
import glance.db
from glance.db.sqlalchemy import models_metadef as models
from glance.search.plugins import base
class MetadefIndex(base.IndexBase):
def __init__(self):
super(MetadefIndex, self).__init__()
self.db_api = glance.db.get_api()
def get_index_name(self):
return 'glance'
def get_document_type(self):
return 'metadef'
def get_mapping(self):
property_mapping = {
'dynamic': True,
'type': 'nested',
'properties': {
'property': {'type': 'string', 'index': 'not_analyzed'},
'type': {'type': 'string'},
'title': {'type': 'string'},
'description': {'type': 'string'},
}
}
mapping = {
'_id': {
'path': 'namespace',
},
'properties': {
'display_name': {'type': 'string'},
'description': {'type': 'string'},
'namespace': {'type': 'string', 'index': 'not_analyzed'},
'owner': {'type': 'string', 'index': 'not_analyzed'},
'visibility': {'type': 'string', 'index': 'not_analyzed'},
'resource_types': {
'type': 'nested',
'properties': {
'name': {'type': 'string'},
'prefix': {'type': 'string'},
'properties_target': {'type': 'string'},
},
},
'objects': {
'type': 'nested',
'properties': {
'id': {'type': 'string', 'index': 'not_analyzed'},
'name': {'type': 'string'},
'description': {'type': 'string'},
'properties': property_mapping,
}
},
'properties': property_mapping,
'tags': {
'type': 'nested',
'properties': {
'name': {'type': 'string'},
}
}
},
}
return mapping
def get_rbac_filter(self, request_context):
# TODO(krykowski): Define base get_rbac_filter in IndexBase class
# which will provide some common subset of query pieces.
# Something like:
# def get_common_context_pieces(self, request_context):
# return [{'term': {'owner': request_context.owner,
# 'type': {'value': self.get_document_type()}}]
return [
{
"and": [
{
'or': [
{
'term': {
'owner': request_context.owner
}
},
{
'term': {
'visibility': 'public'
}
}
]
},
{
'type': {
'value': self.get_document_type()
}
}
]
}
]
def get_objects(self):
session = self.db_api.get_session()
namespaces = session.query(models.MetadefNamespace).all()
resource_types = session.query(models.MetadefResourceType).all()
resource_types_map = {r.id: r.name for r in resource_types}
for namespace in namespaces:
namespace.resource_types = self.get_namespace_resource_types(
namespace.id, resource_types_map)
namespace.objects = self.get_namespace_objects(namespace.id)
namespace.properties = self.get_namespace_properties(namespace.id)
namespace.tags = self.get_namespace_tags(namespace.id)
return namespaces
def get_namespace_resource_types(self, namespace_id, resource_types):
session = self.db_api.get_session()
namespace_resource_types = session.query(
models.MetadefNamespaceResourceType
).filter_by(namespace_id=namespace_id)
resource_associations = [{
'prefix': r.prefix,
'properties_target': r.properties_target,
'name': resource_types[r.resource_type_id],
} for r in namespace_resource_types]
return resource_associations
def get_namespace_properties(self, namespace_id):
session = self.db_api.get_session()
properties = session.query(
models.MetadefProperty
).filter_by(namespace_id=namespace_id)
return list(properties)
def get_namespace_objects(self, namespace_id):
session = self.db_api.get_session()
namespace_objects = session.query(
models.MetadefObject
).filter_by(namespace_id=namespace_id)
return list(namespace_objects)
def get_namespace_tags(self, namespace_id):
session = self.db_api.get_session()
namespace_tags = session.query(
models.MetadefTag
).filter_by(namespace_id=namespace_id)
return list(namespace_tags)
def serialize(self, obj):
object_docs = [self.serialize_object(ns_obj) for ns_obj in obj.objects]
property_docs = [self.serialize_property(prop.name, prop.json_schema)
for prop in obj.properties]
resource_type_docs = [self.serialize_namespace_resource_type(rt)
for rt in obj.resource_types]
tag_docs = [self.serialize_tag(tag) for tag in obj.tags]
namespace_doc = self.serialize_namespace(obj)
namespace_doc.update({
'objects': object_docs,
'properties': property_docs,
'resource_types': resource_type_docs,
'tags': tag_docs,
})
return namespace_doc
def serialize_namespace(self, namespace):
return {
'namespace': namespace.namespace,
'display_name': namespace.display_name,
'description': namespace.description,
'visibility': namespace.visibility,
'protected': namespace.protected,
'owner': namespace.owner,
}
def serialize_object(self, obj):
obj_properties = obj.json_schema
property_docs = []
for name, schema in six.iteritems(obj_properties):
property_doc = self.serialize_property(name, schema)
property_docs.append(property_doc)
document = {
'name': obj.name,
'description': obj.description,
'properties': property_docs,
}
return document
def serialize_property(self, name, schema):
document = copy.deepcopy(schema)
document['property'] = name
if 'default' in document:
document['default'] = str(document['default'])
if 'enum' in document:
document['enum'] = map(str, document['enum'])
return document
def serialize_namespace_resource_type(self, ns_resource_type):
return {
'name': ns_resource_type['name'],
'prefix': ns_resource_type['prefix'],
'properties_target': ns_resource_type['properties_target']
}
def serialize_tag(self, tag):
return {
'name': tag.name
}

@@ -0,0 +1,655 @@
# Copyright 2015 Intel Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import mock
from oslo_utils import timeutils
from glance.search.plugins import images as images_plugin
from glance.search.plugins import metadefs as metadefs_plugin
import glance.tests.unit.utils as unit_test_utils
import glance.tests.utils as test_utils
DATETIME = datetime.datetime(2012, 5, 16, 15, 27, 36, 325355)
DATE1 = timeutils.isotime(DATETIME)
# General
USER1 = '54492ba0-f4df-4e4e-be62-27f4d76b29cf'
TENANT1 = '6838eb7b-6ded-434a-882c-b344c77fe8df'
TENANT2 = '2c014f32-55eb-467d-8fcb-4bd706012f81'
TENANT3 = '5a3e60e8-cfa9-4a9e-a90a-62b42cea92b8'
TENANT4 = 'c6c87f25-8a94-47ed-8c83-053c25f42df4'
# Images
UUID1 = 'c80a1a6c-bd1f-41c5-90ee-81afedb1d58d'
UUID2 = 'a85abd86-55b3-4d5b-b0b4-5d0a6e6042fc'
UUID3 = '971ec09a-8067-4bc8-a91f-ae3557f1c4c7'
UUID4 = '6bbe7cc2-eae7-4c0f-b50d-a7160b0c6a86'
CHECKSUM = '93264c3edf5972c9f1cb309543d38a5c'
# Metadefinitions
NAMESPACE1 = 'namespace1'
NAMESPACE2 = 'namespace2'
PROPERTY1 = 'Property1'
PROPERTY2 = 'Property2'
PROPERTY3 = 'Property3'
OBJECT1 = 'Object1'
OBJECT2 = 'Object2'
OBJECT3 = 'Object3'
RESOURCE_TYPE1 = 'ResourceType1'
RESOURCE_TYPE2 = 'ResourceType2'
RESOURCE_TYPE3 = 'ResourceType3'
TAG1 = 'Tag1'
TAG2 = 'Tag2'
TAG3 = 'Tag3'
class DictObj(object):
def __init__(self, **entries):
self.__dict__.update(entries)
def _image_fixture(image_id, **kwargs):
image_members = kwargs.pop('members', [])
extra_properties = kwargs.pop('extra_properties', {})
obj = {
'id': image_id,
'name': None,
'is_public': False,
'properties': {},
'checksum': None,
'owner': None,
'status': 'queued',
'tags': [],
'size': None,
'virtual_size': None,
'locations': [],
'protected': False,
'disk_format': None,
'container_format': None,
'deleted': False,
'min_ram': None,
'min_disk': None,
'created_at': DATETIME,
'updated_at': DATETIME,
}
obj.update(kwargs)
image = DictObj(**obj)
image.tags = set(image.tags)
image.properties = [DictObj(name=k, value=v)
for k, v in extra_properties.items()]
image.members = [DictObj(**m) for m in image_members]
return image
def _db_namespace_fixture(**kwargs):
obj = {
'namespace': None,
'display_name': None,
'description': None,
'visibility': True,
'protected': False,
'owner': None
}
obj.update(kwargs)
return DictObj(**obj)
def _db_property_fixture(name, **kwargs):
obj = {
'name': name,
'json_schema': {"type": "string", "title": "title"},
}
obj.update(kwargs)
return DictObj(**obj)
def _db_object_fixture(name, **kwargs):
obj = {
'name': name,
'description': None,
'json_schema': {},
'required': '[]',
}
obj.update(kwargs)
return DictObj(**obj)
def _db_resource_type_fixture(name, **kwargs):
obj = {
'name': name,
'protected': False,
}
obj.update(kwargs)
return DictObj(**obj)
def _db_namespace_resource_type_fixture(name, prefix, **kwargs):
obj = {
'properties_target': None,
'prefix': prefix,
'name': name,
}
obj.update(kwargs)
return obj
def _db_tag_fixture(name, **kwargs):
obj = {
'name': name,
}
obj.update(**kwargs)
return DictObj(**obj)
class TestImageLoaderPlugin(test_utils.BaseTestCase):
def setUp(self):
super(TestImageLoaderPlugin, self).setUp()
self.db = unit_test_utils.FakeDB()
self.db.reset()
self._create_images()
self.plugin = images_plugin.ImageIndex()
def _create_images(self):
self.simple_image = _image_fixture(
UUID1, owner=TENANT1, checksum=CHECKSUM, name='simple', size=256,
is_public=True, status='active'
)
self.tagged_image = _image_fixture(
UUID2, owner=TENANT1, checksum=CHECKSUM, name='tagged', size=512,
is_public=True, status='active', tags=['ping', 'pong'],
)
self.complex_image = _image_fixture(
UUID3, owner=TENANT2, checksum=CHECKSUM, name='complex', size=256,
is_public=True, status='active',
extra_properties={'mysql_version': '5.6', 'hypervisor': 'lxc'}
)
self.members_image = _image_fixture(
UUID3, owner=TENANT2, checksum=CHECKSUM, name='complex', size=256,
is_public=True, status='active',
members=[
{'member': TENANT1, 'deleted': False, 'status': 'accepted'},
{'member': TENANT2, 'deleted': False, 'status': 'accepted'},
{'member': TENANT3, 'deleted': True, 'status': 'accepted'},
{'member': TENANT4, 'deleted': False, 'status': 'pending'},
]
)
self.images = [self.simple_image, self.tagged_image,
self.complex_image, self.members_image]
def test_index_name(self):
self.assertEqual('glance', self.plugin.get_index_name())
def test_document_type(self):
self.assertEqual('image', self.plugin.get_document_type())
def test_image_serialize(self):
expected = {
'checksum': '93264c3edf5972c9f1cb309543d38a5c',
'container_format': None,
'disk_format': None,
'id': 'c80a1a6c-bd1f-41c5-90ee-81afedb1d58d',
'members': [],
'min_disk': None,
'min_ram': None,
'name': 'simple',
'owner': '6838eb7b-6ded-434a-882c-b344c77fe8df',
'protected': False,
'size': 256,
'status': 'active',
'tags': set([]),
'virtual_size': None,
'visibility': 'public',
'created_at': DATE1,
'updated_at': DATE1
}
serialized = self.plugin.serialize(self.simple_image)
self.assertEqual(expected, serialized)
def test_image_with_tags_serialize(self):
expected = {
'checksum': '93264c3edf5972c9f1cb309543d38a5c',
'container_format': None,
'disk_format': None,
'id': 'a85abd86-55b3-4d5b-b0b4-5d0a6e6042fc',
'members': [],
'min_disk': None,
'min_ram': None,
'name': 'tagged',
'owner': '6838eb7b-6ded-434a-882c-b344c77fe8df',
'protected': False,
'size': 512,
'status': 'active',
'tags': set(['ping', 'pong']),
'virtual_size': None,
'visibility': 'public',
'created_at': DATE1,
'updated_at': DATE1
}
serialized = self.plugin.serialize(self.tagged_image)
self.assertEqual(expected, serialized)
def test_image_with_properties_serialize(self):
expected = {
'checksum': '93264c3edf5972c9f1cb309543d38a5c',
'container_format': None,
'disk_format': None,
'hypervisor': 'lxc',
'id': '971ec09a-8067-4bc8-a91f-ae3557f1c4c7',
'members': [],
'min_disk': None,
'min_ram': None,
'mysql_version': '5.6',
'name': 'complex',
'owner': '2c014f32-55eb-467d-8fcb-4bd706012f81',
'protected': False,
'size': 256,
'status': 'active',
'tags': set([]),
'virtual_size': None,
'visibility': 'public',
'created_at': DATE1,
'updated_at': DATE1
}
serialized = self.plugin.serialize(self.complex_image)
self.assertEqual(expected, serialized)
def test_image_with_members_serialize(self):
expected = {
'checksum': '93264c3edf5972c9f1cb309543d38a5c',
'container_format': None,
'disk_format': None,
'id': '971ec09a-8067-4bc8-a91f-ae3557f1c4c7',
'members': ['6838eb7b-6ded-434a-882c-b344c77fe8df',
'2c014f32-55eb-467d-8fcb-4bd706012f81'],
'min_disk': None,
'min_ram': None,
'name': 'complex',
'owner': '2c014f32-55eb-467d-8fcb-4bd706012f81',
'protected': False,
'size': 256,
'status': 'active',
'tags': set([]),
'virtual_size': None,
'visibility': 'public',
'created_at': DATE1,
'updated_at': DATE1
}
serialized = self.plugin.serialize(self.members_image)
self.assertEqual(expected, serialized)
def test_setup_data(self):
with mock.patch.object(self.plugin, 'get_objects',
return_value=self.images) as mock_get:
with mock.patch.object(self.plugin, 'save_documents') as mock_save:
self.plugin.setup_data()
mock_get.assert_called_once_with()
mock_save.assert_called_once_with([
{
'status': 'active',
'tags': set([]),
'container_format': None,
'min_ram': None,
'visibility': 'public',
'owner': '6838eb7b-6ded-434a-882c-b344c77fe8df',
'members': [],
'min_disk': None,
'virtual_size': None,
'id': 'c80a1a6c-bd1f-41c5-90ee-81afedb1d58d',
'size': 256,
'name': 'simple',
'checksum': '93264c3edf5972c9f1cb309543d38a5c',
'disk_format': None,
'protected': False,
'created_at': DATE1,
'updated_at': DATE1
},
{
'status': 'active',
'tags': set(['pong', 'ping']),
'container_format': None,
'min_ram': None,
'visibility': 'public',
'owner': '6838eb7b-6ded-434a-882c-b344c77fe8df',
'members': [],
'min_disk': None,
'virtual_size': None,
'id': 'a85abd86-55b3-4d5b-b0b4-5d0a6e6042fc',
'size': 512,
'name': 'tagged',
'checksum': '93264c3edf5972c9f1cb309543d38a5c',
'disk_format': None,
'protected': False,
'created_at': DATE1,
'updated_at': DATE1
},
{
'status': 'active',
'tags': set([]),
'container_format': None,
'min_ram': None,
'visibility': 'public',
'owner': '2c014f32-55eb-467d-8fcb-4bd706012f81',
'members': [],
'min_disk': None,
'virtual_size': None,
'id': '971ec09a-8067-4bc8-a91f-ae3557f1c4c7',
'size': 256,
'name': 'complex',
'checksum': '93264c3edf5972c9f1cb309543d38a5c',
'mysql_version': '5.6',
'disk_format': None,
'protected': False,
'hypervisor': 'lxc',
'created_at': DATE1,
'updated_at': DATE1
},
{
'status': 'active',
'tags': set([]),
'container_format': None,
'min_ram': None,
'visibility': 'public',
'owner': '2c014f32-55eb-467d-8fcb-4bd706012f81',
'members': ['6838eb7b-6ded-434a-882c-b344c77fe8df',
'2c014f32-55eb-467d-8fcb-4bd706012f81'],
'min_disk': None,
'virtual_size': None,
'id': '971ec09a-8067-4bc8-a91f-ae3557f1c4c7',
'size': 256,
'name': 'complex',
'checksum': '93264c3edf5972c9f1cb309543d38a5c',
'disk_format': None,
'protected': False,
'created_at': DATE1,
'updated_at': DATE1
}
])
class TestMetadefLoaderPlugin(test_utils.BaseTestCase):
def setUp(self):
super(TestMetadefLoaderPlugin, self).setUp()
self.db = unit_test_utils.FakeDB()
self.db.reset()
self._create_resource_types()
self._create_namespaces()
self._create_namespace_resource_types()
self._create_properties()
self._create_tags()
self._create_objects()
self.plugin = metadefs_plugin.MetadefIndex()
def _create_namespaces(self):
self.namespaces = [
_db_namespace_fixture(namespace=NAMESPACE1,
display_name='1',
description='desc1',
visibility='private',
protected=True,
owner=TENANT1),
_db_namespace_fixture(namespace=NAMESPACE2,
display_name='2',
description='desc2',
visibility='public',
protected=False,
owner=TENANT1),
]
def _create_properties(self):
self.properties = [
_db_property_fixture(name=PROPERTY1),
_db_property_fixture(name=PROPERTY2),
_db_property_fixture(name=PROPERTY3)
]
self.namespaces[0].properties = [self.properties[0]]
self.namespaces[1].properties = self.properties[1:]
def _create_objects(self):
self.objects = [
_db_object_fixture(name=OBJECT1,
description='desc1',
json_schema={'property1': {
'type': 'string',
'default': 'value1',
'enum': ['value1', 'value2']
}}),
_db_object_fixture(name=OBJECT2,
description='desc2'),
_db_object_fixture(name=OBJECT3,
description='desc3'),
]
self.namespaces[0].objects = [self.objects[0]]
self.namespaces[1].objects = self.objects[1:]
def _create_resource_types(self):
self.resource_types = [
_db_resource_type_fixture(name=RESOURCE_TYPE1,
protected=False),
_db_resource_type_fixture(name=RESOURCE_TYPE2,
protected=False),
_db_resource_type_fixture(name=RESOURCE_TYPE3,
protected=True),
]
def _create_namespace_resource_types(self):
self.namespace_resource_types = [
_db_namespace_resource_type_fixture(
prefix='p1',
name=self.resource_types[0].name),
_db_namespace_resource_type_fixture(
prefix='p2',
name=self.resource_types[1].name),
_db_namespace_resource_type_fixture(
prefix='p2',
name=self.resource_types[2].name),
]
self.namespaces[0].resource_types = self.namespace_resource_types[:1]
self.namespaces[1].resource_types = self.namespace_resource_types[1:]
def _create_tags(self):
self.tags = [
_db_tag_fixture(name=TAG1),
_db_tag_fixture(name=TAG2),
_db_tag_fixture(name=TAG3),
]
self.namespaces[0].tags = self.tags[:1]
self.namespaces[1].tags = self.tags[1:]
def test_index_name(self):
self.assertEqual('glance', self.plugin.get_index_name())
def test_document_type(self):
self.assertEqual('metadef', self.plugin.get_document_type())
def test_namespace_serialize(self):
metadef_namespace = self.namespaces[0]
expected = {
'namespace': 'namespace1',
'display_name': '1',
'description': 'desc1',
'visibility': 'private',
'protected': True,
'owner': '6838eb7b-6ded-434a-882c-b344c77fe8df'
}
serialized = self.plugin.serialize_namespace(metadef_namespace)
self.assertEqual(expected, serialized)
def test_object_serialize(self):
metadef_object = self.objects[0]
expected = {
'name': 'Object1',
'description': 'desc1',
'properties': [{
'default': 'value1',
'enum': ['value1', 'value2'],
'property': 'property1',
'type': 'string'
}]
}
serialized = self.plugin.serialize_object(metadef_object)
self.assertEqual(expected, serialized)
def test_property_serialize(self):
metadef_property = self.properties[0]
expected = {
'property': 'Property1',
'type': 'string',
'title': 'title',
}
serialized = self.plugin.serialize_property(
metadef_property.name, metadef_property.json_schema)
self.assertEqual(expected, serialized)
def test_complex_serialize(self):
metadef_namespace = self.namespaces[0]
expected = {
'namespace': 'namespace1',
'display_name': '1',
'description': 'desc1',
'visibility': 'private',
'protected': True,
'owner': '6838eb7b-6ded-434a-882c-b344c77fe8df',
'objects': [{
'description': 'desc1',
'name': 'Object1',
'properties': [{
'default': 'value1',
'enum': ['value1', 'value2'],
'property': 'property1',
'type': 'string'
}]
}],
'resource_types': [{
'prefix': 'p1',
'name': 'ResourceType1',
'properties_target': None
}],
'properties': [{
'property': 'Property1',
'title': 'title',
'type': 'string'
}],
'tags': [{'name': 'Tag1'}],
}
serialized = self.plugin.serialize(metadef_namespace)
self.assertEqual(expected, serialized)
def test_setup_data(self):
with mock.patch.object(self.plugin, 'get_objects',
return_value=self.namespaces) as mock_get:
with mock.patch.object(self.plugin, 'save_documents') as mock_save:
self.plugin.setup_data()
mock_get.assert_called_once_with()
mock_save.assert_called_once_with([
{
'display_name': '1',
'description': 'desc1',
'objects': [
{
'name': 'Object1',
'description': 'desc1',
'properties': [{
'default': 'value1',
'property': 'property1',
'enum': ['value1', 'value2'],
'type': 'string'
}],
}
],
'namespace': 'namespace1',
'visibility': 'private',
'protected': True,
'owner': '6838eb7b-6ded-434a-882c-b344c77fe8df',
'properties': [{
'property': 'Property1',
'type': 'string',
'title': 'title'
}],
'resource_types': [{
'prefix': 'p1',
'name': 'ResourceType1',
'properties_target': None
}],
'tags': [{'name': 'Tag1'}],
},
{
'display_name': '2',
'description': 'desc2',
'objects': [
{
'properties': [],
'name': 'Object2',
'description': 'desc2'
},
{
'properties': [],
'name': 'Object3',
'description': 'desc3'
}
],
'namespace': 'namespace2',
'visibility': 'public',
'protected': False,
'owner': '6838eb7b-6ded-434a-882c-b344c77fe8df',
'properties': [
{
'property': 'Property2',
'type': 'string',
'title': 'title'
},
{
'property': 'Property3',
'type': 'string',
'title': 'title'
}
],
'resource_types': [
{
'name': 'ResourceType2',
'prefix': 'p2',
'properties_target': None,
},
{
'name': 'ResourceType3',
'prefix': 'p2',
'properties_target': None,
}
],
'tags': [
{'name': 'Tag2'},
{'name': 'Tag3'},
],
}
])

@@ -0,0 +1,913 @@
# Copyright 2015 Hewlett-Packard Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo.serialization import jsonutils
import webob.exc
from glance.common import exception
import glance.gateway
import glance.search
from glance.search.api.v0_1 import search as search
from glance.tests.unit import base
import glance.tests.unit.utils as unit_test_utils
import glance.tests.utils as test_utils
def _action_fixture(op_type, data, index=None, doc_type=None, _id=None,
**kwargs):
action = {
'action': op_type,
'id': _id,
'index': index,
'type': doc_type,
'data': data,
}
if kwargs:
action.update(kwargs)
return action
def _image_fixture(op_type, _id=None, index='glance', doc_type='image',
data=None, **kwargs):
image_data = {
'name': 'image-1',
'disk_format': 'raw',
}
if data is not None:
image_data.update(data)
return _action_fixture(op_type, image_data, index, doc_type, _id, **kwargs)
class TestSearchController(base.IsolatedUnitTest):
def setUp(self):
super(TestSearchController, self).setUp()
self.search_controller = search.SearchController()
def test_search_all(self):
request = unit_test_utils.get_fake_request()
self.search_controller.search = mock.Mock(return_value="{}")
query = {"match_all": {}}
index = "glance"
doc_type = "metadef"
fields = None
offset = 0
limit = 10
self.search_controller.search(
request, query, index, doc_type, fields, offset, limit)
self.search_controller.search.assert_called_once_with(
request, query, index, doc_type, fields, offset, limit)
def test_search_all_repo(self):
request = unit_test_utils.get_fake_request()
repo = glance.search.CatalogSearchRepo
repo.search = mock.Mock(return_value="{}")
query = {"match_all": {}}
index = "glance"
doc_type = "metadef"
fields = []
offset = 0
limit = 10
self.search_controller.search(
request, query, index, doc_type, fields, offset, limit)
repo.search.assert_called_once_with(
index, doc_type, query, fields, offset, limit, True)
def test_search_forbidden(self):
request = unit_test_utils.get_fake_request()
repo = glance.search.CatalogSearchRepo
repo.search = mock.Mock(side_effect=exception.Forbidden)
query = {"match_all": {}}
index = "glance"
doc_type = "metadef"
fields = []
offset = 0
limit = 10
self.assertRaises(
webob.exc.HTTPForbidden, self.search_controller.search,
request, query, index, doc_type, fields, offset, limit)
def test_search_not_found(self):
request = unit_test_utils.get_fake_request()
repo = glance.search.CatalogSearchRepo
repo.search = mock.Mock(side_effect=exception.NotFound)
query = {"match_all": {}}
index = "glance"
doc_type = "metadef"
fields = []
offset = 0
limit = 10
self.assertRaises(
webob.exc.HTTPNotFound, self.search_controller.search, request,
query, index, doc_type, fields, offset, limit)
def test_search_duplicate(self):
request = unit_test_utils.get_fake_request()
repo = glance.search.CatalogSearchRepo
repo.search = mock.Mock(side_effect=exception.Duplicate)
query = {"match_all": {}}
index = "glance"
doc_type = "metadef"
fields = []
offset = 0
limit = 10
self.assertRaises(
webob.exc.HTTPConflict, self.search_controller.search, request,
query, index, doc_type, fields, offset, limit)
def test_search_internal_server_error(self):
request = unit_test_utils.get_fake_request()
repo = glance.search.CatalogSearchRepo
repo.search = mock.Mock(side_effect=Exception)
query = {"match_all": {}}
index = "glance"
doc_type = "metadef"
fields = []
offset = 0
limit = 10
self.assertRaises(
webob.exc.HTTPInternalServerError, self.search_controller.search,
request, query, index, doc_type, fields, offset, limit)
def test_index_complete(self):
request = unit_test_utils.get_fake_request()
self.search_controller.index = mock.Mock(return_value="{}")
actions = [{'action': 'create', 'index': 'myindex', 'id': 10,
'type': 'MyTest', 'data': '{"name": "MyName"}'}]
default_index = 'glance'
default_type = 'image'
self.search_controller.index(
request, actions, default_index, default_type)
self.search_controller.index.assert_called_once_with(
request, actions, default_index, default_type)
def test_index_repo_complete(self):
request = unit_test_utils.get_fake_request()
repo = glance.search.CatalogSearchRepo
repo.index = mock.Mock(return_value="{}")
actions = [{'action': 'create', 'index': 'myindex', 'id': 10,
'type': 'MyTest', 'data': '{"name": "MyName"}'}]
default_index = 'glance'
default_type = 'image'
self.search_controller.index(
request, actions, default_index, default_type)
repo.index.assert_called_once_with(
default_index, default_type, actions)
def test_index_repo_minimal(self):
request = unit_test_utils.get_fake_request()
repo = glance.search.CatalogSearchRepo
repo.index = mock.Mock(return_value="{}")
actions = [{'action': 'create', 'index': 'myindex', 'id': 10,
'type': 'MyTest', 'data': '{"name": "MyName"}'}]
self.search_controller.index(request, actions)
repo.index.assert_called_once_with(None, None, actions)
def test_index_forbidden(self):
request = unit_test_utils.get_fake_request()
repo = glance.search.CatalogSearchRepo
repo.index = mock.Mock(side_effect=exception.Forbidden)
actions = [{'action': 'create', 'index': 'myindex', 'id': 10,
'type': 'MyTest', 'data': '{"name": "MyName"}'}]
self.assertRaises(
webob.exc.HTTPForbidden, self.search_controller.index,
request, actions)
def test_index_not_found(self):
request = unit_test_utils.get_fake_request()
repo = glance.search.CatalogSearchRepo
repo.index = mock.Mock(side_effect=exception.NotFound)
actions = [{'action': 'create', 'index': 'myindex', 'id': 10,
'type': 'MyTest', 'data': '{"name": "MyName"}'}]
self.assertRaises(
webob.exc.HTTPNotFound, self.search_controller.index,
request, actions)
def test_index_duplicate(self):
request = unit_test_utils.get_fake_request()
repo = glance.search.CatalogSearchRepo
repo.index = mock.Mock(side_effect=exception.Duplicate)
actions = [{'action': 'create', 'index': 'myindex', 'id': 10,
'type': 'MyTest', 'data': '{"name": "MyName"}'}]
self.assertRaises(
webob.exc.HTTPConflict, self.search_controller.index,
request, actions)
def test_index_exception(self):
request = unit_test_utils.get_fake_request()
repo = glance.search.CatalogSearchRepo
repo.index = mock.Mock(side_effect=Exception)
actions = [{'action': 'create', 'index': 'myindex', 'id': 10,
'type': 'MyTest', 'data': '{"name": "MyName"}'}]
self.assertRaises(
webob.exc.HTTPInternalServerError, self.search_controller.index,
request, actions)
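

# Taken together, the controller tests above pin down the expected
# exception-to-HTTP mapping for both search() and index():
# exception.Forbidden -> 403, exception.NotFound -> 404,
# exception.Duplicate -> 409, and any other exception -> 500.

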
class TestSearchDeserializer(test_utils.BaseTestCase):
def setUp(self):
super(TestSearchDeserializer, self).setUp()
self.deserializer = search.RequestDeserializer(search.get_plugins())
def test_single_index(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'index': 'glance',
})
output = self.deserializer.search(request)
self.assertEqual(['glance'], output['index'])
def test_single_doc_type(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'type': 'image',
})
output = self.deserializer.search(request)
self.assertEqual(['image'], output['doc_type'])
def test_empty_request(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({})
output = self.deserializer.search(request)
self.assertEqual(['glance'], output['index'])
self.assertEqual(sorted(['image', 'metadef']),
sorted(output['doc_type']))
def test_empty_request_admin(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({})
request.context.is_admin = True
output = self.deserializer.search(request)
self.assertEqual(['glance'], output['index'])
self.assertEqual(sorted(['image', 'metadef']),
sorted(output['doc_type']))
def test_invalid_index(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'index': 'invalid',
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_invalid_doc_type(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'type': 'invalid',
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_forbidden_schema(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'schema': {},
})
self.assertRaises(webob.exc.HTTPForbidden, self.deserializer.search,
request)
def test_forbidden_self(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'self': {},
})
self.assertRaises(webob.exc.HTTPForbidden, self.deserializer.search,
request)
def test_fields_restriction(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'index': ['glance'],
'type': ['metadef'],
'query': {'match_all': {}},
'fields': ['description'],
})
output = self.deserializer.search(request)
self.assertEqual(['glance'], output['index'])
self.assertEqual(['metadef'], output['doc_type'])
self.assertEqual(['description'], output['fields'])
def test_highlight_fields(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'index': ['glance'],
'type': ['metadef'],
'query': {'match_all': {}},
'highlight': {'fields': {'name': {}}}
})
output = self.deserializer.search(request)
self.assertEqual(['glance'], output['index'])
self.assertEqual(['metadef'], output['doc_type'])
self.assertEqual({'name': {}}, output['query']['highlight']['fields'])
def test_invalid_limit(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'index': ['glance'],
'type': ['metadef'],
'query': {'match_all': {}},
'limit': 'invalid',
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.search,
request)
def test_negative_limit(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'index': ['glance'],
'type': ['metadef'],
'query': {'match_all': {}},
'limit': -1,
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.search,
request)
def test_invalid_offset(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'index': ['glance'],
'type': ['metadef'],
'query': {'match_all': {}},
'offset': 'invalid',
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.search,
request)
def test_negative_offset(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'index': ['glance'],
'type': ['metadef'],
'query': {'match_all': {}},
'offset': -1,
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.search,
request)
def test_limit_and_offset(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'index': ['glance'],
'type': ['metadef'],
'query': {'match_all': {}},
'limit': 1,
'offset': 2,
})
output = self.deserializer.search(request)
self.assertEqual(['glance'], output['index'])
self.assertEqual(['metadef'], output['doc_type'])
self.assertEqual(1, output['limit'])
self.assertEqual(2, output['offset'])
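

# Combining the accepted cases above, a request body that the search
# deserializer is expected to pass through looks roughly like this
# (values are illustrative):
#     {
#         "index": ["glance"],
#         "type": ["metadef"],
#         "query": {"match_all": {}},
#         "fields": ["description"],
#         "highlight": {"fields": {"name": {}}},
#         "limit": 1,
#         "offset": 2
#     }
# Omitting "index"/"type" falls back to every registered plugin, "schema"
# and "self" are rejected with 403, and non-integer or negative
# limit/offset values are rejected with 400.

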
class TestIndexDeserializer(test_utils.BaseTestCase):
def setUp(self):
super(TestIndexDeserializer, self).setUp()
self.deserializer = search.RequestDeserializer(search.get_plugins())
def test_empty_request(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_empty_actions(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'default_index': 'glance',
'default_type': 'image',
'actions': [],
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_missing_actions(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'default_index': 'glance',
'default_type': 'image',
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_invalid_operation_type(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('invalid', '1')]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_invalid_default_index(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'default_index': 'invalid',
'actions': [_image_fixture('create', '1')]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_invalid_default_doc_type(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'default_type': 'invalid',
'actions': [_image_fixture('create', '1')]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_empty_operation_type(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('', '1')]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_missing_operation_type(self):
action = _image_fixture('', '1')
action.pop('action')
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [action]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '1',
'_index': 'glance',
'_op_type': 'index',
'_source': {'disk_format': 'raw', 'name': 'image-1'},
'_type': 'image'
}],
'default_index': None,
'default_type': None
}
self.assertEqual(expected, output)
def test_create_single(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('create', '1')]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '1',
'_index': 'glance',
'_op_type': 'create',
'_source': {'disk_format': 'raw', 'name': 'image-1'},
'_type': 'image'
}],
'default_index': None,
'default_type': None
}
self.assertEqual(expected, output)
def test_create_multiple(self):
actions = [
_image_fixture('create', '1'),
_image_fixture('create', '2', data={'name': 'image-2'}),
]
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': actions,
})
output = self.deserializer.index(request)
expected = {
'actions': [
{
'_id': '1',
'_index': 'glance',
'_op_type': 'create',
'_source': {'disk_format': 'raw', 'name': 'image-1'},
'_type': 'image'
},
{
'_id': '2',
'_index': 'glance',
'_op_type': 'create',
'_source': {'disk_format': 'raw', 'name': 'image-2'},
'_type': 'image'
},
],
'default_index': None,
'default_type': None
}
self.assertEqual(expected, output)
def test_create_missing_data(self):
action = _image_fixture('create', '1')
action.pop('data')
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [action]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_create_with_default_index(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'default_index': 'glance',
'actions': [_image_fixture('create', '1', index=None)]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '1',
'_index': None,
'_op_type': 'create',
'_source': {'disk_format': 'raw', 'name': 'image-1'},
'_type': 'image'
}],
'default_index': 'glance',
'default_type': None
}
self.assertEqual(expected, output)
def test_create_with_default_doc_type(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'default_type': 'image',
'actions': [_image_fixture('create', '1', doc_type=None)]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '1',
'_index': 'glance',
'_op_type': 'create',
'_source': {'disk_format': 'raw', 'name': 'image-1'},
'_type': None
}],
'default_index': None,
'default_type': 'image'
}
self.assertEqual(expected, output)
def test_create_with_default_index_and_doc_type(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'default_index': 'glance',
'default_type': 'image',
'actions': [_image_fixture('create', '1', index=None,
doc_type=None)]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '1',
'_index': None,
'_op_type': 'create',
'_source': {'disk_format': 'raw', 'name': 'image-1'},
'_type': None
}],
'default_index': 'glance',
'default_type': 'image'
}
self.assertEqual(expected, output)
def test_create_missing_id(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('create')]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': None,
'_index': 'glance',
'_op_type': 'create',
'_source': {'disk_format': 'raw', 'name': 'image-1'},
'_type': 'image'
}],
'default_index': None,
'default_type': None,
}
self.assertEqual(expected, output)
def test_create_empty_id(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('create', '')]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '',
'_index': 'glance',
'_op_type': 'create',
'_source': {'disk_format': 'raw', 'name': 'image-1'},
'_type': 'image'
}],
'default_index': None,
'default_type': None
}
self.assertEqual(expected, output)
def test_create_invalid_index(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('create', index='invalid')]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_create_invalid_doc_type(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('create', doc_type='invalid')]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_create_missing_index(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('create', '1', index=None)]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_create_missing_doc_type(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('create', '1', doc_type=None)]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_update_missing_id(self):
action = _image_fixture('update')
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [action]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_update_missing_data(self):
action = _image_fixture('update', '1')
action.pop('data')
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [action]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_update_using_data(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('update', '1')]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '1',
'_index': 'glance',
'_op_type': 'update',
'_type': 'image',
'doc': {'disk_format': 'raw', 'name': 'image-1'}
}],
'default_index': None,
'default_type': None
}
self.assertEqual(expected, output)
def test_update_using_script(self):
action = _image_fixture('update', '1', script='<sample script>')
action.pop('data')
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [action]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '1',
'_index': 'glance',
'_op_type': 'update',
'_type': 'image',
'params': {},
'script': '<sample script>'
}],
'default_index': None,
'default_type': None,
}
self.assertEqual(expected, output)
def test_update_using_script_and_data(self):
action = _image_fixture('update', '1', script='<sample script>')
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [action]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '1',
'_index': 'glance',
'_op_type': 'update',
'_type': 'image',
'params': {'disk_format': 'raw', 'name': 'image-1'},
'script': '<sample script>'
}],
'default_index': None,
'default_type': None,
}
self.assertEqual(expected, output)
def test_delete_missing_id(self):
action = _image_fixture('delete')
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [action]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_delete_single(self):
action = _image_fixture('delete', '1')
action.pop('data')
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [action]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '1',
'_index': 'glance',
'_op_type': 'delete',
'_source': {},
'_type': 'image'
}],
'default_index': None,
'default_type': None
}
self.assertEqual(expected, output)
def test_delete_multiple(self):
action_1 = _image_fixture('delete', '1')
action_1.pop('data')
action_2 = _image_fixture('delete', '2')
action_2.pop('data')
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [action_1, action_2],
})
output = self.deserializer.index(request)
expected = {
'actions': [
{
'_id': '1',
'_index': 'glance',
'_op_type': 'delete',
'_source': {},
'_type': 'image'
},
{
'_id': '2',
'_index': 'glance',
'_op_type': 'delete',
'_source': {},
'_type': 'image'
},
],
'default_index': None,
'default_type': None
}
self.assertEqual(expected, output)
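

# An index (bulk) request body combining the action types exercised above
# (illustrative values; a per-action "index"/"type" may be left null when
# default_index/default_type are supplied, and "data" may be replaced by
# "script" for updates):
#     {
#         "default_index": "glance",
#         "default_type": "image",
#         "actions": [
#             {"action": "create", "id": "1", "index": null, "type": null,
#              "data": {"name": "image-1", "disk_format": "raw"}},
#             {"action": "update", "id": "1", "index": null, "type": null,
#              "script": "<sample script>"},
#             {"action": "delete", "id": "1", "index": null, "type": null}
#         ]
#     }

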
class TestResponseSerializer(test_utils.BaseTestCase):
def setUp(self):
super(TestResponseSerializer, self).setUp()
self.serializer = search.ResponseSerializer()
def test_search(self):
expected = [{
'id': '1',
'name': 'image-1',
'disk_format': 'raw',
}]
request = webob.Request.blank('/v0.1/search')
response = webob.Response(request=request)
result = [{
'id': '1',
'name': 'image-1',
'disk_format': 'raw',
}]
self.serializer.search(response, result)
actual = jsonutils.loads(response.body)
self.assertEqual(expected, actual)
self.assertEqual('application/json', response.content_type)
def test_index(self):
expected = {
'success': '1',
'failed': '0',
'errors': [],
}
request = webob.Request.blank('/v0.1/index')
response = webob.Response(request=request)
result = {
'success': '1',
'failed': '0',
'errors': [],
}
self.serializer.index(response, result)
actual = jsonutils.loads(response.body)
self.assertEqual(expected, actual)
self.assertEqual('application/json', response.content_type)

View File

@ -61,3 +61,6 @@ osprofiler>=0.3.0 # Apache-2.0
# Glance Store
glance_store>=0.3.0 # Apache-2.0
# Glance catalog index
elasticsearch>=1.3.0

View File

@ -29,6 +29,8 @@ console_scripts =
glance-cache-manage = glance.cmd.cache_manage:main
glance-cache-cleaner = glance.cmd.cache_cleaner:main
glance-control = glance.cmd.control:main
glance-search = glance.cmd.search:main
glance-index = glance.cmd.index:main
glance-manage = glance.cmd.manage:main
glance-registry = glance.cmd.registry:main
glance-replicator = glance.cmd.replicator:main
@ -46,6 +48,9 @@ glance.database.migration_backend =
sqlalchemy = oslo.db.sqlalchemy.migration
glance.database.metadata_backend =
sqlalchemy = glance.db.sqlalchemy.metadata
glance.search.index_backend =
image = glance.search.plugins.images:ImageIndex
metadef = glance.search.plugins.metadefs:MetadefIndex
glance.flows =
import = glance.async.flows.base_import:get_flow

View File

@ -35,6 +35,7 @@ commands =
oslo-config-generator --config-file etc/oslo-config-generator/glance-scrubber.conf
oslo-config-generator --config-file etc/oslo-config-generator/glance-cache.conf
oslo-config-generator --config-file etc/oslo-config-generator/glance-manage.conf
oslo-config-generator --config-file etc/oslo-config-generator/glance-search.conf
[testenv:docs]
commands = python setup.py build_sphinx
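
For reviewers who want to exercise the new endpoint by hand, a minimal client
sketch follows. It is only an illustration under stated assumptions: a
glance-search process reachable on localhost at the default bind_port (9393),
the unauthenticated pipeline, and a route that accepts POST for /v0.1/search;
the request body mirrors the format exercised by the deserializer tests above.

    # Hypothetical smoke test for the /v0.1/search endpoint (not part of this
    # change). Assumes localhost:9393, no auth, and that the route accepts
    # POST; add an X-Auth-Token header when the keystone pipeline is enabled.
    import json
    import urllib2  # Python 2 stdlib, matching the codebase at this time

    body = {
        "index": ["glance"],
        "type": ["metadef"],
        "query": {"match_all": {}},
        "limit": 10,
        "offset": 0,
    }
    request = urllib2.Request(
        "http://localhost:9393/v0.1/search",
        data=json.dumps(body),
        headers={"Content-Type": "application/json"},
    )
    print(json.load(urllib2.urlopen(request)))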