Remove Catalog Index Service

The Catalog Index Service added in the Kilo cycle has been split into a new
project named searchlight. This code now lives in a separate repository:

    https://git.openstack.org/openstack/searchlight

For more information about the split, see the governance change:
I8b44aac03585c651ef8d5e94624f64a0ed2d10b2

DocImpact
UpgradeImpact
APIImpact

Change-Id: I239ac9e32857f6a728f40c169e773ee977cca3ca
Authored by Louis Taylor on 2015-06-30 12:19:34 +00:00, committed by Lianhao Lu
parent 4ec1e7d4db
commit feb927c8a1
29 changed files with 2 additions and 3708 deletions

@@ -1,23 +0,0 @@
# Use this pipeline for no auth - DEFAULT
[pipeline:glance-search]
pipeline = unauthenticated-context rootapp
[pipeline:glance-search-keystone]
pipeline = authtoken context rootapp
[composite:rootapp]
paste.composite_factory = glance.api:root_app_factory
/v0.1: apiv0_1app
[app:apiv0_1app]
paste.app_factory = glance.search.api.v0_1.router:API.factory
[filter:unauthenticated-context]
paste.filter_factory = glance.api.middleware.context:UnauthenticatedContextMiddleware.factory
[filter:authtoken]
paste.filter_factory = keystonemiddleware.auth_token:filter_factory
delay_auth_decision = true
[filter:context]
paste.filter_factory = glance.api.middleware.context:ContextMiddleware.factory

@@ -1,116 +0,0 @@
[DEFAULT]
# Show more verbose log output (sets INFO log level output)
#verbose = False
# Show debugging output in logs (sets DEBUG log level output)
debug = True
# Address to bind the search server
bind_host = 0.0.0.0
# Port to bind the server to
bind_port = 9393
# Log to this file. Make sure you do not set the same log file for both the API
# and registry servers!
#
# If `log_file` is omitted and `use_syslog` is false, then log messages are
# sent to stdout as a fallback.
log_file = /var/log/glance/search.log
# Backlog requests when creating socket
backlog = 4096
# TCP_KEEPIDLE value in seconds when creating socket.
# Not supported on OS X.
#tcp_keepidle = 600
# Property Protections config file
# This file contains the rules for property protections and the roles/policies
# associated with it.
# If this config value is not specified, by default, property protections
# won't be enforced.
# If a value is specified and the file is not found, then the glance-api
# service will not start.
#property_protection_file =
# Specify whether 'roles' or 'policies' are used in the
# property_protection_file.
# The default value for property_protection_rule_format is 'roles'.
#property_protection_rule_format = roles
# http_keepalive option. If False, the server returns the header
# "Connection: close"; if True, it returns "Connection: Keep-Alive"
# in its responses. To close the client socket connection
# explicitly after the response is sent and read successfully by the
# client, set this option to False when you create a wsgi server.
#http_keepalive = True
# ================= Syslog Options ============================
# Send logs to syslog (/dev/log) instead of to file specified
# by `log_file`
#use_syslog = False
# Facility to use. If unset defaults to LOG_USER.
#syslog_log_facility = LOG_LOCAL0
# ================= SSL Options ===============================
# Certificate file to use when starting API server securely
#cert_file = /path/to/certfile
# Private key file to use when starting API server securely
#key_file = /path/to/keyfile
# CA certificate file to use to verify connecting clients
#ca_file = /path/to/cafile
# =============== Policy Options ==================================
# The JSON file that defines policies.
policy_file = search-policy.json
# Default rule. Enforced when a requested rule is not found.
#policy_default_rule = default
# Directories where policy configuration files are stored.
# They can be relative to any directory in the search path
# defined by the config_dir option, or absolute paths.
# The file defined by policy_file must exist for these
# directories to be searched.
#policy_dirs = policy.d
[paste_deploy]
# Name of the paste configuration file that defines the available pipelines
# config_file = glance-search-paste.ini
# Partial name of a pipeline in your paste configuration file with the
# service name removed. For example, if your paste section name is
# [pipeline:glance-registry-keystone], you would configure the flavor below
# as 'keystone'.
#flavor=
#
[database]
# The SQLAlchemy connection string used to connect to the
# database (string value)
# Deprecated group/name - [DEFAULT]/sql_connection
# Deprecated group/name - [DATABASE]/sql_connection
# Deprecated group/name - [sql]/connection
#connection = <None>
[keystone_authtoken]
identity_uri = http://127.0.0.1:35357
admin_tenant_name = %SERVICE_TENANT_NAME%
admin_user = %SERVICE_USER%
admin_password = %SERVICE_PASSWORD%
revocation_cache_time = 10
# =============== ElasticSearch Options =======================
[elasticsearch]
# List of nodes where Elasticsearch instances are running. A single node
# should be defined as an IP address and port number.
# The default is ['127.0.0.1:9200']
#hosts = ['127.0.0.1:9200']

@@ -1,8 +0,0 @@
{
"context_is_admin": "role:admin",
"default": "",
"catalog_index": "role:admin",
"catalog_search": "",
"catalog_plugins": ""
}

@@ -674,24 +674,3 @@ class MetadefTagFactoryProxy(glance.domain.proxy.MetadefTagFactory):
meta_tag_factory,
meta_tag_proxy_class=MetadefTagProxy,
meta_tag_proxy_kwargs=proxy_kwargs)
# Catalog Search classes
class CatalogSearchRepoProxy(object):
def __init__(self, search_repo, context, search_policy):
self.context = context
self.policy = search_policy
self.search_repo = search_repo
def search(self, *args, **kwargs):
self.policy.enforce(self.context, 'catalog_search', {})
return self.search_repo.search(*args, **kwargs)
def plugins_info(self, *args, **kwargs):
self.policy.enforce(self.context, 'catalog_plugins', {})
return self.search_repo.plugins_info(*args, **kwargs)
def index(self, *args, **kwargs):
self.policy.enforce(self.context, 'catalog_index', {})
return self.search_repo.index(*args, **kwargs)

@@ -1,34 +0,0 @@
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from oslo_service import service as os_service
from glance import listener
from glance import service
CONF = cfg.CONF
def main():
service.prepare_service()
launcher = os_service.ProcessLauncher(CONF)
launcher.launch_service(
listener.ListenerService(),
workers=service.get_workers('listener'))
launcher.wait()
if __name__ == "__main__":
main()

@@ -1,52 +0,0 @@
# Copyright 2015 Intel Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
from oslo_config import cfg
from oslo_log import log as logging
import stevedore
from glance.common import config
from glance import i18n
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
_LE = i18n._LE
def main():
try:
logging.register_options(CONF)
cfg_files = cfg.find_config_files(project='glance',
prog='glance-api')
cfg_files.extend(cfg.find_config_files(project='glance',
prog='glance-search'))
config.parse_args(default_config_files=cfg_files)
logging.setup(CONF, 'glance')
namespace = 'glance.search.index_backend'
ext_manager = stevedore.extension.ExtensionManager(
namespace, invoke_on_load=True)
for ext in ext_manager.extensions:
try:
ext.obj.setup()
except Exception as e:
LOG.error(_LE("Failed to setup index extension "
"%(ext)s: %(e)s") % {'ext': ext.name,
'e': e})
except RuntimeError as e:
sys.exit("ERROR: %s" % e)

@@ -1,94 +0,0 @@
#!/usr/bin/env python
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Glance Catalog Search Server
"""
import os
import sys
import eventlet
from oslo_utils import encodeutils
# Monkey patch socket, time, select, threads
eventlet.patcher.monkey_patch(socket=True, time=True, select=True,
thread=True, os=True)
# If ../glance/__init__.py exists, add ../ to Python search path, so that
# it will override what happens to be installed in /usr/(local/)lib/python...
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir,
os.pardir))
if os.path.exists(os.path.join(possible_topdir, 'glance', '__init__.py')):
sys.path.insert(0, possible_topdir)
from oslo_config import cfg
from oslo_log import log as logging
import oslo_messaging
import osprofiler.notifier
import osprofiler.web
from glance.common import config
from glance.common import exception
from glance.common import wsgi
from glance import notifier
CONF = cfg.CONF
CONF.import_group("profiler", "glance.common.wsgi")
logging.register_options(CONF)
KNOWN_EXCEPTIONS = (RuntimeError,
exception.WorkerCreationFailure)
def fail(e):
global KNOWN_EXCEPTIONS
return_code = KNOWN_EXCEPTIONS.index(type(e)) + 1
sys.stderr.write("ERROR: %s\n" % encodeutils.exception_to_unicode(e))
sys.exit(return_code)
def main():
try:
config.parse_args()
wsgi.set_eventlet_hub()
logging.setup(CONF, 'glance')
if cfg.CONF.profiler.enabled:
_notifier = osprofiler.notifier.create("Messaging",
oslo_messaging, {},
notifier.get_transport(),
"glance", "search",
cfg.CONF.bind_host)
osprofiler.notifier.set(_notifier)
else:
osprofiler.web.disable()
server = wsgi.Server()
server.start(config.load_paste_app('glance-search'),
default_port=9393)
server.wait()
except KNOWN_EXCEPTIONS as e:
fail(e)
if __name__ == '__main__':
main()

@@ -557,9 +557,3 @@ class InvalidJsonPatchPath(JsonPatchException):
def __init__(self, message=None, *args, **kwargs):
self.explanation = kwargs.get("explanation")
super(InvalidJsonPatchPath, self).__init__(message, *args, **kwargs)
class SearchNotAvailable(GlanceException):
message = _("The search and index services are not available. Ensure you "
"have the necessary prerequisite dependencies installed like "
"elasticsearch to use these services.")

@@ -32,7 +32,6 @@ import functools
import os
import platform
import re
import stevedore
import subprocess
import sys
import uuid
@@ -741,10 +740,3 @@ def stash_conf_values():
conf['cert_file'] = CONF.cert_file
return conf
def get_search_plugins():
namespace = 'glance.search.index_backend'
ext_manager = stevedore.extension.ExtensionManager(
namespace, invoke_on_load=True)
return ext_manager.extensions

@@ -19,20 +19,13 @@ from oslo_log import log as logging
from glance.api import authorization
from glance.api import policy
from glance.api import property_protections
from glance.common import exception
from glance.common import property_utils
from glance.common import store_utils
import glance.db
import glance.domain
from glance.i18n import _LE
import glance.location
import glance.notifier
import glance.quota
try:
import glance.search
glance_search = glance.search
except ImportError:
glance_search = None
LOG = logging.getLogger(__name__)
@@ -40,16 +33,12 @@ LOG = logging.getLogger(__name__)
class Gateway(object):
def __init__(self, db_api=None, store_api=None, notifier=None,
policy_enforcer=None, es_api=None):
policy_enforcer=None):
self.db_api = db_api or glance.db.get_api()
self.store_api = store_api or glance_store
self.store_utils = store_utils
self.notifier = notifier or glance.notifier.Notifier()
self.policy = policy_enforcer or policy.Enforcer()
if es_api:
self.es_api = es_api
else:
self.es_api = glance_search.get_api() if glance_search else None
def get_image_factory(self, context):
image_factory = glance.domain.ImageFactory()
@@ -246,15 +235,3 @@ class Gateway(object):
authorized_tag_repo = authorization.MetadefTagRepoProxy(
notifier_tag_repo, context)
return authorized_tag_repo
def get_catalog_search_repo(self, context):
if self.es_api is None:
LOG.error(_LE('The search and index services are not available. '
'Ensure you have the necessary prerequisite '
'dependencies installed like elasticsearch to use '
'these services.'))
raise exception.SearchNotAvailable()
search_repo = glance.search.CatalogSearchRepo(context, self.es_api)
policy_search_repo = policy.CatalogSearchRepoProxy(
search_repo, context, self.policy)
return policy_search_repo

@@ -1,90 +0,0 @@
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from oslo_log import log as logging
import oslo_messaging
from oslo_service import service as os_service
import stevedore
from glance import i18n
LOG = logging.getLogger(__name__)
_ = i18n._
_LE = i18n._LE
class NotificationEndpoint(object):
def __init__(self):
self.plugins = get_plugins()
self.notification_target_map = dict()
for plugin in self.plugins:
try:
event_list = plugin.obj.get_notification_supported_events()
for event in event_list:
self.notification_target_map[event.lower()] = plugin.obj
except Exception as e:
LOG.error(_LE("Failed to retrieve supported notification"
" events from search plugins "
"%(ext)s: %(e)s") %
{'ext': plugin.name, 'e': e})
def info(self, ctxt, publisher_id, event_type, payload, metadata):
event_type_l = event_type.lower()
if event_type_l in self.notification_target_map:
plugin = self.notification_target_map[event_type_l]
handler = plugin.get_notification_handler()
handler.process(
ctxt,
publisher_id,
event_type,
payload,
metadata)
class ListenerService(os_service.Service):
def __init__(self, *args, **kwargs):
super(ListenerService, self).__init__(*args, **kwargs)
self.listeners = []
def start(self):
super(ListenerService, self).start()
transport = oslo_messaging.get_transport(cfg.CONF)
targets = [
oslo_messaging.Target(topic="notifications", exchange="glance")
]
endpoints = [
NotificationEndpoint()
]
listener = oslo_messaging.get_notification_listener(
transport,
targets,
endpoints)
listener.start()
self.listeners.append(listener)
def stop(self):
for listener in self.listeners:
listener.stop()
listener.wait()
super(ListenerService, self).stop()
def get_plugins():
namespace = 'glance.search.index_backend'
ext_manager = stevedore.extension.ExtensionManager(
namespace, invoke_on_load=True)
return ext_manager.extensions

@@ -1,77 +0,0 @@
# Copyright 2014 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import elasticsearch
from elasticsearch import helpers
from oslo_config import cfg
from glance.common import utils
search_opts = [
cfg.ListOpt('hosts', default=['127.0.0.1:9200'],
help='List of nodes where Elasticsearch instances are '
'running. A single node should be defined as an IP '
'address and port number.'),
]
CONF = cfg.CONF
CONF.register_opts(search_opts, group='elasticsearch')
def get_api():
es_hosts = CONF.elasticsearch.hosts
es_api = elasticsearch.Elasticsearch(hosts=es_hosts)
return es_api
class CatalogSearchRepo(object):
def __init__(self, context, es_api):
self.context = context
self.es_api = es_api
self.plugins = utils.get_search_plugins() or []
self.plugins_info_dict = self._get_plugin_info()
def search(self, index, doc_type, query, fields, offset, limit,
ignore_unavailable=True):
return self.es_api.search(
index=index,
doc_type=doc_type,
body=query,
_source_include=fields,
from_=offset,
size=limit,
ignore_unavailable=ignore_unavailable)
def index(self, default_index, default_type, actions):
return helpers.bulk(
client=self.es_api,
index=default_index,
doc_type=default_type,
actions=actions)
def plugins_info(self):
return self.plugins_info_dict
def _get_plugin_info(self):
plugin_info = dict()
plugin_info['plugins'] = []
for plugin in self.plugins:
info = dict()
info['type'] = plugin.obj.get_document_type()
info['index'] = plugin.obj.get_index_name()
plugin_info['plugins'].append(info)
return plugin_info

@@ -1,20 +0,0 @@
# Copyright 2014 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import paste.urlmap
def root_app_factory(loader, global_conf, **local_conf):
return paste.urlmap.urlmap_factory(loader, global_conf, **local_conf)

@@ -1,66 +0,0 @@
# Copyright 2014 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from glance.common import wsgi
from glance.search.api.v0_1 import search
class API(wsgi.Router):
"""WSGI router for Glance Catalog Search v0_1 API requests."""
def __init__(self, mapper):
reject_method_resource = wsgi.Resource(wsgi.RejectMethodController())
search_catalog_resource = search.create_resource()
mapper.connect('/search',
controller=search_catalog_resource,
action='search',
conditions={'method': ['GET']})
mapper.connect('/search',
controller=search_catalog_resource,
action='search',
conditions={'method': ['POST']})
mapper.connect('/search',
controller=reject_method_resource,
action='reject',
allowed_methods='GET, POST',
conditions={'method': ['PUT', 'DELETE',
'PATCH', 'HEAD']})
mapper.connect('/search/plugins',
controller=search_catalog_resource,
action='plugins_info',
conditions={'method': ['GET']})
mapper.connect('/search/plugins',
controller=reject_method_resource,
action='reject',
allowed_methods='GET',
conditions={'method': ['POST', 'PUT', 'DELETE',
'PATCH', 'HEAD']})
mapper.connect('/index',
controller=search_catalog_resource,
action='index',
conditions={'method': ['POST']})
mapper.connect('/index',
controller=reject_method_resource,
action='reject',
allowed_methods='POST',
conditions={'method': ['GET', 'PUT', 'DELETE',
'PATCH', 'HEAD']})
super(API, self).__init__(mapper)

@@ -1,383 +0,0 @@
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import encodeutils
import six
import webob.exc
from glance.api import policy
from glance.common import exception
from glance.common import utils
from glance.common import wsgi
import glance.db
import glance.gateway
from glance import i18n
import glance.notifier
import glance.schema
LOG = logging.getLogger(__name__)
_ = i18n._
_LE = i18n._LE
CONF = cfg.CONF
class SearchController(object):
def __init__(self, plugins=None, es_api=None, policy_enforcer=None):
self.es_api = es_api or glance.search.get_api()
self.policy = policy_enforcer or policy.Enforcer()
self.gateway = glance.gateway.Gateway(
es_api=self.es_api,
policy_enforcer=self.policy)
self.plugins = plugins or []
def search(self, req, query, index, doc_type=None, fields=None, offset=0,
limit=10):
if fields is None:
fields = []
try:
search_repo = self.gateway.get_catalog_search_repo(req.context)
result = search_repo.search(index,
doc_type,
query,
fields,
offset,
limit,
True)
for plugin in self.plugins:
result = plugin.obj.filter_result(result, req.context)
return result
except exception.Forbidden as e:
raise webob.exc.HTTPForbidden(explanation=e.msg)
except exception.NotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.msg)
except exception.Duplicate as e:
raise webob.exc.HTTPConflict(explanation=e.msg)
except Exception as e:
LOG.error(encodeutils.exception_to_unicode(e))
raise webob.exc.HTTPInternalServerError()
def plugins_info(self, req):
try:
search_repo = self.gateway.get_catalog_search_repo(req.context)
return search_repo.plugins_info()
except exception.Forbidden as e:
raise webob.exc.HTTPForbidden(explanation=e.msg)
except exception.NotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.msg)
except Exception as e:
LOG.error(encodeutils.exception_to_unicode(e))
raise webob.exc.HTTPInternalServerError()
def index(self, req, actions, default_index=None, default_type=None):
try:
search_repo = self.gateway.get_catalog_search_repo(req.context)
success, errors = search_repo.index(
default_index,
default_type,
actions)
return {
'success': success,
'failed': len(errors),
'errors': errors,
}
except exception.Forbidden as e:
raise webob.exc.HTTPForbidden(explanation=e.msg)
except exception.NotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.msg)
except exception.Duplicate as e:
raise webob.exc.HTTPConflict(explanation=e.msg)
except Exception as e:
LOG.error(encodeutils.exception_to_unicode(e))
raise webob.exc.HTTPInternalServerError()
class RequestDeserializer(wsgi.JSONRequestDeserializer):
_disallowed_properties = ['self', 'schema']
def __init__(self, plugins, schema=None):
super(RequestDeserializer, self).__init__()
self.plugins = plugins
def _get_request_body(self, request):
output = super(RequestDeserializer, self).default(request)
if 'body' not in output:
msg = _('Body expected in request.')
raise webob.exc.HTTPBadRequest(explanation=msg)
return output['body']
@classmethod
def _check_allowed(cls, query):
for key in cls._disallowed_properties:
if key in query:
msg = _("Attribute '%s' is read-only.") % key
raise webob.exc.HTTPForbidden(explanation=msg)
def _get_available_indices(self):
return list(set([p.obj.get_index_name() for p in self.plugins]))
def _get_available_types(self):
return list(set([p.obj.get_document_type() for p in self.plugins]))
def _validate_index(self, index):
available_indices = self._get_available_indices()
if index not in available_indices:
msg = _("Index '%s' is not supported.") % index
raise webob.exc.HTTPBadRequest(explanation=msg)
return index
def _validate_doc_type(self, doc_type):
available_types = self._get_available_types()
if doc_type not in available_types:
msg = _("Document type '%s' is not supported.") % doc_type
raise webob.exc.HTTPBadRequest(explanation=msg)
return doc_type
def _validate_offset(self, offset):
try:
offset = int(offset)
except ValueError:
msg = _("offset param must be an integer")
raise webob.exc.HTTPBadRequest(explanation=msg)
if offset < 0:
msg = _("offset param must be positive")
raise webob.exc.HTTPBadRequest(explanation=msg)
return offset
def _validate_limit(self, limit):
try:
limit = int(limit)
except ValueError:
msg = _("limit param must be an integer")
raise webob.exc.HTTPBadRequest(explanation=msg)
if limit < 1:
msg = _("limit param must be positive")
raise webob.exc.HTTPBadRequest(explanation=msg)
return limit
def _validate_actions(self, actions):
if not actions:
msg = _("actions param cannot be empty")
raise webob.exc.HTTPBadRequest(explanation=msg)
output = []
allowed_action_types = ['create', 'update', 'delete', 'index']
for action in actions:
action_type = action.get('action', 'index')
document_id = action.get('id')
document_type = action.get('type')
index_name = action.get('index')
data = action.get('data', {})
script = action.get('script')
if index_name is not None:
index_name = self._validate_index(index_name)
if document_type is not None:
document_type = self._validate_doc_type(document_type)
if action_type not in allowed_action_types:
msg = _("Invalid action type: '%s'") % action_type
raise webob.exc.HTTPBadRequest(explanation=msg)
elif (action_type in ['create', 'update', 'index'] and
not any([data, script])):
msg = (_("Action type '%s' requires data or script param.") %
action_type)
raise webob.exc.HTTPBadRequest(explanation=msg)
elif action_type in ['update', 'delete'] and not document_id:
msg = (_("Action type '%s' requires ID of the document.") %
action_type)
raise webob.exc.HTTPBadRequest(explanation=msg)
bulk_action = {
'_op_type': action_type,
'_id': document_id,
'_index': index_name,
'_type': document_type,
}
if script:
data_field = 'params'
bulk_action['script'] = script
elif action_type == 'update':
data_field = 'doc'
else:
data_field = '_source'
bulk_action[data_field] = data
output.append(bulk_action)
return output
def _get_query(self, context, query, doc_types):
is_admin = context.is_admin
if is_admin:
query_params = {
'query': {
'query': query
}
}
else:
filtered_query_list = []
for plugin in self.plugins:
try:
doc_type = plugin.obj.get_document_type()
rbac_filter = plugin.obj.get_rbac_filter(context)
except Exception as e:
LOG.error(_LE("Failed to retrieve RBAC filters "
"from search plugin "
"%(ext)s: %(e)s") %
{'ext': plugin.name, 'e': e})
if doc_type in doc_types:
filter_query = {
"query": query,
"filter": rbac_filter
}
filtered_query = {
'filtered': filter_query
}
filtered_query_list.append(filtered_query)
query_params = {
'query': {
'query': {
"bool": {
"should": filtered_query_list
},
}
}
}
return query_params
def search(self, request):
body = self._get_request_body(request)
self._check_allowed(body)
query = body.pop('query', None)
indices = body.pop('index', None)
doc_types = body.pop('type', None)
fields = body.pop('fields', None)
offset = body.pop('offset', None)
limit = body.pop('limit', None)
highlight = body.pop('highlight', None)
if not indices:
indices = self._get_available_indices()
elif not isinstance(indices, (list, tuple)):
indices = [indices]
if not doc_types:
doc_types = self._get_available_types()
elif not isinstance(doc_types, (list, tuple)):
doc_types = [doc_types]
query_params = self._get_query(request.context, query, doc_types)
query_params['index'] = [self._validate_index(index)
for index in indices]
query_params['doc_type'] = [self._validate_doc_type(doc_type)
for doc_type in doc_types]
if fields is not None:
query_params['fields'] = fields
if offset is not None:
query_params['offset'] = self._validate_offset(offset)
if limit is not None:
query_params['limit'] = self._validate_limit(limit)
if highlight is not None:
query_params['query']['highlight'] = highlight
return query_params
def index(self, request):
body = self._get_request_body(request)
self._check_allowed(body)
default_index = body.pop('default_index', None)
if default_index is not None:
default_index = self._validate_index(default_index)
default_type = body.pop('default_type', None)
if default_type is not None:
default_type = self._validate_doc_type(default_type)
actions = self._validate_actions(body.pop('actions', None))
if not all([default_index, default_type]):
for action in actions:
if not any([action['_index'], default_index]):
msg = (_("Action index is missing and no default "
"index has been set."))
raise webob.exc.HTTPBadRequest(explanation=msg)
if not any([action['_type'], default_type]):
msg = (_("Action document type is missing and no default "
"type has been set."))
raise webob.exc.HTTPBadRequest(explanation=msg)
query_params = {
'default_index': default_index,
'default_type': default_type,
'actions': actions,
}
return query_params
class ResponseSerializer(wsgi.JSONResponseSerializer):
def __init__(self, schema=None):
super(ResponseSerializer, self).__init__()
self.schema = schema
def search(self, response, query_result):
body = json.dumps(query_result, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.content_type = 'application/json'
def plugins_info(self, response, query_result):
body = json.dumps(query_result, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.content_type = 'application/json'
def index(self, response, query_result):
body = json.dumps(query_result, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.content_type = 'application/json'
def create_resource():
"""Search resource factory method"""
plugins = utils.get_search_plugins()
deserializer = RequestDeserializer(plugins)
serializer = ResponseSerializer()
controller = SearchController(plugins)
return wsgi.Resource(controller, deserializer, serializer)

@@ -1,140 +0,0 @@
# Copyright 2015 Intel Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
from elasticsearch import helpers
import six
import glance.search
@six.add_metaclass(abc.ABCMeta)
class IndexBase(object):
chunk_size = 200
def __init__(self):
self.engine = glance.search.get_api()
self.index_name = self.get_index_name()
self.document_type = self.get_document_type()
def setup(self):
"""Comprehensively install search engine index and put data into it."""
self.setup_index()
self.setup_mapping()
self.setup_data()
def setup_index(self):
"""Create the index if it doesn't exist and update its settings."""
index_exists = self.engine.indices.exists(self.index_name)
if not index_exists:
self.engine.indices.create(index=self.index_name)
index_settings = self.get_settings()
if index_settings:
self.engine.indices.put_settings(index=self.index_name,
body=index_settings)
return index_exists
def setup_mapping(self):
"""Update index document mapping."""
index_mapping = self.get_mapping()
if index_mapping:
self.engine.indices.put_mapping(index=self.index_name,
doc_type=self.document_type,
body=index_mapping)
def setup_data(self):
"""Insert all objects from database into search engine."""
object_list = self.get_objects()
documents = []
for obj in object_list:
document = self.serialize(obj)
documents.append(document)
self.save_documents(documents)
def save_documents(self, documents, id_field='id'):
"""Send list of serialized documents into search engine."""
actions = []
for document in documents:
action = {
'_id': document.get(id_field),
'_source': document,
}
actions.append(action)
helpers.bulk(
client=self.engine,
index=self.index_name,
doc_type=self.document_type,
chunk_size=self.chunk_size,
actions=actions)
@abc.abstractmethod
def get_objects(self):
"""Get list of all objects which will be indexed into search engine."""
@abc.abstractmethod
def serialize(self, obj):
"""Serialize database object into valid search engine document."""
@abc.abstractmethod
def get_index_name(self):
"""Get name of the index."""
@abc.abstractmethod
def get_document_type(self):
"""Get name of the document type."""
@abc.abstractmethod
def get_rbac_filter(self, request_context):
"""Get rbac filter as es json filter dsl."""
def filter_result(self, result, request_context):
"""Filter the outgoing search result."""
return result
def get_settings(self):
"""Get an index settings."""
return {}
def get_mapping(self):
"""Get an index mapping."""
return {}
def get_notification_handler(self):
"""Get the notification handler which implements NotificationBase."""
return None
def get_notification_supported_events(self):
"""Get the list of suppported event types."""
return []
@six.add_metaclass(abc.ABCMeta)
class NotificationBase(object):
def __init__(self, engine, index_name, document_type):
self.engine = engine
self.index_name = index_name
self.document_type = document_type
@abc.abstractmethod
def process(self, ctxt, publisher_id, event_type, payload, metadata):
"""Process the incoming notification message."""

@@ -1,163 +0,0 @@
# Copyright 2015 Intel Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy.orm import joinedload
from oslo_utils import timeutils
from glance.api import policy
from glance.common import property_utils
import glance.db
from glance.db.sqlalchemy import models
from glance.search.plugins import base
from glance.search.plugins import images_notification_handler
class ImageIndex(base.IndexBase):
def __init__(self, db_api=None, policy_enforcer=None):
super(ImageIndex, self).__init__()
self.db_api = db_api or glance.db.get_api()
self.policy = policy_enforcer or policy.Enforcer()
if property_utils.is_property_protection_enabled():
self.property_rules = property_utils.PropertyRules(self.policy)
self._image_base_properties = [
'checksum', 'created_at', 'container_format', 'disk_format', 'id',
'min_disk', 'min_ram', 'name', 'size', 'virtual_size', 'status',
'tags', 'updated_at', 'visibility', 'protected', 'owner',
'members']
def get_index_name(self):
return 'glance'
def get_document_type(self):
return 'image'
def get_mapping(self):
return {
'dynamic': True,
'properties': {
'id': {'type': 'string', 'index': 'not_analyzed'},
'name': {'type': 'string'},
'description': {'type': 'string'},
'tags': {'type': 'string'},
'disk_format': {'type': 'string'},
'container_format': {'type': 'string'},
'size': {'type': 'long'},
'virtual_size': {'type': 'long'},
'status': {'type': 'string'},
'visibility': {'type': 'string'},
'checksum': {'type': 'string'},
'min_disk': {'type': 'long'},
'min_ram': {'type': 'long'},
'owner': {'type': 'string', 'index': 'not_analyzed'},
'protected': {'type': 'boolean'},
'members': {'type': 'string', 'index': 'not_analyzed'},
"created_at": {'type': 'date'},
"updated_at": {'type': 'date'}
},
}
def get_rbac_filter(self, request_context):
return [
{
"and": [
{
'or': [
{
'term': {
'owner': request_context.owner
}
},
{
'term': {
'visibility': 'public'
}
},
{
'term': {
'members': request_context.tenant
}
}
]
},
{
'type': {
'value': self.get_document_type()
}
}
]
}
]
def filter_result(self, result, request_context):
if property_utils.is_property_protection_enabled():
hits = result['hits']['hits']
for hit in hits:
if hit['_type'] == self.get_document_type():
source = hit['_source']
for key in source.keys():
if key not in self._image_base_properties:
if not self.property_rules.check_property_rules(
key, 'read', request_context):
del hit['_source'][key]
return result
def get_objects(self):
session = self.db_api.get_session()
images = session.query(models.Image).options(
joinedload('properties'), joinedload('members'), joinedload('tags')
).filter_by(deleted=False)
return images
def serialize(self, obj):
visibility = 'public' if obj.is_public else 'private'
members = []
for member in obj.members:
if member.status == 'accepted' and member.deleted == 0:
members.append(member.member)
document = {
'id': obj.id,
'name': obj.name,
'tags': obj.tags,
'disk_format': obj.disk_format,
'container_format': obj.container_format,
'size': obj.size,
'virtual_size': obj.virtual_size,
'status': obj.status,
'visibility': visibility,
'checksum': obj.checksum,
'min_disk': obj.min_disk,
'min_ram': obj.min_ram,
'owner': obj.owner,
'protected': obj.protected,
'members': members,
'created_at': timeutils.isotime(obj.created_at),
'updated_at': timeutils.isotime(obj.updated_at)
}
for image_property in obj.properties:
document[image_property.name] = image_property.value
return document
def get_notification_handler(self):
return images_notification_handler.ImageHandler(
self.engine,
self.get_index_name(),
self.get_document_type()
)
def get_notification_supported_events(self):
return ['image.create', 'image.update', 'image.delete']

@@ -1,83 +0,0 @@
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_log import log as logging
import oslo_messaging
from oslo_utils import encodeutils
from glance.search.plugins import base
LOG = logging.getLogger(__name__)
class ImageHandler(base.NotificationBase):
def __init__(self, *args, **kwargs):
super(ImageHandler, self).__init__(*args, **kwargs)
self.image_delete_keys = ['deleted_at', 'deleted',
'is_public', 'properties']
def process(self, ctxt, publisher_id, event_type, payload, metadata):
try:
actions = {
"image.create": self.create,
"image.update": self.update,
"image.delete": self.delete
}
actions[event_type](payload)
return oslo_messaging.NotificationResult.HANDLED
except Exception as e:
LOG.error(encodeutils.exception_to_unicode(e))
def create(self, payload):
id = payload['id']
payload = self.format_image(payload)
self.engine.create(
index=self.index_name,
doc_type=self.document_type,
body=payload,
id=id
)
def update(self, payload):
id = payload['id']
payload = self.format_image(payload)
doc = {"doc": payload}
self.engine.update(
index=self.index_name,
doc_type=self.document_type,
body=doc,
id=id
)
def delete(self, payload):
id = payload['id']
self.engine.delete(
index=self.index_name,
doc_type=self.document_type,
id=id
)
def format_image(self, payload):
visibility = 'public' if payload['is_public'] else 'private'
payload['visibility'] = visibility
payload.update(payload.get('properties', '{}'))
for key in payload.keys():
if key in self.image_delete_keys:
del payload[key]
return payload

@@ -1,259 +0,0 @@
# Copyright 2015 Intel Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import six
import glance.db
from glance.db.sqlalchemy import models_metadef as models
from glance.search.plugins import base
from glance.search.plugins import metadefs_notification_handler
class MetadefIndex(base.IndexBase):
def __init__(self):
super(MetadefIndex, self).__init__()
self.db_api = glance.db.get_api()
def get_index_name(self):
return 'glance'
def get_document_type(self):
return 'metadef'
def get_mapping(self):
property_mapping = {
'dynamic': True,
'type': 'nested',
'properties': {
'property': {'type': 'string', 'index': 'not_analyzed'},
'type': {'type': 'string'},
'title': {'type': 'string'},
'description': {'type': 'string'},
}
}
mapping = {
'_id': {
'path': 'namespace',
},
'properties': {
'display_name': {'type': 'string'},
'description': {'type': 'string'},
'namespace': {'type': 'string', 'index': 'not_analyzed'},
'owner': {'type': 'string', 'index': 'not_analyzed'},
'visibility': {'type': 'string', 'index': 'not_analyzed'},
'resource_types': {
'type': 'nested',
'properties': {
'name': {'type': 'string'},
'prefix': {'type': 'string'},
'properties_target': {'type': 'string'},
},
},
'objects': {
'type': 'nested',
'properties': {
'id': {'type': 'string', 'index': 'not_analyzed'},
'name': {'type': 'string'},
'description': {'type': 'string'},
'properties': property_mapping,
}
},
'properties': property_mapping,
'tags': {
'type': 'nested',
'properties': {
'name': {'type': 'string'},
}
}
},
}
return mapping
def get_rbac_filter(self, request_context):
# TODO(krykowski): Define base get_rbac_filter in IndexBase class
# which will provide some common subset of query pieces.
# Something like:
# def get_common_context_pieces(self, request_context):
# return [{'term': {'owner': request_context.owner,
# 'type': {'value': self.get_document_type()}}]
return [
{
"and": [
{
'or': [
{
'term': {
'owner': request_context.owner
}
},
{
'term': {
'visibility': 'public'
}
}
]
},
{
'type': {
'value': self.get_document_type()
}
}
]
}
]
def get_objects(self):
session = self.db_api.get_session()
namespaces = session.query(models.MetadefNamespace).all()
resource_types = session.query(models.MetadefResourceType).all()
resource_types_map = {r.id: r.name for r in resource_types}
for namespace in namespaces:
namespace.resource_types = self.get_namespace_resource_types(
namespace.id, resource_types_map)
namespace.objects = self.get_namespace_objects(namespace.id)
namespace.properties = self.get_namespace_properties(namespace.id)
namespace.tags = self.get_namespace_tags(namespace.id)
return namespaces
def get_namespace_resource_types(self, namespace_id, resource_types):
session = self.db_api.get_session()
namespace_resource_types = session.query(
models.MetadefNamespaceResourceType
).filter_by(namespace_id=namespace_id)
resource_associations = [{
'prefix': r.prefix,
'properties_target': r.properties_target,
'name': resource_types[r.resource_type_id],
} for r in namespace_resource_types]
return resource_associations
def get_namespace_properties(self, namespace_id):
session = self.db_api.get_session()
properties = session.query(
models.MetadefProperty
).filter_by(namespace_id=namespace_id)
return list(properties)
def get_namespace_objects(self, namespace_id):
session = self.db_api.get_session()
namespace_objects = session.query(
models.MetadefObject
).filter_by(namespace_id=namespace_id)
return list(namespace_objects)
def get_namespace_tags(self, namespace_id):
session = self.db_api.get_session()
namespace_tags = session.query(
models.MetadefTag
).filter_by(namespace_id=namespace_id)
return list(namespace_tags)
def serialize(self, obj):
object_docs = [self.serialize_object(ns_obj) for ns_obj in obj.objects]
property_docs = [self.serialize_property(prop.name, prop.json_schema)
for prop in obj.properties]
resource_type_docs = [self.serialize_namespace_resource_type(rt)
for rt in obj.resource_types]
tag_docs = [self.serialize_tag(tag) for tag in obj.tags]
namespace_doc = self.serialize_namespace(obj)
namespace_doc.update({
'objects': object_docs,
'properties': property_docs,
'resource_types': resource_type_docs,
'tags': tag_docs,
})
return namespace_doc
def serialize_namespace(self, namespace):
return {
'namespace': namespace.namespace,
'display_name': namespace.display_name,
'description': namespace.description,
'visibility': namespace.visibility,
'protected': namespace.protected,
'owner': namespace.owner,
}
def serialize_object(self, obj):
obj_properties = obj.json_schema
property_docs = []
for name, schema in six.iteritems(obj_properties):
property_doc = self.serialize_property(name, schema)
property_docs.append(property_doc)
document = {
'name': obj.name,
'description': obj.description,
'properties': property_docs,
}
return document
def serialize_property(self, name, schema):
document = copy.deepcopy(schema)
document['property'] = name
if 'default' in document:
document['default'] = str(document['default'])
if 'enum' in document:
document['enum'] = [str(enum) for enum in document['enum']]
return document
def serialize_namespace_resource_type(self, ns_resource_type):
return {
'name': ns_resource_type['name'],
'prefix': ns_resource_type['prefix'],
'properties_target': ns_resource_type['properties_target']
}
def serialize_tag(self, tag):
return {
'name': tag.name
}
def get_notification_handler(self):
return metadefs_notification_handler.MetadefHandler(
self.engine,
self.get_index_name(),
self.get_document_type()
)
def get_notification_supported_events(self):
return [
"metadef_namespace.create",
"metadef_namespace.update",
"metadef_namespace.delete",
"metadef_object.create",
"metadef_object.update",
"metadef_object.delete",
"metadef_property.create",
"metadef_property.update",
"metadef_property.delete",
"metadef_tag.create",
"metadef_tag.update",
"metadef_tag.delete",
"metadef_resource_type.create",
"metadef_resource_type.delete",
"metadef_namespace.delete_properties",
"metadef_namespace.delete_objects",
"metadef_namespace.delete_tags"
]

@@ -1,251 +0,0 @@
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import six
from oslo_log import log as logging
import oslo_messaging
from oslo_utils import encodeutils
from glance.search.plugins import base
LOG = logging.getLogger(__name__)
class MetadefHandler(base.NotificationBase):
def __init__(self, *args, **kwargs):
super(MetadefHandler, self).__init__(*args, **kwargs)
self.namespace_delete_keys = ['deleted_at', 'deleted', 'created_at',
'updated_at', 'namespace_old']
self.property_delete_keys = ['deleted', 'deleted_at',
'name_old', 'namespace', 'name']
def process(self, ctxt, publisher_id, event_type, payload, metadata):
try:
actions = {
"metadef_namespace.create": self.create_ns,
"metadef_namespace.update": self.update_ns,
"metadef_namespace.delete": self.delete_ns,
"metadef_object.create": self.create_obj,
"metadef_object.update": self.update_obj,
"metadef_object.delete": self.delete_obj,
"metadef_property.create": self.create_prop,
"metadef_property.update": self.update_prop,
"metadef_property.delete": self.delete_prop,
"metadef_resource_type.create": self.create_rs,
"metadef_resource_type.delete": self.delete_rs,
"metadef_tag.create": self.create_tag,
"metadef_tag.update": self.update_tag,
"metadef_tag.delete": self.delete_tag,
"metadef_namespace.delete_properties": self.delete_props,
"metadef_namespace.delete_objects": self.delete_objects,
"metadef_namespace.delete_tags": self.delete_tags
}
actions[event_type](payload)
return oslo_messaging.NotificationResult.HANDLED
except Exception as e:
LOG.error(encodeutils.exception_to_unicode(e))
def run_create(self, id, payload):
self.engine.create(
index=self.index_name,
doc_type=self.document_type,
body=payload,
id=id
)
def run_update(self, id, payload, script=False):
if script:
self.engine.update(
index=self.index_name,
doc_type=self.document_type,
body=payload,
id=id)
else:
doc = {"doc": payload}
self.engine.update(
index=self.index_name,
doc_type=self.document_type,
body=doc,
id=id)
def run_delete(self, id):
self.engine.delete(
index=self.index_name,
doc_type=self.document_type,
id=id
)
def create_ns(self, payload):
id = payload['namespace']
self.run_create(id, self.format_namespace(payload))
def update_ns(self, payload):
id = payload['namespace_old']
self.run_update(id, self.format_namespace(payload))
def delete_ns(self, payload):
id = payload['namespace']
self.run_delete(id)
def create_obj(self, payload):
id = payload['namespace']
object = self.format_object(payload)
self.create_entity(id, "objects", object)
def update_obj(self, payload):
id = payload['namespace']
object = self.format_object(payload)
self.update_entity(id, "objects", object,
payload['name_old'], "name")
def delete_obj(self, payload):
id = payload['namespace']
self.delete_entity(id, "objects", payload['name'], "name")
def create_prop(self, payload):
id = payload['namespace']
property = self.format_property(payload)
self.create_entity(id, "properties", property)
def update_prop(self, payload):
id = payload['namespace']
property = self.format_property(payload)
self.update_entity(id, "properties", property,
payload['name_old'], "property")
def delete_prop(self, payload):
id = payload['namespace']
self.delete_entity(id, "properties", payload['name'], "property")
def create_rs(self, payload):
id = payload['namespace']
resource_type = dict()
resource_type['name'] = payload['name']
if payload['prefix']:
resource_type['prefix'] = payload['prefix']
if payload['properties_target']:
resource_type['properties_target'] = payload['properties_target']
self.create_entity(id, "resource_types", resource_type)
def delete_rs(self, payload):
id = payload['namespace']
self.delete_entity(id, "resource_types", payload['name'], "name")
def create_tag(self, payload):
id = payload['namespace']
tag = dict()
tag['name'] = payload['name']
self.create_entity(id, "tags", tag)
def update_tag(self, payload):
id = payload['namespace']
tag = dict()
tag['name'] = payload['name']
self.update_entity(id, "tags", tag, payload['name_old'], "name")
def delete_tag(self, payload):
id = payload['namespace']
self.delete_entity(id, "tags", payload['name'], "name")
def delete_props(self, payload):
self.delete_field(payload, "properties")
def delete_objects(self, payload):
self.delete_field(payload, "objects")
def delete_tags(self, payload):
self.delete_field(payload, "tags")
def create_entity(self, id, entity, entity_data):
script = ("if (ctx._source.containsKey('%(entity)s'))"
"{ctx._source.%(entity)s += entity_item }"
"else {ctx._source.%(entity)s=entity_list};" %
{"entity": entity})
params = {
"entity_item": entity_data,
"entity_list": [entity_data]
}
payload = {"script": script, "params": params}
self.run_update(id, payload=payload, script=True)
def update_entity(self, id, entity, entity_data, entity_id, field_name):
entity_id = entity_id.lower()
script = ("obj=null; for(entity_item :ctx._source.%(entity)s)"
"{if(entity_item['%(field_name)s'].toLowerCase() "
" == entity_id ) obj=entity_item;};"
"if(obj!=null)ctx._source.%(entity)s.remove(obj);"
"if (ctx._source.containsKey('%(entity)s'))"
"{ctx._source.%(entity)s += entity_item; }"
"else {ctx._source.%(entity)s=entity_list;}" %
{"entity": entity, "field_name": field_name})
params = {
"entity_item": entity_data,
"entity_list": [entity_data],
"entity_id": entity_id
}
payload = {"script": script, "params": params}
self.run_update(id, payload=payload, script=True)
def delete_entity(self, id, entity, entity_id, field_name):
entity_id = entity_id.lower()
script = ("obj=null; for(entity_item :ctx._source.%(entity)s)"
"{if(entity_item['%(field_name)s'].toLowerCase() "
" == entity_id ) obj=entity_item;};"
"if(obj!=null)ctx._source.%(entity)s.remove(obj);" %
{"entity": entity, "field_name": field_name})
params = {
"entity_id": entity_id
}
payload = {"script": script, "params": params}
self.run_update(id, payload=payload, script=True)
def delete_field(self, payload, field):
id = payload['namespace']
script = ("if (ctx._source.containsKey('%(field)s'))"
"{ctx._source.remove('%(field)s')}") % {"field": field}
payload = {"script": script}
self.run_update(id, payload=payload, script=True)
def format_namespace(self, payload):
for key in self.namespace_delete_keys:
if key in payload.keys():
del payload[key]
return payload
def format_object(self, payload):
formatted_object = dict()
formatted_object['name'] = payload['name']
formatted_object['description'] = payload['description']
if payload['required']:
formatted_object['required'] = payload['required']
formatted_object['properties'] = []
for property in payload['properties']:
formatted_property = self.format_property(property)
formatted_object['properties'].append(formatted_property)
return formatted_object
def format_property(self, payload):
prop_data = dict()
prop_data['property'] = payload['name']
for key, value in six.iteritems(payload):
if key not in self.property_delete_keys and value:
prop_data[key] = value
return prop_data

@@ -1,107 +0,0 @@
#!/usr/bin/env python
#
# Copyright 2012-2014 eNovance <licensing@enovance.com>
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import socket
import sys
from oslo_config import cfg
import oslo_i18n
from oslo_log import log
import oslo_messaging
CONF = cfg.CONF
OPTS = [
cfg.StrOpt('host',
default=socket.gethostname(),
help='Name of this node, which must be valid in an AMQP '
'key. Can be an opaque identifier. For ZeroMQ only, must '
'be a valid host name, FQDN, or IP address.'),
cfg.IntOpt('listener_workers',
default=1,
help='Number of workers for notification service. A single '
'notification agent is enabled by default.'),
cfg.IntOpt('http_timeout',
default=600,
help='Timeout seconds for HTTP requests. Set it to None to '
'disable timeout.'),
]
CONF.register_opts(OPTS)
CLI_OPTS = [
cfg.StrOpt('os-username',
deprecated_group="DEFAULT",
default=os.environ.get('OS_USERNAME', 'glance'),
help='User name to use for OpenStack service access.'),
cfg.StrOpt('os-password',
deprecated_group="DEFAULT",
secret=True,
default=os.environ.get('OS_PASSWORD', 'admin'),
help='Password to use for OpenStack service access.'),
cfg.StrOpt('os-tenant-id',
deprecated_group="DEFAULT",
default=os.environ.get('OS_TENANT_ID', ''),
help='Tenant ID to use for OpenStack service access.'),
cfg.StrOpt('os-tenant-name',
deprecated_group="DEFAULT",
default=os.environ.get('OS_TENANT_NAME', 'admin'),
help='Tenant name to use for OpenStack service access.'),
cfg.StrOpt('os-cacert',
default=os.environ.get('OS_CACERT'),
help='Certificate chain for SSL validation.'),
cfg.StrOpt('os-auth-url',
deprecated_group="DEFAULT",
default=os.environ.get('OS_AUTH_URL',
'http://localhost:5000/v2.0'),
help='Auth URL to use for OpenStack service access.'),
cfg.StrOpt('os-region-name',
deprecated_group="DEFAULT",
default=os.environ.get('OS_REGION_NAME'),
help='Region name to use for OpenStack service endpoints.'),
cfg.StrOpt('os-endpoint-type',
default=os.environ.get('OS_ENDPOINT_TYPE', 'publicURL'),
help='Type of endpoint in Identity service catalog to use for '
'communication with OpenStack services.'),
cfg.BoolOpt('insecure',
default=False,
help='Disables X.509 certificate validation when an '
'SSL connection to Identity Service is established.'),
]
CONF.register_cli_opts(CLI_OPTS, group="service_credentials")
LOG = log.getLogger(__name__)
_DEFAULT_LOG_LEVELS = ['keystonemiddleware=WARN', 'stevedore=WARN']
class WorkerException(Exception):
    """Exception for errors relating to service workers."""


def get_workers(name):
    return 1


def prepare_service(argv=None):
    oslo_i18n.enable_lazy()
    log.set_defaults(_DEFAULT_LOG_LEVELS)
    log.register_options(CONF)
    if argv is None:
        argv = sys.argv
    CONF(argv[1:], project='glance-search')
    log.setup(cfg.CONF, 'glance-search')
    oslo_messaging.set_transport_defaults('glance')
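The removed glance-search and glance-index console scripts (dropped from setup.cfg later in this diff) would call prepare_service() before starting their servers. A rough, hedged sketch of such an entry point; the import path and the bootstrap steps are assumptions, not code from this commit:

    #!/usr/bin/env python
    # Hypothetical entry-point sketch; module path and bootstrap are assumed.
    import sys

    from glance.search import service  # assumed location of prepare_service()


    def main():
        # Parse config/CLI options, set up logging and messaging defaults.
        service.prepare_service(sys.argv)
        # ... build and start the WSGI app or notification listener here.


    if __name__ == '__main__':
        main()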


@ -1,30 +0,0 @@
# Copyright 2015 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_context import context
from glance.common import exception
from glance import gateway
from glance.tests import utils as test_utils
class TestGateway(test_utils.BaseTestCase):

    @mock.patch.object(gateway, 'glance_search', None)
    def test_get_catalog_search_repo_no_es_api(self):
        gate = gateway.Gateway()
        self.assertRaises(exception.SearchNotAvailable,
                          gate.get_catalog_search_repo,
                          context.get_admin_context())
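This test exercises the gateway's treatment of the search package as an optional dependency: with gateway.glance_search patched to None, asking for the catalog search repo must raise SearchNotAvailable. A hedged sketch of that guard pattern; the exact wiring inside glance.gateway may have differed:

    # Illustrative optional-import guard; only the attribute name and the
    # exception come from the test above.
    from glance.common import exception

    try:
        import glance.search as glance_search
    except ImportError:
        glance_search = None


    class Gateway(object):
        def get_catalog_search_repo(self, context):
            if glance_search is None:
                raise exception.SearchNotAvailable()
            # ... otherwise construct and return the CatalogSearchRepo.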


@ -1,653 +0,0 @@
# Copyright 2015 Intel Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import mock
from oslo_utils import timeutils
from glance.search.plugins import images as images_plugin
from glance.search.plugins import metadefs as metadefs_plugin
import glance.tests.unit.utils as unit_test_utils
import glance.tests.utils as test_utils
DATETIME = datetime.datetime(2012, 5, 16, 15, 27, 36, 325355)
DATE1 = timeutils.isotime(DATETIME)
# General
USER1 = '54492ba0-f4df-4e4e-be62-27f4d76b29cf'
TENANT1 = '6838eb7b-6ded-434a-882c-b344c77fe8df'
TENANT2 = '2c014f32-55eb-467d-8fcb-4bd706012f81'
TENANT3 = '5a3e60e8-cfa9-4a9e-a90a-62b42cea92b8'
TENANT4 = 'c6c87f25-8a94-47ed-8c83-053c25f42df4'
# Images
UUID1 = 'c80a1a6c-bd1f-41c5-90ee-81afedb1d58d'
UUID2 = 'a85abd86-55b3-4d5b-b0b4-5d0a6e6042fc'
UUID3 = '971ec09a-8067-4bc8-a91f-ae3557f1c4c7'
UUID4 = '6bbe7cc2-eae7-4c0f-b50d-a7160b0c6a86'
CHECKSUM = '93264c3edf5972c9f1cb309543d38a5c'
# Metadefinitions
NAMESPACE1 = 'namespace1'
NAMESPACE2 = 'namespace2'
PROPERTY1 = 'Property1'
PROPERTY2 = 'Property2'
PROPERTY3 = 'Property3'
OBJECT1 = 'Object1'
OBJECT2 = 'Object2'
OBJECT3 = 'Object3'
RESOURCE_TYPE1 = 'ResourceType1'
RESOURCE_TYPE2 = 'ResourceType2'
RESOURCE_TYPE3 = 'ResourceType3'
TAG1 = 'Tag1'
TAG2 = 'Tag2'
TAG3 = 'Tag3'
class DictObj(object):
def __init__(self, **entries):
self.__dict__.update(entries)
def _image_fixture(image_id, **kwargs):
image_members = kwargs.pop('members', [])
extra_properties = kwargs.pop('extra_properties', {})
obj = {
'id': image_id,
'name': None,
'is_public': False,
'properties': {},
'checksum': None,
'owner': None,
'status': 'queued',
'tags': [],
'size': None,
'virtual_size': None,
'locations': [],
'protected': False,
'disk_format': None,
'container_format': None,
'deleted': False,
'min_ram': None,
'min_disk': None,
'created_at': DATETIME,
'updated_at': DATETIME,
}
obj.update(kwargs)
image = DictObj(**obj)
image.tags = set(image.tags)
image.properties = [DictObj(name=k, value=v)
for k, v in extra_properties.items()]
image.members = [DictObj(**m) for m in image_members]
return image
def _db_namespace_fixture(**kwargs):
obj = {
'namespace': None,
'display_name': None,
'description': None,
'visibility': True,
'protected': False,
'owner': None
}
obj.update(kwargs)
return DictObj(**obj)
def _db_property_fixture(name, **kwargs):
obj = {
'name': name,
'json_schema': {"type": "string", "title": "title"},
}
obj.update(kwargs)
return DictObj(**obj)
def _db_object_fixture(name, **kwargs):
obj = {
'name': name,
'description': None,
'json_schema': {},
'required': '[]',
}
obj.update(kwargs)
return DictObj(**obj)
def _db_resource_type_fixture(name, **kwargs):
obj = {
'name': name,
'protected': False,
}
obj.update(kwargs)
return DictObj(**obj)
def _db_namespace_resource_type_fixture(name, prefix, **kwargs):
obj = {
'properties_target': None,
'prefix': prefix,
'name': name,
}
obj.update(kwargs)
return obj
def _db_tag_fixture(name, **kwargs):
obj = {
'name': name,
}
obj.update(**kwargs)
return DictObj(**obj)
class TestImageLoaderPlugin(test_utils.BaseTestCase):
def setUp(self):
super(TestImageLoaderPlugin, self).setUp()
self.db = unit_test_utils.FakeDB(initialize=False)
self._create_images()
self.plugin = images_plugin.ImageIndex()
def _create_images(self):
self.simple_image = _image_fixture(
UUID1, owner=TENANT1, checksum=CHECKSUM, name='simple', size=256,
is_public=True, status='active'
)
self.tagged_image = _image_fixture(
UUID2, owner=TENANT1, checksum=CHECKSUM, name='tagged', size=512,
is_public=True, status='active', tags=['ping', 'pong'],
)
self.complex_image = _image_fixture(
UUID3, owner=TENANT2, checksum=CHECKSUM, name='complex', size=256,
is_public=True, status='active',
extra_properties={'mysql_version': '5.6', 'hypervisor': 'lxc'}
)
self.members_image = _image_fixture(
UUID3, owner=TENANT2, checksum=CHECKSUM, name='complex', size=256,
is_public=True, status='active',
members=[
{'member': TENANT1, 'deleted': False, 'status': 'accepted'},
{'member': TENANT2, 'deleted': False, 'status': 'accepted'},
{'member': TENANT3, 'deleted': True, 'status': 'accepted'},
{'member': TENANT4, 'deleted': False, 'status': 'pending'},
]
)
self.images = [self.simple_image, self.tagged_image,
self.complex_image, self.members_image]
def test_index_name(self):
self.assertEqual('glance', self.plugin.get_index_name())
def test_document_type(self):
self.assertEqual('image', self.plugin.get_document_type())
def test_image_serialize(self):
expected = {
'checksum': '93264c3edf5972c9f1cb309543d38a5c',
'container_format': None,
'disk_format': None,
'id': 'c80a1a6c-bd1f-41c5-90ee-81afedb1d58d',
'members': [],
'min_disk': None,
'min_ram': None,
'name': 'simple',
'owner': '6838eb7b-6ded-434a-882c-b344c77fe8df',
'protected': False,
'size': 256,
'status': 'active',
'tags': set([]),
'virtual_size': None,
'visibility': 'public',
'created_at': DATE1,
'updated_at': DATE1
}
serialized = self.plugin.serialize(self.simple_image)
self.assertEqual(expected, serialized)
def test_image_with_tags_serialize(self):
expected = {
'checksum': '93264c3edf5972c9f1cb309543d38a5c',
'container_format': None,
'disk_format': None,
'id': 'a85abd86-55b3-4d5b-b0b4-5d0a6e6042fc',
'members': [],
'min_disk': None,
'min_ram': None,
'name': 'tagged',
'owner': '6838eb7b-6ded-434a-882c-b344c77fe8df',
'protected': False,
'size': 512,
'status': 'active',
'tags': set(['ping', 'pong']),
'virtual_size': None,
'visibility': 'public',
'created_at': DATE1,
'updated_at': DATE1
}
serialized = self.plugin.serialize(self.tagged_image)
self.assertEqual(expected, serialized)
def test_image_with_properties_serialize(self):
expected = {
'checksum': '93264c3edf5972c9f1cb309543d38a5c',
'container_format': None,
'disk_format': None,
'hypervisor': 'lxc',
'id': '971ec09a-8067-4bc8-a91f-ae3557f1c4c7',
'members': [],
'min_disk': None,
'min_ram': None,
'mysql_version': '5.6',
'name': 'complex',
'owner': '2c014f32-55eb-467d-8fcb-4bd706012f81',
'protected': False,
'size': 256,
'status': 'active',
'tags': set([]),
'virtual_size': None,
'visibility': 'public',
'created_at': DATE1,
'updated_at': DATE1
}
serialized = self.plugin.serialize(self.complex_image)
self.assertEqual(expected, serialized)
def test_image_with_members_serialize(self):
expected = {
'checksum': '93264c3edf5972c9f1cb309543d38a5c',
'container_format': None,
'disk_format': None,
'id': '971ec09a-8067-4bc8-a91f-ae3557f1c4c7',
'members': ['6838eb7b-6ded-434a-882c-b344c77fe8df',
'2c014f32-55eb-467d-8fcb-4bd706012f81'],
'min_disk': None,
'min_ram': None,
'name': 'complex',
'owner': '2c014f32-55eb-467d-8fcb-4bd706012f81',
'protected': False,
'size': 256,
'status': 'active',
'tags': set([]),
'virtual_size': None,
'visibility': 'public',
'created_at': DATE1,
'updated_at': DATE1
}
serialized = self.plugin.serialize(self.members_image)
self.assertEqual(expected, serialized)
def test_setup_data(self):
with mock.patch.object(self.plugin, 'get_objects',
return_value=self.images) as mock_get:
with mock.patch.object(self.plugin, 'save_documents') as mock_save:
self.plugin.setup_data()
mock_get.assert_called_once_with()
mock_save.assert_called_once_with([
{
'status': 'active',
'tags': set([]),
'container_format': None,
'min_ram': None,
'visibility': 'public',
'owner': '6838eb7b-6ded-434a-882c-b344c77fe8df',
'members': [],
'min_disk': None,
'virtual_size': None,
'id': 'c80a1a6c-bd1f-41c5-90ee-81afedb1d58d',
'size': 256,
'name': 'simple',
'checksum': '93264c3edf5972c9f1cb309543d38a5c',
'disk_format': None,
'protected': False,
'created_at': DATE1,
'updated_at': DATE1
},
{
'status': 'active',
'tags': set(['pong', 'ping']),
'container_format': None,
'min_ram': None,
'visibility': 'public',
'owner': '6838eb7b-6ded-434a-882c-b344c77fe8df',
'members': [],
'min_disk': None,
'virtual_size': None,
'id': 'a85abd86-55b3-4d5b-b0b4-5d0a6e6042fc',
'size': 512,
'name': 'tagged',
'checksum': '93264c3edf5972c9f1cb309543d38a5c',
'disk_format': None,
'protected': False,
'created_at': DATE1,
'updated_at': DATE1
},
{
'status': 'active',
'tags': set([]),
'container_format': None,
'min_ram': None,
'visibility': 'public',
'owner': '2c014f32-55eb-467d-8fcb-4bd706012f81',
'members': [],
'min_disk': None,
'virtual_size': None,
'id': '971ec09a-8067-4bc8-a91f-ae3557f1c4c7',
'size': 256,
'name': 'complex',
'checksum': '93264c3edf5972c9f1cb309543d38a5c',
'mysql_version': '5.6',
'disk_format': None,
'protected': False,
'hypervisor': 'lxc',
'created_at': DATE1,
'updated_at': DATE1
},
{
'status': 'active',
'tags': set([]),
'container_format': None,
'min_ram': None,
'visibility': 'public',
'owner': '2c014f32-55eb-467d-8fcb-4bd706012f81',
'members': ['6838eb7b-6ded-434a-882c-b344c77fe8df',
'2c014f32-55eb-467d-8fcb-4bd706012f81'],
'min_disk': None,
'virtual_size': None,
'id': '971ec09a-8067-4bc8-a91f-ae3557f1c4c7',
'size': 256,
'name': 'complex',
'checksum': '93264c3edf5972c9f1cb309543d38a5c',
'disk_format': None,
'protected': False,
'created_at': DATE1,
'updated_at': DATE1
}
])
class TestMetadefLoaderPlugin(test_utils.BaseTestCase):
def setUp(self):
super(TestMetadefLoaderPlugin, self).setUp()
self.db = unit_test_utils.FakeDB(initialize=False)
self._create_resource_types()
self._create_namespaces()
self._create_namespace_resource_types()
self._create_properties()
self._create_tags()
self._create_objects()
self.plugin = metadefs_plugin.MetadefIndex()
def _create_namespaces(self):
self.namespaces = [
_db_namespace_fixture(namespace=NAMESPACE1,
display_name='1',
description='desc1',
visibility='private',
protected=True,
owner=TENANT1),
_db_namespace_fixture(namespace=NAMESPACE2,
display_name='2',
description='desc2',
visibility='public',
protected=False,
owner=TENANT1),
]
def _create_properties(self):
self.properties = [
_db_property_fixture(name=PROPERTY1),
_db_property_fixture(name=PROPERTY2),
_db_property_fixture(name=PROPERTY3)
]
self.namespaces[0].properties = [self.properties[0]]
self.namespaces[1].properties = self.properties[1:]
def _create_objects(self):
self.objects = [
_db_object_fixture(name=OBJECT1,
description='desc1',
json_schema={'property1': {
'type': 'string',
'default': 'value1',
'enum': ['value1', 'value2']
}}),
_db_object_fixture(name=OBJECT2,
description='desc2'),
_db_object_fixture(name=OBJECT3,
description='desc3'),
]
self.namespaces[0].objects = [self.objects[0]]
self.namespaces[1].objects = self.objects[1:]
def _create_resource_types(self):
self.resource_types = [
_db_resource_type_fixture(name=RESOURCE_TYPE1,
protected=False),
_db_resource_type_fixture(name=RESOURCE_TYPE2,
protected=False),
_db_resource_type_fixture(name=RESOURCE_TYPE3,
protected=True),
]
def _create_namespace_resource_types(self):
self.namespace_resource_types = [
_db_namespace_resource_type_fixture(
prefix='p1',
name=self.resource_types[0].name),
_db_namespace_resource_type_fixture(
prefix='p2',
name=self.resource_types[1].name),
_db_namespace_resource_type_fixture(
prefix='p2',
name=self.resource_types[2].name),
]
self.namespaces[0].resource_types = self.namespace_resource_types[:1]
self.namespaces[1].resource_types = self.namespace_resource_types[1:]
def _create_tags(self):
self.tags = [
_db_resource_type_fixture(name=TAG1),
_db_resource_type_fixture(name=TAG2),
_db_resource_type_fixture(name=TAG3),
]
self.namespaces[0].tags = self.tags[:1]
self.namespaces[1].tags = self.tags[1:]
def test_index_name(self):
self.assertEqual('glance', self.plugin.get_index_name())
def test_document_type(self):
self.assertEqual('metadef', self.plugin.get_document_type())
def test_namespace_serialize(self):
metadef_namespace = self.namespaces[0]
expected = {
'namespace': 'namespace1',
'display_name': '1',
'description': 'desc1',
'visibility': 'private',
'protected': True,
'owner': '6838eb7b-6ded-434a-882c-b344c77fe8df'
}
serialized = self.plugin.serialize_namespace(metadef_namespace)
self.assertEqual(expected, serialized)
def test_object_serialize(self):
metadef_object = self.objects[0]
expected = {
'name': 'Object1',
'description': 'desc1',
'properties': [{
'default': 'value1',
'enum': ['value1', 'value2'],
'property': 'property1',
'type': 'string'
}]
}
serialized = self.plugin.serialize_object(metadef_object)
self.assertEqual(expected, serialized)
def test_property_serialize(self):
metadef_property = self.properties[0]
expected = {
'property': 'Property1',
'type': 'string',
'title': 'title',
}
serialized = self.plugin.serialize_property(
metadef_property.name, metadef_property.json_schema)
self.assertEqual(expected, serialized)
def test_complex_serialize(self):
metadef_namespace = self.namespaces[0]
expected = {
'namespace': 'namespace1',
'display_name': '1',
'description': 'desc1',
'visibility': 'private',
'protected': True,
'owner': '6838eb7b-6ded-434a-882c-b344c77fe8df',
'objects': [{
'description': 'desc1',
'name': 'Object1',
'properties': [{
'default': 'value1',
'enum': ['value1', 'value2'],
'property': 'property1',
'type': 'string'
}]
}],
'resource_types': [{
'prefix': 'p1',
'name': 'ResourceType1',
'properties_target': None
}],
'properties': [{
'property': 'Property1',
'title': 'title',
'type': 'string'
}],
'tags': [{'name': 'Tag1'}],
}
serialized = self.plugin.serialize(metadef_namespace)
self.assertEqual(expected, serialized)
def test_setup_data(self):
with mock.patch.object(self.plugin, 'get_objects',
return_value=self.namespaces) as mock_get:
with mock.patch.object(self.plugin, 'save_documents') as mock_save:
self.plugin.setup_data()
mock_get.assert_called_once_with()
mock_save.assert_called_once_with([
{
'display_name': '1',
'description': 'desc1',
'objects': [
{
'name': 'Object1',
'description': 'desc1',
'properties': [{
'default': 'value1',
'property': 'property1',
'enum': ['value1', 'value2'],
'type': 'string'
}],
}
],
'namespace': 'namespace1',
'visibility': 'private',
'protected': True,
'owner': '6838eb7b-6ded-434a-882c-b344c77fe8df',
'properties': [{
'property': 'Property1',
'type': 'string',
'title': 'title'
}],
'resource_types': [{
'prefix': 'p1',
'name': 'ResourceType1',
'properties_target': None
}],
'tags': [{'name': 'Tag1'}],
},
{
'display_name': '2',
'description': 'desc2',
'objects': [
{
'properties': [],
'name': 'Object2',
'description': 'desc2'
},
{
'properties': [],
'name': 'Object3',
'description': 'desc3'
}
],
'namespace': 'namespace2',
'visibility': 'public',
'protected': False,
'owner': '6838eb7b-6ded-434a-882c-b344c77fe8df',
'properties': [
{
'property': 'Property2',
'type': 'string',
'title': 'title'
},
{
'property': 'Property3',
'type': 'string',
'title': 'title'
}
],
'resource_types': [
{
'name': 'ResourceType2',
'prefix': 'p2',
'properties_target': None,
},
{
'name': 'ResourceType3',
'prefix': 'p2',
'properties_target': None,
}
],
'tags': [
{'name': 'Tag2'},
{'name': 'Tag3'},
],
}
])
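Both loader plugins follow the same shape: get_objects() reads rows from the database, serialize() flattens each one into a plain document, and setup_data() passes the documents to save_documents(). A hedged sketch of how such documents could be bulk-indexed with elasticsearch-py's helpers; the real save_documents() lived in the removed plugin base class and may have differed:

    # Illustrative only: bulk-index serialized documents into Elasticsearch.
    from elasticsearch import Elasticsearch, helpers

    es = Elasticsearch(['localhost:9200'])

    def save_documents(index, doc_type, documents):
        actions = [{'_index': index,
                    '_type': doc_type,
                    '_id': doc.get('id') or doc.get('namespace'),
                    '_source': doc}
                   for doc in documents]
        helpers.bulk(es, actions)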


@ -1,989 +0,0 @@
# Copyright 2015 Hewlett-Packard Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_serialization import jsonutils
import webob.exc
from glance.common import exception
from glance.common import utils
import glance.gateway
import glance.search
from glance.search.api.v0_1 import search as search
from glance.tests.unit import base
import glance.tests.unit.utils as unit_test_utils
import glance.tests.utils as test_utils
def _action_fixture(op_type, data, index=None, doc_type=None, _id=None,
**kwargs):
action = {
'action': op_type,
'id': _id,
'index': index,
'type': doc_type,
'data': data,
}
if kwargs:
action.update(kwargs)
return action
def _image_fixture(op_type, _id=None, index='glance', doc_type='image',
data=None, **kwargs):
image_data = {
'name': 'image-1',
'disk_format': 'raw',
}
if data is not None:
image_data.update(data)
return _action_fixture(op_type, image_data, index, doc_type, _id, **kwargs)
class TestSearchController(base.IsolatedUnitTest):
def setUp(self):
super(TestSearchController, self).setUp()
self.search_controller = search.SearchController()
def test_search_all(self):
request = unit_test_utils.get_fake_request()
self.search_controller.search = mock.Mock(return_value="{}")
query = {"match_all": {}}
index = "glance"
doc_type = "metadef"
fields = None
offset = 0
limit = 10
self.search_controller.search(
request, query, index, doc_type, fields, offset, limit)
self.search_controller.search.assert_called_once_with(
request, query, index, doc_type, fields, offset, limit)
def test_search_all_repo(self):
request = unit_test_utils.get_fake_request()
repo = glance.search.CatalogSearchRepo
repo.search = mock.Mock(return_value="{}")
query = {"match_all": {}}
index = "glance"
doc_type = "metadef"
fields = []
offset = 0
limit = 10
self.search_controller.search(
request, query, index, doc_type, fields, offset, limit)
repo.search.assert_called_once_with(
index, doc_type, query, fields, offset, limit, True)
def test_search_forbidden(self):
request = unit_test_utils.get_fake_request()
repo = glance.search.CatalogSearchRepo
repo.search = mock.Mock(side_effect=exception.Forbidden)
query = {"match_all": {}}
index = "glance"
doc_type = "metadef"
fields = []
offset = 0
limit = 10
self.assertRaises(
webob.exc.HTTPForbidden, self.search_controller.search,
request, query, index, doc_type, fields, offset, limit)
def test_search_not_found(self):
request = unit_test_utils.get_fake_request()
repo = glance.search.CatalogSearchRepo
repo.search = mock.Mock(side_effect=exception.NotFound)
query = {"match_all": {}}
index = "glance"
doc_type = "metadef"
fields = []
offset = 0
limit = 10
self.assertRaises(
webob.exc.HTTPNotFound, self.search_controller.search, request,
query, index, doc_type, fields, offset, limit)
def test_search_duplicate(self):
request = unit_test_utils.get_fake_request()
repo = glance.search.CatalogSearchRepo
repo.search = mock.Mock(side_effect=exception.Duplicate)
query = {"match_all": {}}
index = "glance"
doc_type = "metadef"
fields = []
offset = 0
limit = 10
self.assertRaises(
webob.exc.HTTPConflict, self.search_controller.search, request,
query, index, doc_type, fields, offset, limit)
def test_search_internal_server_error(self):
request = unit_test_utils.get_fake_request()
repo = glance.search.CatalogSearchRepo
repo.search = mock.Mock(side_effect=Exception)
query = {"match_all": {}}
index = "glance"
doc_type = "metadef"
fields = []
offset = 0
limit = 10
self.assertRaises(
webob.exc.HTTPInternalServerError, self.search_controller.search,
request, query, index, doc_type, fields, offset, limit)
def test_index_complete(self):
request = unit_test_utils.get_fake_request()
self.search_controller.index = mock.Mock(return_value="{}")
actions = [{'action': 'create', 'index': 'myindex', 'id': 10,
'type': 'MyTest', 'data': '{"name": "MyName"}'}]
default_index = 'glance'
default_type = 'image'
self.search_controller.index(
request, actions, default_index, default_type)
self.search_controller.index.assert_called_once_with(
request, actions, default_index, default_type)
def test_index_repo_complete(self):
request = unit_test_utils.get_fake_request()
repo = glance.search.CatalogSearchRepo
repo.index = mock.Mock(return_value="{}")
actions = [{'action': 'create', 'index': 'myindex', 'id': 10,
'type': 'MyTest', 'data': '{"name": "MyName"}'}]
default_index = 'glance'
default_type = 'image'
self.search_controller.index(
request, actions, default_index, default_type)
repo.index.assert_called_once_with(
default_index, default_type, actions)
def test_index_repo_minimal(self):
request = unit_test_utils.get_fake_request()
repo = glance.search.CatalogSearchRepo
repo.index = mock.Mock(return_value="{}")
actions = [{'action': 'create', 'index': 'myindex', 'id': 10,
'type': 'MyTest', 'data': '{"name": "MyName"}'}]
self.search_controller.index(request, actions)
repo.index.assert_called_once_with(None, None, actions)
def test_index_forbidden(self):
request = unit_test_utils.get_fake_request()
repo = glance.search.CatalogSearchRepo
repo.index = mock.Mock(side_effect=exception.Forbidden)
actions = [{'action': 'create', 'index': 'myindex', 'id': 10,
'type': 'MyTest', 'data': '{"name": "MyName"}'}]
self.assertRaises(
webob.exc.HTTPForbidden, self.search_controller.index,
request, actions)
def test_index_not_found(self):
request = unit_test_utils.get_fake_request()
repo = glance.search.CatalogSearchRepo
repo.index = mock.Mock(side_effect=exception.NotFound)
actions = [{'action': 'create', 'index': 'myindex', 'id': 10,
'type': 'MyTest', 'data': '{"name": "MyName"}'}]
self.assertRaises(
webob.exc.HTTPNotFound, self.search_controller.index,
request, actions)
def test_index_duplicate(self):
request = unit_test_utils.get_fake_request()
repo = glance.search.CatalogSearchRepo
repo.index = mock.Mock(side_effect=exception.Duplicate)
actions = [{'action': 'create', 'index': 'myindex', 'id': 10,
'type': 'MyTest', 'data': '{"name": "MyName"}'}]
self.assertRaises(
webob.exc.HTTPConflict, self.search_controller.index,
request, actions)
def test_index_exception(self):
request = unit_test_utils.get_fake_request()
repo = glance.search.CatalogSearchRepo
repo.index = mock.Mock(side_effect=Exception)
actions = [{'action': 'create', 'index': 'myindex', 'id': 10,
'type': 'MyTest', 'data': '{"name": "MyName"}'}]
self.assertRaises(
webob.exc.HTTPInternalServerError, self.search_controller.index,
request, actions)
def test_plugins_info(self):
request = unit_test_utils.get_fake_request()
self.search_controller.plugins_info = mock.Mock(return_value="{}")
self.search_controller.plugins_info(request)
self.search_controller.plugins_info.assert_called_once_with(request)
def test_plugins_info_repo(self):
request = unit_test_utils.get_fake_request()
repo = glance.search.CatalogSearchRepo
repo.plugins_info = mock.Mock(return_value="{}")
self.search_controller.plugins_info(request)
repo.plugins_info.assert_called_once_with()
def test_plugins_info_forbidden(self):
request = unit_test_utils.get_fake_request()
repo = glance.search.CatalogSearchRepo
repo.plugins_info = mock.Mock(side_effect=exception.Forbidden)
self.assertRaises(
webob.exc.HTTPForbidden, self.search_controller.plugins_info,
request)
def test_plugins_info_not_found(self):
request = unit_test_utils.get_fake_request()
repo = glance.search.CatalogSearchRepo
repo.plugins_info = mock.Mock(side_effect=exception.NotFound)
self.assertRaises(webob.exc.HTTPNotFound,
self.search_controller.plugins_info, request)
def test_plugins_info_internal_server_error(self):
request = unit_test_utils.get_fake_request()
repo = glance.search.CatalogSearchRepo
repo.plugins_info = mock.Mock(side_effect=Exception)
self.assertRaises(webob.exc.HTTPInternalServerError,
self.search_controller.plugins_info, request)
class TestSearchDeserializer(test_utils.BaseTestCase):
def setUp(self):
super(TestSearchDeserializer, self).setUp()
self.deserializer = search.RequestDeserializer(
utils.get_search_plugins()
)
def test_single_index(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'index': 'glance',
})
output = self.deserializer.search(request)
self.assertEqual(['glance'], output['index'])
def test_single_doc_type(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'type': 'image',
})
output = self.deserializer.search(request)
self.assertEqual(['image'], output['doc_type'])
def test_empty_request(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({})
output = self.deserializer.search(request)
self.assertEqual(['glance'], output['index'])
self.assertEqual(sorted(['image', 'metadef']),
sorted(output['doc_type']))
def test_empty_request_admin(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({})
request.context.is_admin = True
output = self.deserializer.search(request)
self.assertEqual(['glance'], output['index'])
self.assertEqual(sorted(['image', 'metadef']),
sorted(output['doc_type']))
def test_invalid_index(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'index': 'invalid',
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_invalid_doc_type(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'type': 'invalid',
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_forbidden_schema(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'schema': {},
})
self.assertRaises(webob.exc.HTTPForbidden, self.deserializer.search,
request)
def test_forbidden_self(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'self': {},
})
self.assertRaises(webob.exc.HTTPForbidden, self.deserializer.search,
request)
def test_fields_restriction(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'index': ['glance'],
'type': ['metadef'],
'query': {'match_all': {}},
'fields': ['description'],
})
output = self.deserializer.search(request)
self.assertEqual(['glance'], output['index'])
self.assertEqual(['metadef'], output['doc_type'])
self.assertEqual(['description'], output['fields'])
def test_highlight_fields(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'index': ['glance'],
'type': ['metadef'],
'query': {'match_all': {}},
'highlight': {'fields': {'name': {}}}
})
output = self.deserializer.search(request)
self.assertEqual(['glance'], output['index'])
self.assertEqual(['metadef'], output['doc_type'])
self.assertEqual({'name': {}}, output['query']['highlight']['fields'])
def test_invalid_limit(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'index': ['glance'],
'type': ['metadef'],
'query': {'match_all': {}},
'limit': 'invalid',
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.search,
request)
def test_negative_limit(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'index': ['glance'],
'type': ['metadef'],
'query': {'match_all': {}},
'limit': -1,
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.search,
request)
def test_invalid_offset(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'index': ['glance'],
'type': ['metadef'],
'query': {'match_all': {}},
'offset': 'invalid',
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.search,
request)
def test_negative_offset(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'index': ['glance'],
'type': ['metadef'],
'query': {'match_all': {}},
'offset': -1,
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.search,
request)
def test_limit_and_offset(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'index': ['glance'],
'type': ['metadef'],
'query': {'match_all': {}},
'limit': 1,
'offset': 2,
})
output = self.deserializer.search(request)
self.assertEqual(['glance'], output['index'])
self.assertEqual(['metadef'], output['doc_type'])
self.assertEqual(1, output['limit'])
self.assertEqual(2, output['offset'])
class TestIndexDeserializer(test_utils.BaseTestCase):
def setUp(self):
super(TestIndexDeserializer, self).setUp()
self.deserializer = search.RequestDeserializer(
utils.get_search_plugins()
)
def test_empty_request(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_empty_actions(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'default_index': 'glance',
'default_type': 'image',
'actions': [],
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_missing_actions(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'default_index': 'glance',
'default_type': 'image',
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_invalid_operation_type(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('invalid', '1')]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_invalid_default_index(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'default_index': 'invalid',
'actions': [_image_fixture('create', '1')]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_invalid_default_doc_type(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'default_type': 'invalid',
'actions': [_image_fixture('create', '1')]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_empty_operation_type(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('', '1')]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_missing_operation_type(self):
action = _image_fixture('', '1')
action.pop('action')
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [action]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '1',
'_index': 'glance',
'_op_type': 'index',
'_source': {'disk_format': 'raw', 'name': 'image-1'},
'_type': 'image'
}],
'default_index': None,
'default_type': None
}
self.assertEqual(expected, output)
def test_create_single(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('create', '1')]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '1',
'_index': 'glance',
'_op_type': 'create',
'_source': {'disk_format': 'raw', 'name': 'image-1'},
'_type': 'image'
}],
'default_index': None,
'default_type': None
}
self.assertEqual(expected, output)
def test_create_multiple(self):
actions = [
_image_fixture('create', '1'),
_image_fixture('create', '2', data={'name': 'image-2'}),
]
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': actions,
})
output = self.deserializer.index(request)
expected = {
'actions': [
{
'_id': '1',
'_index': 'glance',
'_op_type': 'create',
'_source': {'disk_format': 'raw', 'name': 'image-1'},
'_type': 'image'
},
{
'_id': '2',
'_index': 'glance',
'_op_type': 'create',
'_source': {'disk_format': 'raw', 'name': 'image-2'},
'_type': 'image'
},
],
'default_index': None,
'default_type': None
}
self.assertEqual(expected, output)
def test_create_missing_data(self):
action = _image_fixture('create', '1')
action.pop('data')
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [action]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_create_with_default_index(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'default_index': 'glance',
'actions': [_image_fixture('create', '1', index=None)]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '1',
'_index': None,
'_op_type': 'create',
'_source': {'disk_format': 'raw', 'name': 'image-1'},
'_type': 'image'
}],
'default_index': 'glance',
'default_type': None
}
self.assertEqual(expected, output)
def test_create_with_default_doc_type(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'default_type': 'image',
'actions': [_image_fixture('create', '1', doc_type=None)]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '1',
'_index': 'glance',
'_op_type': 'create',
'_source': {'disk_format': 'raw', 'name': 'image-1'},
'_type': None
}],
'default_index': None,
'default_type': 'image'
}
self.assertEqual(expected, output)
def test_create_with_default_index_and_doc_type(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'default_index': 'glance',
'default_type': 'image',
'actions': [_image_fixture('create', '1', index=None,
doc_type=None)]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '1',
'_index': None,
'_op_type': 'create',
'_source': {'disk_format': 'raw', 'name': 'image-1'},
'_type': None
}],
'default_index': 'glance',
'default_type': 'image'
}
self.assertEqual(expected, output)
def test_create_missing_id(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('create')]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': None,
'_index': 'glance',
'_op_type': 'create',
'_source': {'disk_format': 'raw', 'name': 'image-1'},
'_type': 'image'
}],
'default_index': None,
'default_type': None,
}
self.assertEqual(expected, output)
def test_create_empty_id(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('create', '')]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '',
'_index': 'glance',
'_op_type': 'create',
'_source': {'disk_format': 'raw', 'name': 'image-1'},
'_type': 'image'
}],
'default_index': None,
'default_type': None
}
self.assertEqual(expected, output)
def test_create_invalid_index(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('create', index='invalid')]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_create_invalid_doc_type(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('create', doc_type='invalid')]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_create_missing_index(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('create', '1', index=None)]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_create_missing_doc_type(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('create', '1', doc_type=None)]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_update_missing_id(self):
action = _image_fixture('update')
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [action]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_update_missing_data(self):
action = _image_fixture('update', '1')
action.pop('data')
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [action]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_update_using_data(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('update', '1')]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '1',
'_index': 'glance',
'_op_type': 'update',
'_type': 'image',
'doc': {'disk_format': 'raw', 'name': 'image-1'}
}],
'default_index': None,
'default_type': None
}
self.assertEqual(expected, output)
def test_update_using_script(self):
action = _image_fixture('update', '1', script='<sample script>')
action.pop('data')
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [action]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '1',
'_index': 'glance',
'_op_type': 'update',
'_type': 'image',
'params': {},
'script': '<sample script>'
}],
'default_index': None,
'default_type': None,
}
self.assertEqual(expected, output)
def test_update_using_script_and_data(self):
action = _image_fixture('update', '1', script='<sample script>')
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [action]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '1',
'_index': 'glance',
'_op_type': 'update',
'_type': 'image',
'params': {'disk_format': 'raw', 'name': 'image-1'},
'script': '<sample script>'
}],
'default_index': None,
'default_type': None,
}
self.assertEqual(expected, output)
def test_delete_missing_id(self):
action = _image_fixture('delete')
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [action]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_delete_single(self):
action = _image_fixture('delete', '1')
action.pop('data')
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [action]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '1',
'_index': 'glance',
'_op_type': 'delete',
'_source': {},
'_type': 'image'
}],
'default_index': None,
'default_type': None
}
self.assertEqual(expected, output)
def test_delete_multiple(self):
action_1 = _image_fixture('delete', '1')
action_1.pop('data')
action_2 = _image_fixture('delete', '2')
action_2.pop('data')
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [action_1, action_2],
})
output = self.deserializer.index(request)
expected = {
'actions': [
{
'_id': '1',
'_index': 'glance',
'_op_type': 'delete',
'_source': {},
'_type': 'image'
},
{
'_id': '2',
'_index': 'glance',
'_op_type': 'delete',
'_source': {},
'_type': 'image'
},
],
'default_index': None,
'default_type': None
}
self.assertEqual(expected, output)
class TestResponseSerializer(test_utils.BaseTestCase):
def setUp(self):
super(TestResponseSerializer, self).setUp()
self.serializer = search.ResponseSerializer()
def test_plugins_info(self):
expected = {
"plugins": [
{
"index": "glance",
"type": "image"
},
{
"index": "glance",
"type": "metadef"
}
]
}
request = webob.Request.blank('/v0.1/search')
response = webob.Response(request=request)
result = {
"plugins": [
{
"index": "glance",
"type": "image"
},
{
"index": "glance",
"type": "metadef"
}
]
}
self.serializer.search(response, result)
actual = jsonutils.loads(response.body)
self.assertEqual(expected, actual)
self.assertEqual('application/json', response.content_type)
def test_search(self):
expected = [{
'id': '1',
'name': 'image-1',
'disk_format': 'raw',
}]
request = webob.Request.blank('/v0.1/search')
response = webob.Response(request=request)
result = [{
'id': '1',
'name': 'image-1',
'disk_format': 'raw',
}]
self.serializer.search(response, result)
actual = jsonutils.loads(response.body)
self.assertEqual(expected, actual)
self.assertEqual('application/json', response.content_type)
def test_index(self):
expected = {
'success': '1',
'failed': '0',
'errors': [],
}
request = webob.Request.blank('/v0.1/index')
response = webob.Response(request=request)
result = {
'success': '1',
'failed': '0',
'errors': [],
}
self.serializer.index(response, result)
actual = jsonutils.loads(response.body)
self.assertEqual(expected, actual)
self.assertEqual('application/json', response.content_type)


@ -26,8 +26,6 @@ console_scripts =
glance-cache-manage = glance.cmd.cache_manage:main
glance-cache-cleaner = glance.cmd.cache_cleaner:main
glance-control = glance.cmd.control:main
glance-search = glance.cmd.search:main
glance-index = glance.cmd.index:main
glance-manage = glance.cmd.manage:main
glance-registry = glance.cmd.registry:main
glance-replicator = glance.cmd.replicator:main
@ -45,9 +43,6 @@ glance.database.migration_backend =
sqlalchemy = oslo_db.sqlalchemy.migration
glance.database.metadata_backend =
sqlalchemy = glance.db.sqlalchemy.metadata
glance.search.index_backend =
image = glance.search.plugins.images:ImageIndex
metadef = glance.search.plugins.metadefs:MetadefIndex
glance.artifacts.types =
MyArtifact = glance.contrib.plugins.artifacts_sample:MY_ARTIFACT
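The glance.search.index_backend entry points removed here are how the image and metadef indexing plugins were discovered at runtime. A hedged sketch of loading that namespace with stevedore; only the namespace string and the class names come from the hunk above:

    # Illustrative entry-point discovery via stevedore.
    from stevedore import extension

    mgr = extension.ExtensionManager(
        namespace='glance.search.index_backend',
        invoke_on_load=True)

    for ext in mgr:
        plugin = ext.obj  # e.g. an ImageIndex or MetadefIndex instance
        print(ext.name, plugin.get_index_name(), plugin.get_document_type())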


@ -28,7 +28,4 @@ qpid-python;python_version=='2.7'
xattr>=0.4
# Documentation
oslosphinx>=2.5.0 # Apache-2.0
# Glance catalog index
elasticsearch>=1.3.0


@ -33,7 +33,6 @@ commands =
glance.tests.unit.test_db_metadef \
glance.tests.unit.test_domain \
glance.tests.unit.test_domain_proxy \
glance.tests.unit.test_gateway \
glance.tests.unit.test_image_cache_client \
glance.tests.unit.test_jsonpatchmixin \
glance.tests.unit.test_manage \
@ -43,7 +42,6 @@ commands =
glance.tests.unit.test_policy \
glance.tests.unit.test_schema \
glance.tests.unit.test_scrubber \
glance.tests.unit.test_search \
glance.tests.unit.test_store_artifact \
glance.tests.unit.test_store_location \
glance.tests.unit.test_versions