Adding utils, tests for them, and fixing some PEP8 issues.
Change-Id: If0e9deafb8923d76beee8f2da07e913beeb4a962
@@ -56,6 +56,7 @@ CONF.import_opt('use_forwarded_for', 'ec2api.api.auth')

# Fault Wrapper around all EC2 requests #
class FaultWrapper(wsgi.Middleware):

    """Calls the middleware stack, captures any exceptions into faults."""

    @webob.dec.wsgify(RequestClass=wsgi.Request)
@@ -68,6 +69,7 @@ class FaultWrapper(wsgi.Middleware):


class RequestLogging(wsgi.Middleware):

    """Access-Log akin logging for all EC2 API requests."""

    @webob.dec.wsgify(RequestClass=wsgi.Request)
@@ -103,6 +105,7 @@ class RequestLogging(wsgi.Middleware):


class EC2KeystoneAuth(wsgi.Middleware):

    """Authenticate an EC2 request with keystone and convert to context."""

    @webob.dec.wsgify(RequestClass=wsgi.Request)
@@ -179,7 +182,8 @@ class EC2KeystoneAuth(wsgi.Middleware):
        headers["X-Auth-Token"] = token_id
        o = urlparse.urlparse(CONF.keystone_url
-                              + ("/users/%s/credentials/OS-EC2/%s" % (user_id, access)))
+                              + ("/users/%s/credentials/OS-EC2/%s"
+                                 % (user_id, access)))
        if o.scheme == "http":
            conn = httplib.HTTPConnection(o.netloc)
        else:
@@ -226,8 +230,9 @@ class Requestify(wsgi.Middleware):
                        'SignatureVersion', 'Version', 'Timestamp']
        args = dict(req.params)
        try:
-            expired = ec2utils.is_ec2_timestamp_expired(req.params,
-                                expires=CONF.ec2_timestamp_expiry)
+            expired = ec2utils.is_ec2_timestamp_expired(
+                req.params,
+                expires=CONF.ec2_timestamp_expiry)
            if expired:
                msg = _("Timestamp failed validation.")
                LOG.exception(msg)
@@ -47,6 +47,7 @@ def _database_to_isoformat(datetimeobj):


class APIRequest(object):

    def __init__(self, action, version, args):
        self.action = action
        self.version = version
@@ -87,7 +88,8 @@ class APIRequest(object):
        response_el = xml.createElement(self.action + 'Response')
        response_el.setAttribute('xmlns',
-                                'http://ec2.amazonaws.com/doc/%s/' % self.version)
+                                'http://ec2.amazonaws.com/doc/%s/'
+                                % self.version)
        request_id_el = xml.createElement('requestId')
        request_id_el.appendChild(xml.createTextNode(request_id))
        response_el.appendChild(request_id_el)
@@ -136,7 +138,7 @@ class APIRequest(object):
            data_el.appendChild(xml.createTextNode(str(data).lower()))
        elif isinstance(data, datetime.datetime):
            data_el.appendChild(
                xml.createTextNode(_database_to_isoformat(data)))
        elif data is not None:
            data_el.appendChild(xml.createTextNode(str(data)))
@@ -28,14 +28,16 @@ LOG = logging.getLogger(__name__)


class CloudController(object):

    """Cloud Controller

    Provides the critical dispatch between
    inbound API calls through the endpoint and messages
    sent to the other nodes.
    """

    def __init__(self):
        pass

    def __str__(self):
        return 'CloudController'
@@ -14,10 +14,14 @@

import re

+from ec2api import context
+from ec2api.db import api as db_api
from ec2api import exception
+from ec2api import novadb
from ec2api.openstack.common.gettextutils import _
from ec2api.openstack.common import log as logging
from ec2api.openstack.common import timeutils
+from ec2api.openstack.common import uuidutils

LOG = logging.getLogger(__name__)

@@ -184,3 +188,106 @@ def ec2_id_to_id(ec2_id):
def id_to_ec2_id(instance_id, template='i-%08x'):
    """Convert an instance ID (int) to an ec2 ID (i-[base 16 number])."""
    return template % int(instance_id)
+
+
+def id_to_ec2_inst_id(instance_id):
+    """Get or create an ec2 instance ID (i-[base 16 number]) from uuid."""
+    if instance_id is None:
+        return None
+    elif uuidutils.is_uuid_like(instance_id):
+        ctxt = context.get_admin_context()
+        int_id = get_int_id_from_instance_uuid(ctxt, instance_id)
+        return id_to_ec2_id(int_id)
+    else:
+        return id_to_ec2_id(instance_id)
+
+
+def ec2_inst_id_to_uuid(context, ec2_id):
+    """Convert an instance id to uuid."""
+    int_id = ec2_id_to_id(ec2_id)
+    return get_instance_uuid_from_int_id(context, int_id)
+
+
+def get_instance_uuid_from_int_id(context, int_id):
+    return novadb.get_instance_uuid_by_ec2_id(context, int_id)
+
+
+def get_int_id_from_instance_uuid(context, instance_uuid):
+    if instance_uuid is None:
+        return
+    try:
+        return novadb.get_ec2_instance_id_by_uuid(context, instance_uuid)
+    except exception.NotFound:
+        return novadb.ec2_instance_create(context, instance_uuid)['id']
+
+
+# NOTE(ft): extra functions to use in vpc specific code or instead of
+# malformed existing functions
+
+
+def get_ec2_id(obj_id, kind):
+    # TODO(ft): move to standard conversion function
+    if not isinstance(obj_id, int) and not isinstance(obj_id, long):
+        raise TypeError('obj_id must be int')
+    elif obj_id < 0 or obj_id > 0xffffffff:
+        raise OverflowError('obj_id must be non negative integer')
+    return '%(kind)s-%(id)08x' % {'kind': kind, 'id': obj_id}
+
+
+_NOT_FOUND_EXCEPTION_MAP = {
+    'vpc': exception.InvalidVpcIDNotFound,
+    'igw': exception.InvalidInternetGatewayIDNotFound,
+    'subnet': exception.InvalidSubnetIDNotFound,
+    'eni': exception.InvalidNetworkInterfaceIDNotFound,
+    'dopt': exception.InvalidDhcpOptionsIDNotFound,
+    'eipalloc': exception.InvalidAllocationIDNotFound,
+    'sg': exception.InvalidSecurityGroupIDNotFound,
+    'rtb': exception.InvalidRouteTableIDNotFound,
+}
+
+
+def get_db_item(context, kind, ec2_id):
+    db_id = ec2_id_to_id(ec2_id)
+    item = db_api.get_item_by_id(context, kind, db_id)
+    if item is None:
+        params = {'%s_id' % kind: ec2_id}
+        raise _NOT_FOUND_EXCEPTION_MAP[kind](**params)
+    return item
+
+
+def get_db_items(context, kind, ec2_ids):
+    if ec2_ids is not None:
+        db_ids = [ec2_id_to_id(id) for id in ec2_ids]
+        items = db_api.get_items_by_ids(context, kind, db_ids)
+        if items is None or items == []:
+            params = {'%s_id' % kind: ec2_ids[0]}
+            raise _NOT_FOUND_EXCEPTION_MAP[kind](**params)
+    else:
+        items = db_api.get_items(context, kind)
+    return items
+
+
+_cidr_re = re.compile("^([0-9]{1,3}\.){3}[0-9]{1,3}/[0-9]{1,2}$")
+
+
+def validate_cidr(cidr, parameter_name):
+    invalid_format_exception = exception.InvalidParameterValue(
+        value=cidr,
+        parameter=parameter_name,
+        reason='This is not a valid CIDR block.')
+    if not _cidr_re.match(cidr):
+        raise invalid_format_exception
+    address, size = cidr.split("/")
+    octets = address.split(".")
+    if any(int(octet) > 255 for octet in octets):
+        raise invalid_format_exception
+    size = int(size)
+    if size > 32:
+        raise invalid_format_exception
+
+
+def validate_vpc_cidr(cidr, invalid_cidr_exception_class):
+    validate_cidr(cidr, 'cidrBlock')
+    size = int(cidr.split("/")[-1])
+    if size > 28 or size < 16:
+        raise invalid_cidr_exception_class(cidr_block=cidr)
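A minimal usage sketch for the new ID and validation helpers (not part of the commit; the import path and the `ctx` request context are assumptions):

from ec2api.api import ec2utils  # module path assumed

# Format a numeric DB id as an EC2-style identifier, e.g. 'vpc-0000002a'.
ec2_vpc_id = ec2utils.get_ec2_id(42, 'vpc')

# Reject malformed CIDR blocks early; raises InvalidParameterValue on bad input.
ec2utils.validate_cidr('10.0.0.0/16', 'cidrBlock')

# Fetch a stored item by its EC2 id; a missing 'vpc' maps to InvalidVpcIDNotFound.
vpc = ec2utils.get_db_item(ctx, 'vpc', ec2_vpc_id)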
@@ -67,6 +67,7 @@ def ec2_error_response(request_id, code, message, status=500):


class Fault(webob.exc.HTTPException):

    """Captures exception and return REST Response."""

    def __init__(self, exception):
ec2api/db/__init__.py (new file, 19 lines)
@@ -0,0 +1,19 @@
# Copyright 2013 Cloudscaling Group, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
DB abstraction for EC2api
"""

from ec2api.db.api import *
ec2api/db/api.py (new file, 110 lines)
@@ -0,0 +1,110 @@
# Copyright 2013 Cloudscaling Group, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Defines interface for DB access.

Functions in this module are imported into the ec2api.db namespace. Call these
functions from ec2api.db namespace, not the ec2api.db.api namespace.

**Related Flags**

:dbackend:  string to lookup in the list of LazyPluggable backends.
            `sqlalchemy` is the only supported backend right now.

:connection:  string specifying the sqlalchemy connection to use, like:
              `sqlite:///var/lib/ec2api/ec2api.sqlite`.

"""

from eventlet import tpool
from oslo.config import cfg

from ec2api.openstack.common.db import api as db_api
from ec2api.openstack.common import log as logging


tpool_opts = [
    cfg.BoolOpt('use_tpool',
                default=False,
                deprecated_name='dbapi_use_tpool',
                deprecated_group='DEFAULT',
                help='Enable the experimental use of thread pooling for '
                     'all DB API calls'),
]

CONF = cfg.CONF
CONF.register_opts(tpool_opts, 'database')
CONF.import_opt('backend', 'ec2api.openstack.common.db.options',
                group='database')

_BACKEND_MAPPING = {'sqlalchemy': 'ec2api.db.sqlalchemy.api'}


class EC2DBAPI(object):
    """ec2's DB API wrapper class.

    This wraps the oslo DB API with an option to be able to use eventlet's
    thread pooling. Since the CONF variable may not be loaded at the time
    this class is instantiated, we must look at it on the first DB API call.
    """

    def __init__(self):
        self.__db_api = None

    @property
    def _db_api(self):
        if not self.__db_api:
            ec2_db_api = db_api.DBAPI(CONF.database.backend,
                                      backend_mapping=_BACKEND_MAPPING)
            if CONF.database.use_tpool:
                self.__db_api = tpool.Proxy(ec2_db_api)
            else:
                self.__db_api = ec2_db_api
        return self.__db_api

    def __getattr__(self, key):
        return getattr(self._db_api, key)


IMPL = EC2DBAPI()

LOG = logging.getLogger(__name__)


def add_item(context, kind, data):
    return IMPL.add_item(context, kind, data)


def update_item(context, item):
    IMPL.update_item(context, item)


def delete_item(context, item_id):
    IMPL.delete_item(context, item_id)


def restore_item(context, kind, data):
    return IMPL.restore_item(context, kind, data)


def get_items(context, kind):
    return IMPL.get_items(context, kind)


def get_item_by_id(context, kind, item_id):
    return IMPL.get_item_by_id(context, kind, item_id)


def get_items_by_ids(context, kind, item_ids):
    return IMPL.get_items_by_ids(context, kind, item_ids)
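An illustrative sketch of how this wrapper is meant to be consumed through the package namespace (not part of the commit; the context object and item contents are assumptions):

from ec2api import db

# Store a VPC item; the backend assigns the integer id.
vpc = db.add_item(context, 'vpc', {'os_id': 'neutron-router-uuid',
                                   'cidr_block': '10.0.0.0/16'})

# Read it back, by id or as part of the per-project collection.
same_vpc = db.get_item_by_id(context, 'vpc', vpc['id'])
all_vpcs = db.get_items(context, 'vpc')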
ec2api/db/migration.py (new file, 76 lines)
@@ -0,0 +1,76 @@
# Copyright 2013 Cloudscaling Group, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Database setup and migration commands."""

from oslo.config import cfg

from ec2api import exception
from ec2api.openstack.common.gettextutils import _

CONF = cfg.CONF


class LazyPluggable(object):
    """A pluggable backend loaded lazily based on some value."""

    def __init__(self, pivot, config_group=None, **backends):
        self.__backends = backends
        self.__pivot = pivot
        self.__backend = None
        self.__config_group = config_group

    def __get_backend(self):
        if not self.__backend:
            if self.__config_group is None:
                backend_name = CONF[self.__pivot]
            else:
                backend_name = CONF[self.__config_group][self.__pivot]
            if backend_name not in self.__backends:
                msg = _('Invalid backend: %s') % backend_name
                raise exception.EC2Exception(msg)

            backend = self.__backends[backend_name]
            if isinstance(backend, tuple):
                name = backend[0]
                fromlist = backend[1]
            else:
                name = backend
                fromlist = backend

            self.__backend = __import__(name, None, None, fromlist)
        return self.__backend

    def __getattr__(self, key):
        backend = self.__get_backend()
        return getattr(backend, key)


IMPL = LazyPluggable('backend',
                     config_group='database',
                     sqlalchemy='ec2api.db.sqlalchemy.migration')


def db_sync(version=None):
    """Migrate the database to `version` or the most recent version."""
    return IMPL.db_sync(version=version)


def db_version():
    """Display the current database version."""
    return IMPL.db_version()


def db_initial_version():
    """The starting version for the database."""
    return IMPL.db_initial_version()
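A short sketch of the intended entry point for schema management (not part of the commit; the invocation context is an assumption):

from ec2api.db import migration

migration.db_sync()            # upgrade to the latest schema version
current = migration.db_version()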
ec2api/db/sqlalchemy/__init__.py (new empty file)
ec2api/db/sqlalchemy/api.py (new file, 178 lines)
@@ -0,0 +1,178 @@
# Copyright 2013 Cloudscaling Group, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Implementation of SQLAlchemy backend."""

import ast
import copy
import functools
import sys

from oslo.config import cfg

import ec2api.context
from ec2api.db.sqlalchemy import models
from ec2api.openstack.common.db.sqlalchemy import session as db_session

CONF = cfg.CONF
CONF.import_opt('connection',
                'ec2api.openstack.common.db.sqlalchemy.session',
                group='database')


_MASTER_FACADE = None


def _create_facade_lazily(use_slave=False):
    global _MASTER_FACADE

    if _MASTER_FACADE is None:
        _MASTER_FACADE = db_session.EngineFacade(
            CONF.database.connection,
            **dict(CONF.database.iteritems())
        )
    return _MASTER_FACADE


def get_engine(use_slave=False):
    facade = _create_facade_lazily(use_slave)
    return facade.get_engine()


def get_session(use_slave=False, **kwargs):
    facade = _create_facade_lazily(use_slave)
    return facade.get_session(**kwargs)


def get_backend():
    """The backend is this module itself."""
    return sys.modules[__name__]


def require_context(f):
    """Decorator to require *any* user or admin context.

    The first argument to the wrapped function must be the context.
    """

    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        ec2api.context.require_context(args[0])
        return f(*args, **kwargs)
    return wrapper


def model_query(context, model, *args, **kwargs):
    """Query helper that accounts for context's `read_deleted` field.

    :param context: context to query under
    :param session: if present, the session to use
    """
    session = kwargs.get('session') or get_session()

    return session.query(model, *args)


@require_context
def add_item(context, kind, data):
    item_ref = models.Item()
    item_ref.update({
        "project_id": context.project_id,
        "kind": kind,
    })
    item_ref.update(_pack_item_data(data))
    item_ref.save()
    return _unpack_item_data(item_ref)


@require_context
def update_item(context, item):
    item_ref = (model_query(context, models.Item).
                filter_by(project_id=context.project_id,
                          id=item["id"]).
                one())
    item_ref.update(_pack_item_data(item))
    item_ref.save()
    return _unpack_item_data(item_ref)


@require_context
def delete_item(context, item_id):
    (model_query(context, models.Item).
     filter_by(project_id=context.project_id,
               id=item_id).
     delete())


@require_context
def restore_item(context, kind, data):
    item_ref = models.Item()
    item_ref.update({
        "project_id": context.project_id,
        "kind": kind,
    })
    item_ref.id = data['id']
    item_ref.update(_pack_item_data(data))
    item_ref.save()
    return _unpack_item_data(item_ref)


@require_context
def get_items(context, kind):
    return [_unpack_item_data(item)
            for item in model_query(context, models.Item).
            filter_by(project_id=context.project_id,
                      kind=kind).
            all()]


@require_context
def get_item_by_id(context, kind, item_id):
    return _unpack_item_data(model_query(context, models.Item).
                             filter_by(project_id=context.project_id,
                                       kind=kind,
                                       id=item_id).
                             first())


@require_context
def get_items_by_ids(context, kind, item_ids):
    if item_ids is None or item_ids == []:
        return get_items(context, kind)
    return [_unpack_item_data(item)
            for item in (model_query(context, models.Item).
                         filter_by(project_id=context.project_id,
                                   kind=kind).
                         filter(models.Item.id.in_(item_ids))).
            all()]


def _pack_item_data(item_data):
    data = copy.deepcopy(item_data)
    data.pop("id", None)
    return {
        "os_id": data.pop("os_id", None),
        "vpc_id": data.pop("vpc_id", None),
        "data": str(data),
    }


def _unpack_item_data(item_ref):
    if item_ref is None:
        return None
    data = ast.literal_eval(item_ref.data)
    data["id"] = item_ref.id
    data["os_id"] = item_ref.os_id
    data["vpc_id"] = item_ref.vpc_id
    return data
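The Item model keeps only os_id and vpc_id as real columns; everything else is flattened into the Text data column and recovered on read. A rough illustration of that round trip (not part of the commit; the fake row object is purely illustrative):

import collections

FakeRow = collections.namedtuple('FakeRow', ['id', 'os_id', 'vpc_id', 'data'])

item = {'id': 7, 'os_id': 'os-uuid', 'vpc_id': None, 'cidr_block': '10.0.0.0/24'}
packed = _pack_item_data(item)
# packed['data'] == "{'cidr_block': '10.0.0.0/24'}"

row = FakeRow(id=7, os_id=packed['os_id'], vpc_id=packed['vpc_id'],
              data=packed['data'])
assert _unpack_item_data(row) == item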
ec2api/db/sqlalchemy/migrate_repo/README (new file, 4 lines)
@@ -0,0 +1,4 @@
This is a database migration repository.

More information at
http://code.google.com/p/sqlalchemy-migrate/
ec2api/db/sqlalchemy/migrate_repo/__init__.py (new empty file)
ec2api/db/sqlalchemy/migrate_repo/manage.py (new file, 19 lines)
@@ -0,0 +1,19 @@
# Copyright 2013 Cloudscaling Group, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from migrate.versioning.shell import main


if __name__ == '__main__':
    main(debug='False', repository='.')
ec2api/db/sqlalchemy/migrate_repo/migrate.cfg (new file, 20 lines)
@@ -0,0 +1,20 @@
[db_settings]
# Used to identify which repository this database is versioned under.
# You can use the name of your project.
repository_id=ec2api

# The name of the database table used to track the schema version.
# This name shouldn't already be used by your project.
# If this is changed once a database is under version control, you'll need to
# change the table name in each database too.
version_table=migrate_version

# When committing a change script, Migrate will attempt to generate the
# sql for all supported databases; normally, if one of them fails - probably
# because you don't have that database installed - it is ignored and the
# commit continues, perhaps ending successfully.
# Databases in this list MUST compile successfully during a commit, or the
# entire commit will fail. List the databases your application will actually
# be using to ensure your updates to that database work properly.
# This must be a list; example: ['postgres','sqlite']
required_dbs=[]
ec2api/db/sqlalchemy/migrate_repo/versions/001_juno.py (new file, 48 lines)
@@ -0,0 +1,48 @@
# Copyright 2013 Cloudscaling Group, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from sqlalchemy import Column, Integer, MetaData
from sqlalchemy import PrimaryKeyConstraint, String, Table, Text
from sqlalchemy import UniqueConstraint


def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    items = Table('items', meta,
                  Column("id", Integer(), autoincrement=True),
                  Column("project_id", String(length=64)),
                  Column("vpc_id", Integer),
                  Column("kind", String(length=20)),
                  Column("os_id", String(length=36)),
                  Column("data", Text()),
                  PrimaryKeyConstraint('id'),
                  UniqueConstraint('id', name='items_os_id_idx'),
                  mysql_engine="InnoDB",
                  mysql_charset="utf8"
                  )
    items.create()

    if migrate_engine.name == "mysql":
        # In Folsom we explicitly converted migrate_version to UTF8.
        sql = "ALTER TABLE migrate_version CONVERT TO CHARACTER SET utf8;"
        # Set default DB charset to UTF8.
        sql += ("ALTER DATABASE %s DEFAULT CHARACTER SET utf8;" %
                migrate_engine.url.database)
        migrate_engine.execute(sql)


def downgrade(migrate_engine):
    raise NotImplementedError("Downgrade from Juno is unsupported.")
ec2api/db/sqlalchemy/migration.py (new file, 86 lines)
@@ -0,0 +1,86 @@
# Copyright 2014 Cloudscaling Group, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.


import os

from migrate import exceptions as versioning_exceptions
from migrate.versioning import api as versioning_api
from migrate.versioning.repository import Repository
import sqlalchemy

from ec2api.db.sqlalchemy import api as db_session
from ec2api import exception
from ec2api.openstack.common.gettextutils import _

INIT_VERSION = 0
_REPOSITORY = None

get_engine = db_session.get_engine


def db_sync(version=None):
    if version is not None:
        try:
            version = int(version)
        except ValueError:
            raise exception.EC2Exception(_("version should be an integer"))

    current_version = db_version()
    repository = _find_migrate_repo()
    if version is None or version > current_version:
        return versioning_api.upgrade(get_engine(), repository, version)
    else:
        return versioning_api.downgrade(get_engine(), repository,
                                        version)


def db_version():
    repository = _find_migrate_repo()
    try:
        return versioning_api.db_version(get_engine(), repository)
    except versioning_exceptions.DatabaseNotControlledError:
        meta = sqlalchemy.MetaData()
        engine = get_engine()
        meta.reflect(bind=engine)
        tables = meta.tables
        if len(tables) == 0:
            db_version_control(INIT_VERSION)
            return versioning_api.db_version(get_engine(), repository)
        else:
            # Some pre-Essex DB's may not be version controlled.
            # Require them to upgrade using Essex first.
            raise exception.EC2Exception(
                _("Upgrade DB using Essex release first."))


def db_initial_version():
    return INIT_VERSION


def db_version_control(version=None):
    repository = _find_migrate_repo()
    versioning_api.version_control(get_engine(), repository, version)
    return version


def _find_migrate_repo():
    """Get the path for the migrate repository."""
    global _REPOSITORY
    path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                        'migrate_repo')
    assert os.path.exists(path)
    if _REPOSITORY is None:
        _REPOSITORY = Repository(path)
    return _REPOSITORY
ec2api/db/sqlalchemy/models.py (new file, 51 lines)
@@ -0,0 +1,51 @@
# Copyright 2013 Cloudscaling Group, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
SQLAlchemy models for ec2api data.
"""

from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, PrimaryKeyConstraint, String, Text
from sqlalchemy import UniqueConstraint

from ec2api.openstack.common.db.sqlalchemy import models

BASE = declarative_base()


class EC2Base(models.ModelBase):
    metadata = None

    def save(self, session=None):
        from ec2api.db.sqlalchemy import api

        if session is None:
            session = api.get_session()

        super(EC2Base, self).save(session=session)


class Item(BASE, EC2Base):
    __tablename__ = 'items'
    __table_args__ = (
        PrimaryKeyConstraint('id'),
        UniqueConstraint('id', name='items_os_id_idx'),
    )
    id = Column(Integer, autoincrement=True)
    project_id = Column(String(length=64))
    vpc_id = Column(Integer)
    kind = Column(String(length=20))
    os_id = Column(String(length=36))
    data = Column(Text())
@@ -47,7 +47,6 @@ class EC2ServerError(Exception):


class EC2Exception(Exception):

    """Base EC2 Exception

    To correctly use this class, inherit from it and define
@@ -201,6 +200,11 @@ class InvalidRouteNotFound(EC2NotFound):
                '%(destination_cidr_block)s in route table %(route_table_id)s')


+class InvalidSecurityGroupIDNotFound(EC2NotFound):
+    ec2_code = 'InvalidSecurityGroupID.NotFound'
+    msg_fmt = _("The securityGroup ID '%(sg_id)s' does not exist")
+
+
class InvalidGroupNotFound(EC2NotFound):
    ec2_code = 'InvalidGroup.NotFound'
    msg_fmg = _("The security group ID '%(sg_id)s' does not exist")
@@ -267,6 +271,9 @@ class InvalidNetworkInterfaceInUse(Invalid):

class InvalidInstanceId(Invalid):
    ec2_code = 'InvalidInstanceID'
+    msg_fmt = _("There are multiple interfaces attached to instance "
+                "'%(instance_id)s'. Please specify an interface ID for "
+                "the operation instead.")


class InvalidIPAddressInUse(Invalid):
ec2api/novadb/__init__.py (new file, 19 lines)
@@ -0,0 +1,19 @@
# Copyright 2014 Cloudscaling Group, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
DB abstraction for Nova
"""

from ec2api.novadb.api import *  # noqa
ec2api/novadb/api.py (new file, 103 lines)
@@ -0,0 +1,103 @@
# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Defines interface for DB access.

Functions in this module are imported into the ec2api.novadb namespace.
Call these functions from ec2api.novadb namespace, not the ec2api.novadb.api
namespace.

All functions in this module return objects that implement a dictionary-like
interface. Currently, many of these objects are sqlalchemy objects that
implement a dictionary interface. However, a future goal is to have all of
these objects be simple dictionaries.

"""

from eventlet import tpool
from oslo.config import cfg

from ec2api.openstack.common.db import api as db_api
from ec2api.openstack.common import log as logging


CONF = cfg.CONF
CONF.import_opt('use_tpool', 'ec2api.db.api',
                group='database')
CONF.import_opt('backend', 'ec2api.openstack.common.db.options',
                group='database')

_BACKEND_MAPPING = {'sqlalchemy': 'ec2api.novadb.sqlalchemy.api'}


class NovaDBAPI(object):
    """Nova's DB API wrapper class.

    This wraps the oslo DB API with an option to be able to use eventlet's
    thread pooling. Since the CONF variable may not be loaded at the time
    this class is instantiated, we must look at it on the first DB API call.
    """

    def __init__(self):
        self.__db_api = None

    @property
    def _db_api(self):
        if not self.__db_api:
            nova_db_api = db_api.DBAPI(CONF.database.backend,
                                       backend_mapping=_BACKEND_MAPPING)
            if CONF.database.use_tpool:
                self.__db_api = tpool.Proxy(nova_db_api)
            else:
                self.__db_api = nova_db_api
        return self.__db_api

    def __getattr__(self, key):
        return getattr(self._db_api, key)


IMPL = NovaDBAPI()

LOG = logging.getLogger(__name__)

# The maximum value a signed INT type may have
MAX_INT = 0x7FFFFFFF

####################


def get_ec2_instance_id_by_uuid(context, instance_id):
    """Get ec2 id through uuid from instance_id_mappings table."""
    return IMPL.get_ec2_instance_id_by_uuid(context, instance_id)


def get_instance_uuid_by_ec2_id(context, ec2_id):
    """Get uuid through ec2 id from instance_id_mappings table."""
    return IMPL.get_instance_uuid_by_ec2_id(context, ec2_id)


def ec2_instance_create(context, instance_uuid, id=None):
    """Create the ec2 id to instance uuid mapping on demand."""
    return IMPL.ec2_instance_create(context, instance_uuid, id)


def ec2_instance_get_by_uuid(context, instance_uuid):
    return IMPL.ec2_instance_get_by_uuid(context, instance_uuid)


def ec2_instance_get_by_id(context, instance_id):
    return IMPL.ec2_instance_get_by_id(context, instance_id)
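A sketch of the mapping these helpers expose from nova's instance_id_mappings table (not part of the commit; the context and the instance_uuid value are assumptions):

from ec2api import novadb

int_id = novadb.get_ec2_instance_id_by_uuid(context, instance_uuid)
uuid = novadb.get_instance_uuid_by_ec2_id(context, int_id)
assert uuid == instance_uuid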
ec2api/novadb/sqlalchemy/__init__.py (new file, 23 lines)
@@ -0,0 +1,23 @@
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from sqlalchemy import BigInteger
from sqlalchemy.ext.compiler import compiles


@compiles(BigInteger, 'sqlite')
def compile_big_int_sqlite(type_, compiler, **kw):
    return 'INTEGER'
ec2api/novadb/sqlalchemy/api.py (new file, 222 lines)
@@ -0,0 +1,222 @@
# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Implementation of SQLAlchemy backend."""

import functools
import sys

from oslo.config import cfg
from sqlalchemy import or_

import ec2api.context
from ec2api import exception
from ec2api.novadb.sqlalchemy import models
from ec2api.openstack.common.db.sqlalchemy import session as db_session
from ec2api.openstack.common.gettextutils import _
from ec2api.openstack.common import log as logging

connection_opts = [
    cfg.StrOpt('connection_nova',
               secret=True,
               help='The SQLAlchemy connection string used to connect to the '
                    'nova database'),
    cfg.StrOpt('slave_connection',
               secret=True,
               help='The SQLAlchemy connection string used to connect to the '
                    'slave database'),
]

CONF = cfg.CONF
CONF.register_opts(connection_opts, group='database')

LOG = logging.getLogger(__name__)


_MASTER_FACADE = None
_SLAVE_FACADE = None


def _create_facade_lazily(use_slave=False):
    global _MASTER_FACADE
    global _SLAVE_FACADE

    return_slave = use_slave and CONF.database.slave_connection
    if not return_slave:
        if _MASTER_FACADE is None:
            _MASTER_FACADE = db_session.EngineFacade(
                CONF.database.connection_nova,
                **dict(CONF.database.iteritems())
            )
        return _MASTER_FACADE
    else:
        if _SLAVE_FACADE is None:
            _SLAVE_FACADE = db_session.EngineFacade(
                CONF.database.slave_connection,
                **dict(CONF.database.iteritems())
            )
        return _SLAVE_FACADE


def get_engine(use_slave=False):
    facade = _create_facade_lazily(use_slave)
    return facade.get_engine()


def get_session(use_slave=False, **kwargs):
    facade = _create_facade_lazily(use_slave)
    return facade.get_session(**kwargs)


def get_backend():
    """The backend is this module itself."""
    return sys.modules[__name__]


def require_context(f):
    """Decorator to require *any* user or admin context.

    This does no authorization for user or project access matching, see
    :py:func:`ec2api.context.authorize_project_context` and
    :py:func:`ec2api.context.authorize_user_context`.

    The first argument to the wrapped function must be the context.

    """

    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        ec2api.context.require_context(args[0])
        return f(*args, **kwargs)
    return wrapper


def model_query(context, model, *args, **kwargs):
    """Query helper that accounts for context's `read_deleted` field.

    :param context: context to query under
    :param use_slave: If true, use slave_connection
    :param session: if present, the session to use
    :param read_deleted: if present, overrides context's read_deleted field.
    :param project_only: if present and context is user-type, then restrict
            query to match the context's project_id. If set to 'allow_none',
            restriction includes project_id = None.
    :param base_model: Where model_query is passed a "model" parameter which is
            not a subclass of NovaBase, we should pass an extra base_model
            parameter that is a subclass of NovaBase and corresponds to the
            model parameter.
    """

    use_slave = kwargs.get('use_slave') or False
    if CONF.database.slave_connection == '':
        use_slave = False

    session = kwargs.get('session') or get_session(use_slave=use_slave)
    read_deleted = kwargs.get('read_deleted') or context.read_deleted
    project_only = kwargs.get('project_only', False)

    def issubclassof_nova_base(obj):
        return isinstance(obj, type) and issubclass(obj, models.NovaBase)

    base_model = model
    if not issubclassof_nova_base(base_model):
        base_model = kwargs.get('base_model', None)
        if not issubclassof_nova_base(base_model):
            raise Exception(_("model or base_model parameter should be "
                              "subclass of NovaBase"))

    query = session.query(model, *args)

    default_deleted_value = base_model.__mapper__.c.deleted.default.arg
    if read_deleted == 'no':
        query = query.filter(base_model.deleted == default_deleted_value)
    elif read_deleted == 'yes':
        pass  # omit the filter to include deleted and active
    elif read_deleted == 'only':
        query = query.filter(base_model.deleted != default_deleted_value)
    else:
        raise Exception(_("Unrecognized read_deleted value '%s'")
                        % read_deleted)

    if ec2api.context.is_user_context(context) and project_only:
        if project_only == 'allow_none':
            query = (query.
                     filter(or_(base_model.project_id == context.project_id,
                                base_model.project_id == None)))
        else:
            query = query.filter_by(project_id=context.project_id)

    return query


##################


@require_context
def ec2_instance_create(context, instance_uuid, id=None):
    """Create ec2 compatible instance by provided uuid."""
    ec2_instance_ref = models.InstanceIdMapping()
    ec2_instance_ref.update({'uuid': instance_uuid})
    if id is not None:
        ec2_instance_ref.update({'id': id})

    ec2_instance_ref.save()

    return ec2_instance_ref


@require_context
def ec2_instance_get_by_uuid(context, instance_uuid):
    result = (_ec2_instance_get_query(context).
              filter_by(uuid=instance_uuid).
              first())

    if not result:
        raise exception.InstanceNotFound(instance_id=instance_uuid)

    return result


@require_context
def get_ec2_instance_id_by_uuid(context, instance_id):
    result = ec2_instance_get_by_uuid(context, instance_id)
    return result['id']


@require_context
def ec2_instance_get_by_id(context, instance_id):
    result = (_ec2_instance_get_query(context).
              filter_by(id=instance_id).
              first())

    if not result:
        raise exception.InstanceNotFound(instance_id=instance_id)

    return result


@require_context
def get_instance_uuid_by_ec2_id(context, ec2_id):
    result = ec2_instance_get_by_id(context, ec2_id)
    return result['uuid']


def _ec2_instance_get_query(context, session=None):
    return model_query(context,
                       models.InstanceIdMapping,
                       session=session,
                       read_deleted='yes')
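An illustrative call into model_query showing the read_deleted switch (not part of the commit; the context and uuid values are assumptions):

# Only rows that are not soft-deleted (when the context's read_deleted is 'no'):
query = model_query(context, models.InstanceIdMapping, read_deleted='no')

# Include soft-deleted rows as well, as _ec2_instance_get_query() does:
query = model_query(context, models.InstanceIdMapping, read_deleted='yes')
rows = query.filter_by(uuid=instance_uuid).all()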
ec2api/novadb/sqlalchemy/models.py (new file, 59 lines)
@@ -0,0 +1,59 @@
# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Piston Cloud Computing, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
SQLAlchemy models for nova data.
"""

from oslo.config import cfg
from sqlalchemy import Column, Index, Integer, String
from sqlalchemy.dialects.mysql import MEDIUMTEXT
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Text

from ec2api.openstack.common.db.sqlalchemy import models

CONF = cfg.CONF
BASE = declarative_base()


def MediumText():
    return Text().with_variant(MEDIUMTEXT(), 'mysql')


class NovaBase(models.SoftDeleteMixin,
               models.TimestampMixin,
               models.ModelBase):
    metadata = None

    def save(self, session=None):
        from ec2api.novadb.sqlalchemy import api

        if session is None:
            session = api.get_session()

        super(NovaBase, self).save(session=session)


class InstanceIdMapping(BASE, NovaBase):
    """Compatibility layer for the EC2 instance service."""
    __tablename__ = 'instance_id_mappings'
    __table_args__ = (
        Index('ix_instance_id_mappings_uuid', 'uuid'),
    )
    id = Column(Integer, primary_key=True, nullable=False, autoincrement=True)
    uuid = Column(String(36), nullable=False)
@@ -92,8 +92,8 @@ DICT_FAKE_RESULT_DATA = {
    }
}
DICT_FAKE_RESULT = {
    'FakeActionResponse': tools.update_dict(
        DICT_FAKE_RESULT_DATA,
        {'requestId': None})
}

XML_SINGLE_RESULT = '''
@@ -199,7 +199,7 @@ DICT_RESULT_SET = {
            'imageType': 'kernel',
            'name': 'cirros-0.3.2-x86_64-uec-kernel',
        },
        {
            'description': None,
            'imageOwnerId': '77dcabaee8ea4a8fbae697ddc09afdaf',
            'isPublic': True,
@@ -212,7 +212,7 @@ DICT_RESULT_SET = {
            'imageType': 'ramdisk',
            'name': 'cirros-0.3.2-x86_64-uec-ramdisk',
        },
        {
            'name': 'cirros-0.3.2-x86_64-uec',
            'imageOwnerId': '77dcabaee8ea4a8fbae697ddc09afdaf',
            'isPublic': True,
@@ -227,7 +227,7 @@ DICT_RESULT_SET = {
            'imageType': 'machine',
            'description': None,
        },
        {
            'description': None,
            'imageOwnerId': '77dcabaee8ea4a8fbae697ddc09afdaf',
            'isPublic': True,
@@ -22,6 +22,7 @@ from testtools import content


class DictKeysMismatch(object):

    def __init__(self, d1only, d2only):
        self.d1only = d1only
        self.d2only = d2only
@@ -35,6 +36,7 @@ class DictKeysMismatch(object):


class DictMismatch(object):

    def __init__(self, key, d1_value, d2_value):
        self.key = key
        self.d1_value = d1_value
@@ -106,6 +108,7 @@ class DictMatches(object):


class ListLengthMismatch(object):

    def __init__(self, len1, len2):
        self.len1 = len1
        self.len2 = len2
@@ -147,6 +150,7 @@ class DictListMatches(object):


class SubDictMismatch(object):

    def __init__(self,
                 key=None,
                 sub_value=None,
@@ -212,6 +216,7 @@ class FunctionCallMatcher(object):


class XMLMismatch(object):

    """Superclass for XML mismatch."""

    def __init__(self, state):
@@ -230,6 +235,7 @@ class XMLMismatch(object):


class XMLTagMismatch(XMLMismatch):

    """XML tags don't match."""

    def __init__(self, state, idx, expected_tag, actual_tag):
@@ -245,6 +251,7 @@ class XMLTagMismatch(XMLMismatch):


class XMLAttrKeysMismatch(XMLMismatch):

    """XML attribute keys don't match."""

    def __init__(self, state, expected_only, actual_only):
@@ -259,6 +266,7 @@ class XMLAttrKeysMismatch(XMLMismatch):


class XMLAttrValueMismatch(XMLMismatch):

    """XML attribute values don't match."""

    def __init__(self, state, key, expected_value, actual_value):
@@ -274,6 +282,7 @@ class XMLAttrValueMismatch(XMLMismatch):


class XMLTextValueMismatch(XMLMismatch):

    """XML text values don't match."""

    def __init__(self, state, expected_text, actual_text):
@@ -288,6 +297,7 @@ class XMLTextValueMismatch(XMLMismatch):


class XMLUnexpectedChild(XMLMismatch):

    """Unexpected child present in XML."""

    def __init__(self, state, tag, idx):
@@ -301,6 +311,7 @@ class XMLUnexpectedChild(XMLMismatch):


class XMLExpectedChild(XMLMismatch):

    """Expected child not present in XML."""

    def __init__(self, state, tag, idx):
@@ -314,6 +325,7 @@ class XMLExpectedChild(XMLMismatch):


class XMLMatchState(object):

    """Maintain some state for matching.

    Tracks the XML node path and saves the expected and actual full
@@ -354,6 +366,7 @@ class XMLMatchState(object):


class XMLMatches(object):

    """Compare XML strings. More complete than string comparison."""

    def __init__(self, expected):
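These matchers plug into the testtools assertThat protocol: a matcher exposes match(actual), which returns None on success or a mismatch object whose describe() (and get_details()) build the failure message. A minimal, illustrative sketch of that protocol with a toy matcher follows; it is not taken from the commit.

# Illustrative only: a tiny matcher/mismatch pair in the testtools style
# that DictMatches, XMLMatches and friends follow.
import testtools


class KeysMismatch(object):
    def __init__(self, missing):
        self.missing = missing

    def describe(self):
        return 'missing keys: %s' % sorted(self.missing)

    def get_details(self):
        return {}


class HasKeys(object):
    def __init__(self, *keys):
        self.keys = set(keys)

    def __str__(self):
        return 'HasKeys(%s)' % sorted(self.keys)

    def match(self, actual):
        missing = self.keys - set(actual)
        if missing:
            return KeysMismatch(missing)
        return None


class ExampleTestCase(testtools.TestCase):
    def test_has_keys(self):
        self.assertThat({'requestId': None, 'imageType': 'machine'},
                        HasKeys('requestId', 'imageType'))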
@@ -63,10 +63,10 @@ class ApiInitTestCase(test_base.BaseTestCase):
        self.assertEqual(200, res.status_code)
        self.assertEqual('text/xml', res.content_type)
        expected_xml = fakes.XML_RESULT_TEMPLATE % {
            'action': 'FakeAction',
            'api_version': 'fake_v1',
            'request_id': self.fake_context.request_id,
            'data': '<fakeTag>fake_data</fakeTag>'}
        self.assertThat(res.body, matchers.XMLMatches(expected_xml))
        self.controller.fake_action.assert_called_once_with(self.fake_context,
                                                            param='fake_param')
@@ -81,12 +81,12 @@ class ApiInitTestCase(test_base.BaseTestCase):
            self.assertEqual(status, res.status_code)
            self.assertEqual('text/xml', res.content_type)
            expected_xml = fakes.XML_ERROR_TEMPLATE % {
                'code': code,
                'message': message,
                'request_id': self.fake_context.request_id}
            self.assertThat(res.body, matchers.XMLMatches(expected_xml))
            self.controller.fake_action.assert_called_once_with(
                self.fake_context, param='fake_param')

        do_check(exception.EC2Exception('fake_msg'), 500,
                 'EC2Exception', 'Unknown error occurred.')
@@ -97,7 +97,7 @@ class ApiInitTestCase(test_base.BaseTestCase):

    def test_execute_proxy(self):
        self.controller_class.return_value = mock.create_autospec(
            cloud.CloudController, instance=True)
        # NOTE(ft): recreate APIRequest to use mock with autospec
        ec2_request = apirequest.APIRequest('FakeAction', 'fake_v1',
                                            {'Param': 'fake_param'})
@@ -119,8 +119,8 @@ class ApiInitTestCase(test_base.BaseTestCase):

    def test_execute_proxy_error(self):
        self.controller.fake_action.side_effect = exception.EC2ServerError(
            {'status': 400, 'content-type': 'fake_type'},
            'fake_content')

        res = self.request.send(self.application)

172 ec2api/tests/test_ec2client.py Normal file
@@ -0,0 +1,172 @@
# Copyright 2014 Cloudscaling Group, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.


import collections
import time

import mock
from oslotest import base as test_base

from ec2api.api import ec2client
from ec2api import exception
from ec2api.tests import fakes_request_response as fakes
from ec2api.tests import matchers


class EC2RequesterTestCase(test_base.BaseTestCase):

    fake_context_class = collections.namedtuple('FakeContext', ['access_key',
                                                                'secret_key'])

    def setUp(self):
        super(EC2RequesterTestCase, self).setUp()
        httplib2_patcher = mock.patch('ec2api.api.ec2client.httplib2')
        self.httplib2 = httplib2_patcher.start()
        self.addCleanup(httplib2_patcher.stop)
        gmtime_patcher = mock.patch('ec2api.api.ec2client.time.gmtime')
        self.gmtime = gmtime_patcher.start()
        self.addCleanup(gmtime_patcher.stop)

    def test_post_request(self):
        http_obj = self.httplib2.Http.return_value
        http_obj.request.return_value = ('fake_response', 'fake_context',)
        self.gmtime.return_value = time.struct_time((2014, 6, 13,
                                                     7, 43, 54, 4, 164, 0,))

        requester = ec2client.EC2Requester('fake_v1', 'POST')
        requester._ec2_url = 'http://fake.host.com:1234/fake_Service'
        context = self.fake_context_class('caeafa52dda845d78a54786aa2ad355b',
                                          'f889ec080e094a92badb6f6ba0253393')
        result = requester.request(context, 'FakeAction',
                                   {'Arg1': 'Val1', 'Arg2': 'Val2'})
        http_obj.request.assert_called_once_with(
            'http://fake.host.com:1234/fake_Service',
            'POST',
            body='AWSAccessKeyId=caeafa52dda845d78a54786aa2ad355b&'
                 'Action=FakeAction&Arg1=Val1&Arg2=Val2&Signature='
                 'uBRxsBHetogWlgv%2FHJnJLK0vBMEChm1LFX%2BH9U1kjHo%3D&'
                 'SignatureMethod=HmacSHA256&SignatureVersion=2&'
                 'Timestamp=2014-06-13T07%3A43%3A54Z&Version=fake_v1',
            headers={'content-type': 'application/x-www-form-urlencoded',
                     'connection': 'close'})
        self.assertEqual(('fake_response', 'fake_context',), result)

    def test_get_request(self):
        http_obj = self.httplib2.Http.return_value
        http_obj.request.return_value = ('fake_response', 'fake_context',)
        self.gmtime.return_value = time.struct_time((2014, 6, 14,
                                                     10, 6, 16, 5, 165, 0,))

        requester = ec2client.EC2Requester('fake_v1', 'GET')
        requester._ec2_url = 'http://fake.host.com'
        context = self.fake_context_class('c1ba55bbcaeb4b41bc9a6d5344392825',
                                          '24aaf70906fe4d799f6360d7cd6320ba')
        result = requester.request(context, 'FakeAction',
                                   {'Arg1': 'Val1', 'Arg2': 'Val2'})
        http_obj.request.assert_called_once_with(
            'http://fake.host.com?'
            'AWSAccessKeyId=c1ba55bbcaeb4b41bc9a6d5344392825&'
            'Action=FakeAction&Arg1=Val1&Arg2=Val2&Signature='
            'puCc5v7kjOLibLTaT5bDp%2FPcgtbWMGt3kvh54z%2BpedE%3D&'
            'SignatureMethod=HmacSHA256&SignatureVersion=2&'
            'Timestamp=2014-06-14T10%3A06%3A16Z&Version=fake_v1',
            'GET',
            body=None,
            headers={'content-type': 'application/x-www-form-urlencoded',
                     'connection': 'close'})
        self.assertEqual(('fake_response', 'fake_context',), result)


class EC2ClientTestCase(test_base.BaseTestCase):

    fake_response_class = collections.namedtuple('response', ['status'])

    def test_ec2_xml_to_json_on_fake_result(self):
        json = ec2client.EC2Client._parse_xml(fakes.XML_FAKE_RESULT)
        self.assertIsInstance(json, dict)
        self.assertThat(fakes.DICT_FAKE_RESULT, matchers.DictMatches(json))

    def test_ec2_xml_to_json_on_single_result(self):
        json = ec2client.EC2Client._parse_xml(fakes.XML_SINGLE_RESULT)
        self.assertIsInstance(json, dict)
        self.assertThat(fakes.DICT_SINGLE_RESULT, matchers.DictMatches(json))

    def test_ec2_xml_to_json_on_result_set(self):
        json = ec2client.EC2Client._parse_xml(fakes.XML_RESULT_SET)
        self.assertIsInstance(json, dict)
        self.assertThat(fakes.DICT_RESULT_SET, matchers.DictMatches(json))

    def test_ec2_xml_to_json_on_empty_result_set(self):
        json = ec2client.EC2Client._parse_xml(fakes.XML_EMPTY_RESULT_SET)
        self.assertIsInstance(json, dict)
        self.assertThat(fakes.DICT_EMPTY_RESULT_SET,
                        matchers.DictMatches(json))

    def test_ec2_xml_to_json_on_error(self):
        json = ec2client.EC2Client._parse_xml(fakes.XML_ERROR)
        self.assertIsInstance(json, dict)
        self.assertThat(fakes.DICT_ERROR, matchers.DictMatches(json))

    def test_process_response_on_data_result(self):
        response = self.fake_response_class(200)
        json = ec2client.EC2Client._process_response(response,
                                                     fakes.XML_FAKE_RESULT)
        self.assertThat(json,
                        matchers.DictMatches(fakes.DICT_FAKE_RESULT_DATA))

    def test_process_response_on_ok_result(self):
        response = self.fake_response_class(200)
        result = ec2client.EC2Client._process_response(
            response, fakes.XML_SILENT_OPERATIN_RESULT)
        self.assertEqual(True, result)

    def test_process_response_on_error(self):
        response = self.fake_response_class(400)
        try:
            ec2client.EC2Client._process_response(response, fakes.XML_ERROR)
        except exception.EC2ServerError as ex:
            self.assertEqual(response, ex.response)
            self.assertEqual(fakes.XML_ERROR, ex.content)
        except Exception as ex:
            self.fail('%s was raised instead of '
                      'ec2api.exception.EC2ServerError' % str(ex))
        else:
            self.fail('No ec2api.exception.EC2ServerError was raised')

    def test_build_params(self):
        ec2_params = ec2client.EC2Client._build_params(
            **fakes.DICT_FAKE_PARAMS)
        self.assertThat(ec2_params,
                        matchers.DictMatches(fakes.DOTTED_FAKE_PARAMS))

    @mock.patch('ec2api.api.ec2client.EC2Requester')
    def test_call_action(self, requester_class):
        requester = requester_class.return_value
        fake_response = self.fake_response_class(200)
        requester.request.return_value = (fake_response,
                                          fakes.XML_FAKE_RESULT,)

        fake_context_class = collections.namedtuple('FakeContext',
                                                    ['api_version'])
        fake_context = fake_context_class('fake_v1')

        ec2 = ec2client.ec2client(fake_context)
        json = ec2.fake_action(fake_int=1234, fake_str='fake')

        self.assertThat(json,
                        matchers.DictMatches(fakes.DICT_FAKE_RESULT_DATA))
        requester_class.assert_called_once_with('fake_v1', 'POST')
        requester.request.assert_called_once_with(
            fake_context, 'FakeAction',
            {'FakeInt': '1234', 'FakeStr': 'fake'})
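test_call_action pins down the parameter conversion: keyword arguments such as fake_int=1234 reach the requester as {'FakeInt': '1234'}, i.e. snake_case names become the CamelCase EC2 wire names and values are stringified. A hypothetical helper showing that conversion is sketched below; it illustrates the expected behaviour and is not ec2api's actual _build_params.

# Illustrative only: one way to turn snake_case kwargs into CamelCase,
# string-valued EC2 parameters, matching what test_call_action asserts.
def to_ec2_params(**kwargs):
    params = {}
    for name, value in kwargs.items():
        ec2_name = ''.join(part.capitalize() for part in name.split('_'))
        params[ec2_name] = str(value)
    return params


print(to_ec2_params(fake_int=1234, fake_str='fake'))
# {'FakeInt': '1234', 'FakeStr': 'fake'}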
99 ec2api/tests/test_ec2utils.py Normal file
@@ -0,0 +1,99 @@
# Copyright 2014 Cloudscaling Group, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.


import mock
import testtools

from ec2api.api import ec2utils
from ec2api import exception
from ec2api.tests import matchers


class EC2UtilsTestCase(testtools.TestCase):

    def test_get_ec2_id(self):
        self.assertEqual('vpc-000004d2',
                         ec2utils.get_ec2_id(0x4d2, 'vpc'))
        self.assertEqual('vpc-000004d2',
                         ec2utils.get_ec2_id(long(0x4d2), 'vpc'))
        self.assertRaises(OverflowError,
                          ec2utils.get_ec2_id, -1, 'vpc')
        self.assertRaises(OverflowError,
                          ec2utils.get_ec2_id, 0x123456789, 'vpc')
        self.assertRaises(TypeError,
                          ec2utils.get_ec2_id, 'fake', 'vpc')
        self.assertRaises(TypeError,
                          ec2utils.get_ec2_id, 1.1, 'vpc')

    @mock.patch('ec2api.db.api.IMPL')
    def test_get_db_item(self, db_api):
        item = {'fake_key': 'fake_value'}
        db_api.get_item_by_id.return_value = item

        def check_normal_flow(db_id, kind, ec2_id):
            item['id'] = db_id
            res = ec2utils.get_db_item('fake_context', kind, ec2_id)
            self.assertThat(res, matchers.DictMatches(item))
            db_api.get_item_by_id.assert_called_once_with('fake_context',
                                                          kind, db_id)
            db_api.reset_mock()

        check_normal_flow(0x001234af, 'vpc', 'vpc-001234af')
        check_normal_flow(0x22, 'igw', 'igw-22')

        def check_not_found(kind, ec2_id, item_id, ex_class):
            self.assertRaises(ex_class,
                              ec2utils.get_db_item,
                              'fake_context', kind, ec2_id)
            db_api.get_item_by_id.assert_called_once_with('fake_context',
                                                          kind, item_id)
            db_api.reset_mock()

        db_api.get_item_by_id.return_value = None
        check_not_found('vpc', 'vpc-22', 0x22,
                        exception.InvalidVpcIDNotFound)
        check_not_found('igw', 'igw-22', 0x22,
                        exception.InvalidInternetGatewayIDNotFound)
        check_not_found('subnet', 'subnet-22', 0x22,
                        exception.InvalidSubnetIDNotFound)

    def test_validate_cidr(self):
        self.assertIsNone(ec2utils.validate_cidr('10.10.0.0/24', 'cidr'))

        def check_raise_invalid_parameter(cidr):
            self.assertRaises(exception.InvalidParameterValue,
                              ec2utils.validate_cidr, cidr, 'cidr')

        check_raise_invalid_parameter('fake')
        check_raise_invalid_parameter('10.10/24')
        check_raise_invalid_parameter('10.10.0.0.0/24')
        check_raise_invalid_parameter('10.10.0.0')
        check_raise_invalid_parameter(' 10.10.0.0/24')
        check_raise_invalid_parameter('10.10.0.0/24 ')
        check_raise_invalid_parameter('.10.10.0.0/24 ')
        check_raise_invalid_parameter('-1.10.0.0/24')
        check_raise_invalid_parameter('10.256.0.0/24')
        check_raise_invalid_parameter('10.10.0.0/33')
        check_raise_invalid_parameter('10.10.0.0/-1')

        def check_raise_invalid_vpc_range(cidr, ex_class):
            self.assertRaises(ex_class,
                              ec2utils.validate_vpc_cidr, cidr,
                              ex_class)

        check_raise_invalid_vpc_range('10.10.0.0/15',
                                      exception.InvalidSubnetRange)
        check_raise_invalid_vpc_range('10.10.0.0/29',
                                      exception.InvalidVpcRange)
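The expectations in test_get_ec2_id above fix the id format: the kind, a dash, and the integer rendered as eight hex digits, with TypeError for non-integers and OverflowError outside the unsigned 32-bit range. A sketch consistent with those tests follows; it is an assumption about ec2utils.get_ec2_id, not the project's actual code.

# Sketch satisfying test_get_ec2_id (Python 2, hence `long`); the real
# ec2utils implementation may differ.
def get_ec2_id_sketch(obj_id, kind):
    if not isinstance(obj_id, (int, long)):
        raise TypeError('an integer id is required')
    if obj_id < 0 or obj_id > 0xffffffff:
        raise OverflowError('id does not fit into 32 bits')
    return '%s-%08x' % (kind, obj_id)


print(get_ec2_id_sketch(0x4d2, 'vpc'))  # vpc-000004d2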