pull-up from trunk; fix problem obscuring context module with context param; fix conflicts and no-longer-skipped tests
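Illustrative usage (a minimal sketch, not part of the commit): the shared metadata XML helpers that this pull-up consolidates into nova.api.openstack.common are exercised in the tests further down roughly as follows; class and method names follow the diff, while the sample request body is an invented example.

    from nova.api.openstack import common

    deserializer = common.MetadataXMLDeserializer()
    serializer = common.MetadataXMLSerializer()

    # XML request body -> {'body': {'metadata': {...}}}
    request_body = """
        <metadata xmlns="http://docs.openstack.org/compute/api/v1.1">
            <meta key='123'>asdf</meta>
        </metadata>"""
    parsed = deserializer.deserialize(request_body, 'create')

    # metadata dict -> XML response body
    xml = serializer.serialize(parsed['body'], 'create')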
@@ -17,12 +17,14 @@
 
 import re
 from urlparse import urlparse
+from xml.dom import minidom
 
 import webob
 
 from nova import exception
 from nova import flags
 from nova import log as logging
+from nova.api.openstack import wsgi
 
 
 LOG = logging.getLogger('nova.api.openstack.common')
@@ -192,3 +194,83 @@ def get_version_from_href(href):
     except IndexError:
         version = '1.0'
     return version
+
+
+class MetadataXMLDeserializer(wsgi.MetadataXMLDeserializer):
+
+    def _extract_metadata_container(self, datastring):
+        dom = minidom.parseString(datastring)
+        metadata_node = self.find_first_child_named(dom, "metadata")
+        metadata = self.extract_metadata(metadata_node)
+        return {'body': {'metadata': metadata}}
+
+    def create(self, datastring):
+        return self._extract_metadata_container(datastring)
+
+    def update_all(self, datastring):
+        return self._extract_metadata_container(datastring)
+
+    def update(self, datastring):
+        dom = minidom.parseString(datastring)
+        metadata_item = self.extract_metadata(dom)
+        return {'body': {'meta': metadata_item}}
+
+
+class MetadataHeadersSerializer(wsgi.ResponseHeadersSerializer):
+
+    def delete(self, response, data):
+        response.status_int = 204
+
+
+class MetadataXMLSerializer(wsgi.XMLDictSerializer):
+    def __init__(self, xmlns=wsgi.XMLNS_V11):
+        super(MetadataXMLSerializer, self).__init__(xmlns=xmlns)
+
+    def _meta_item_to_xml(self, doc, key, value):
+        node = doc.createElement('meta')
+        doc.appendChild(node)
+        node.setAttribute('key', '%s' % key)
+        text = doc.createTextNode('%s' % value)
+        node.appendChild(text)
+        return node
+
+    def meta_list_to_xml(self, xml_doc, meta_items):
+        container_node = xml_doc.createElement('metadata')
+        for (key, value) in meta_items:
+            item_node = self._meta_item_to_xml(xml_doc, key, value)
+            container_node.appendChild(item_node)
+        return container_node
+
+    def _meta_list_to_xml_string(self, metadata_dict):
+        xml_doc = minidom.Document()
+        items = metadata_dict['metadata'].items()
+        container_node = self.meta_list_to_xml(xml_doc, items)
+        xml_doc.appendChild(container_node)
+        self._add_xmlns(container_node)
+        return xml_doc.toprettyxml(indent=' ', encoding='UTF-8')
+
+    def index(self, metadata_dict):
+        return self._meta_list_to_xml_string(metadata_dict)
+
+    def create(self, metadata_dict):
+        return self._meta_list_to_xml_string(metadata_dict)
+
+    def update_all(self, metadata_dict):
+        return self._meta_list_to_xml_string(metadata_dict)
+
+    def _meta_item_to_xml_string(self, meta_item_dict):
+        xml_doc = minidom.Document()
+        item_key, item_value = meta_item_dict.items()[0]
+        item_node = self._meta_item_to_xml(xml_doc, item_key, item_value)
+        xml_doc.appendChild(item_node)
+        self._add_xmlns(item_node)
+        return xml_doc.toprettyxml(indent=' ', encoding='UTF-8')
+
+    def show(self, meta_item_dict):
+        return self._meta_item_to_xml_string(meta_item_dict['meta'])
+
+    def update(self, meta_item_dict):
+        return self._meta_item_to_xml_string(meta_item_dict['meta'])
+
+    def default(self, *args, **kwargs):
+        return ''
@@ -188,7 +188,7 @@ class CreateInstanceHelper(object):
         Overrides normal behavior in the case of xml content
         """
         if request.content_type == "application/xml":
-            deserializer = ServerCreateRequestXMLDeserializer()
+            deserializer = ServerXMLDeserializer()
             return deserializer.deserialize(request.body)
         else:
             return self._deserialize(request.body, request.get_content_type())
@@ -303,29 +303,29 @@ class ServerXMLDeserializer(wsgi.MetadataXMLDeserializer):
         """Marshal the server attribute of a parsed request"""
         server = {}
         server_node = self.find_first_child_named(node, 'server')
-        for attr in ["name", "imageId", "flavorId", "imageRef", "flavorRef"]:
+        attributes = ["name", "imageId", "flavorId", "imageRef",
+                      "flavorRef", "adminPass"]
+        for attr in attributes:
             if server_node.getAttribute(attr):
                 server[attr] = server_node.getAttribute(attr)
 
         metadata_node = self.find_first_child_named(server_node, "metadata")
-        metadata = self.extract_metadata(metadata_node)
-        if metadata is not None:
-            server["metadata"] = metadata
-        personality = self._extract_personality(server_node)
-        if personality is not None:
-            server["personality"] = personality
+        server["metadata"] = self.extract_metadata(metadata_node)
+        server["personality"] = self._extract_personality(server_node)
         return server
 
     def _extract_personality(self, server_node):
         """Marshal the personality attribute of a parsed request"""
-        personality_node = \
-            self.find_first_child_named(server_node, "personality")
-        if personality_node is None:
-            return None
+        node = self.find_first_child_named(server_node, "personality")
         personality = []
-        for file_node in self.find_children_named(personality_node, "file"):
-            item = {}
-            if file_node.hasAttribute("path"):
-                item["path"] = file_node.getAttribute("path")
-                item["contents"] = self.extract_text(file_node)
-            personality.append(item)
+        if node is not None:
+            for file_node in self.find_children_named(node, "file"):
+                item = {}
+                if file_node.hasAttribute("path"):
+                    item["path"] = file_node.getAttribute("path")
+                    item["contents"] = self.extract_text(file_node)
+                personality.append(item)
         return personality
@@ -16,12 +16,12 @@
 # under the License.
 
 from webob import exc
-from xml.dom import minidom
 
 from nova import flags
 from nova import image
 from nova import quota
 from nova import utils
+from nova.api.openstack import common
 from nova.api.openstack import wsgi
 
 
@@ -118,95 +118,15 @@ class Controller(object):
         self.image_service.update(context, image_id, img, None)
 
 
-class ImageMetadataXMLDeserializer(wsgi.MetadataXMLDeserializer):
-
-    def _extract_metadata_container(self, datastring):
-        dom = minidom.parseString(datastring)
-        metadata_node = self.find_first_child_named(dom, "metadata")
-        metadata = self.extract_metadata(metadata_node)
-        return {'body': {'metadata': metadata}}
-
-    def create(self, datastring):
-        return self._extract_metadata_container(datastring)
-
-    def update_all(self, datastring):
-        return self._extract_metadata_container(datastring)
-
-    def update(self, datastring):
-        dom = minidom.parseString(datastring)
-        metadata_item = self.extract_metadata(dom)
-        return {'body': {'meta': metadata_item}}
-
-
-class HeadersSerializer(wsgi.ResponseHeadersSerializer):
-
-    def delete(self, response, data):
-        response.status_int = 204
-
-
-class ImageMetadataXMLSerializer(wsgi.XMLDictSerializer):
-    def __init__(self, xmlns=wsgi.XMLNS_V11):
-        super(ImageMetadataXMLSerializer, self).__init__(xmlns=xmlns)
-
-    def _meta_item_to_xml(self, doc, key, value):
-        node = doc.createElement('meta')
-        doc.appendChild(node)
-        node.setAttribute('key', '%s' % key)
-        text = doc.createTextNode('%s' % value)
-        node.appendChild(text)
-        return node
-
-    def meta_list_to_xml(self, xml_doc, meta_items):
-        container_node = xml_doc.createElement('metadata')
-        for (key, value) in meta_items:
-            item_node = self._meta_item_to_xml(xml_doc, key, value)
-            container_node.appendChild(item_node)
-        return container_node
-
-    def _meta_list_to_xml_string(self, metadata_dict):
-        xml_doc = minidom.Document()
-        items = metadata_dict['metadata'].items()
-        container_node = self.meta_list_to_xml(xml_doc, items)
-        xml_doc.appendChild(container_node)
-        self._add_xmlns(container_node)
-        return xml_doc.toprettyxml(indent=' ', encoding='UTF-8')
-
-    def index(self, metadata_dict):
-        return self._meta_list_to_xml_string(metadata_dict)
-
-    def create(self, metadata_dict):
-        return self._meta_list_to_xml_string(metadata_dict)
-
-    def update_all(self, metadata_dict):
-        return self._meta_list_to_xml_string(metadata_dict)
-
-    def _meta_item_to_xml_string(self, meta_item_dict):
-        xml_doc = minidom.Document()
-        item_key, item_value = meta_item_dict.items()[0]
-        item_node = self._meta_item_to_xml(xml_doc, item_key, item_value)
-        xml_doc.appendChild(item_node)
-        self._add_xmlns(item_node)
-        return xml_doc.toprettyxml(indent=' ', encoding='UTF-8')
-
-    def show(self, meta_item_dict):
-        return self._meta_item_to_xml_string(meta_item_dict['meta'])
-
-    def update(self, meta_item_dict):
-        return self._meta_item_to_xml_string(meta_item_dict['meta'])
-
-    def default(self, *args, **kwargs):
-        return ''
-
-
 def create_resource():
-    headers_serializer = HeadersSerializer()
+    headers_serializer = common.MetadataHeadersSerializer()
 
     body_deserializers = {
-        'application/xml': ImageMetadataXMLDeserializer(),
+        'application/xml': common.MetadataXMLDeserializer(),
     }
 
     body_serializers = {
-        'application/xml': ImageMetadataXMLSerializer(),
+        'application/xml': common.MetadataXMLSerializer(),
     }
     serializer = wsgi.ResponseSerializer(body_serializers, headers_serializer)
     deserializer = wsgi.RequestDeserializer(body_deserializers)
@@ -284,7 +284,7 @@ class ImageXMLSerializer(wsgi.XMLDictSerializer):
     xmlns = wsgi.XMLNS_V11
 
     def __init__(self):
-        self.metadata_serializer = image_metadata.ImageMetadataXMLSerializer()
+        self.metadata_serializer = common.MetadataXMLSerializer()
 
     def _image_to_xml(self, xml_doc, image):
         image_node = xml_doc.createElement('image')
@@ -18,6 +18,7 @@ import traceback
 
 from webob import exc
 import webob
+from xml.dom import minidom
 
 from nova import compute
 from nova import db
@@ -27,6 +28,7 @@ from nova import log as logging
 from nova import utils
 from nova.api.openstack import common
 from nova.api.openstack import create_instance_helper as helper
+from nova.api.openstack import ips
 import nova.api.openstack.views.addresses
 import nova.api.openstack.views.flavors
 import nova.api.openstack.views.images
@@ -480,11 +482,20 @@ class ControllerV11(Controller):
             raise exc.HTTPNotFound()
 
     def _image_ref_from_req_data(self, data):
-        return data['server']['imageRef']
+        try:
+            return data['server']['imageRef']
+        except (TypeError, KeyError):
+            msg = _("Missing imageRef attribute")
+            raise exc.HTTPBadRequest(explanation=msg)
 
     def _flavor_id_from_req_data(self, data):
-        href = data['server']['flavorRef']
-        return common.get_id_from_href(href)
+        try:
+            flavor_ref = data['server']['flavorRef']
+        except (TypeError, KeyError):
+            msg = _("Missing flavorRef attribute")
+            raise exc.HTTPBadRequest(explanation=msg)
+
+        return common.get_id_from_href(flavor_ref)
 
     def _build_view(self, req, instance, is_detail=False):
         base_url = req.application_url
@@ -599,6 +610,123 @@ class HeadersSerializer(wsgi.ResponseHeadersSerializer):
         response.status_int = 204
 
 
+class ServerXMLSerializer(wsgi.XMLDictSerializer):
+
+    xmlns = wsgi.XMLNS_V11
+
+    def __init__(self):
+        self.metadata_serializer = common.MetadataXMLSerializer()
+        self.addresses_serializer = ips.IPXMLSerializer()
+
+    def _create_basic_entity_node(self, xml_doc, id, links, name):
+        basic_node = xml_doc.createElement(name)
+        basic_node.setAttribute('id', str(id))
+        link_nodes = self._create_link_nodes(xml_doc, links)
+        for link_node in link_nodes:
+            basic_node.appendChild(link_node)
+        return basic_node
+
+    def _create_metadata_node(self, xml_doc, metadata):
+        return self.metadata_serializer.meta_list_to_xml(xml_doc, metadata)
+
+    def _create_addresses_node(self, xml_doc, addresses):
+        return self.addresses_serializer.networks_to_xml(xml_doc, addresses)
+
+    def _add_server_attributes(self, node, server):
+        node.setAttribute('id', str(server['id']))
+        node.setAttribute('uuid', str(server['uuid']))
+        node.setAttribute('hostId', str(server['hostId']))
+        node.setAttribute('name', server['name'])
+        node.setAttribute('created', str(server['created']))
+        node.setAttribute('updated', str(server['updated']))
+        node.setAttribute('status', server['status'])
+        if 'progress' in server:
+            node.setAttribute('progress', str(server['progress']))
+
+    def _server_to_xml(self, xml_doc, server):
+        server_node = xml_doc.createElement('server')
+        server_node.setAttribute('id', str(server['id']))
+        server_node.setAttribute('name', server['name'])
+        link_nodes = self._create_link_nodes(xml_doc,
+                                             server['links'])
+        for link_node in link_nodes:
+            server_node.appendChild(link_node)
+        return server_node
+
+    def _server_to_xml_detailed(self, xml_doc, server):
+        server_node = xml_doc.createElement('server')
+        self._add_server_attributes(server_node, server)
+
+        link_nodes = self._create_link_nodes(xml_doc,
+                                             server['links'])
+        for link_node in link_nodes:
+            server_node.appendChild(link_node)
+
+        if 'image' in server:
+            image_node = self._create_basic_entity_node(xml_doc,
+                                                        server['image']['id'],
+                                                        server['image']['links'],
+                                                        'image')
+            server_node.appendChild(image_node)
+
+        if 'flavor' in server:
+            flavor_node = self._create_basic_entity_node(xml_doc,
+                                                         server['flavor']['id'],
+                                                         server['flavor']['links'],
+                                                         'flavor')
+            server_node.appendChild(flavor_node)
+
+        metadata = server.get('metadata', {}).items()
+        if len(metadata) > 0:
+            metadata_node = self._create_metadata_node(xml_doc, metadata)
+            server_node.appendChild(metadata_node)
+
+        addresses_node = self._create_addresses_node(xml_doc,
+                                                     server['addresses'])
+        server_node.appendChild(addresses_node)
+
+        return server_node
+
+    def _server_list_to_xml(self, xml_doc, servers, detailed):
+        container_node = xml_doc.createElement('servers')
+        if detailed:
+            server_to_xml = self._server_to_xml_detailed
+        else:
+            server_to_xml = self._server_to_xml
+
+        for server in servers:
+            item_node = server_to_xml(xml_doc, server)
+            container_node.appendChild(item_node)
+        return container_node
+
+    def index(self, servers_dict):
+        xml_doc = minidom.Document()
+        node = self._server_list_to_xml(xml_doc,
+                                        servers_dict['servers'],
+                                        detailed=False)
+        return self.to_xml_string(node, True)
+
+    def detail(self, servers_dict):
+        xml_doc = minidom.Document()
+        node = self._server_list_to_xml(xml_doc,
+                                        servers_dict['servers'],
+                                        detailed=True)
+        return self.to_xml_string(node, True)
+
+    def show(self, server_dict):
+        xml_doc = minidom.Document()
+        node = self._server_to_xml_detailed(xml_doc,
+                                            server_dict['server'])
+        return self.to_xml_string(node, True)
+
+    def create(self, server_dict):
+        xml_doc = minidom.Document()
+        node = self._server_to_xml_detailed(xml_doc,
+                                            server_dict['server'])
+        node.setAttribute('adminPass', server_dict['server']['adminPass'])
+        return self.to_xml_string(node, True)
+
+
 def create_resource(version='1.0'):
     controller = {
         '1.0': ControllerV10,
@@ -628,9 +756,13 @@ def create_resource(version='1.0'):
 
     headers_serializer = HeadersSerializer()
 
+    xml_serializer = {
+        '1.0': wsgi.XMLDictSerializer(metadata, wsgi.XMLNS_V10),
+        '1.1': ServerXMLSerializer(),
+    }[version]
+
     body_serializers = {
-        'application/xml': wsgi.XMLDictSerializer(metadata=metadata,
-                                                  xmlns=xmlns),
+        'application/xml': xml_serializer,
     }
 
     body_deserializers = {
@@ -15,6 +15,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+import datetime
 import hashlib
 import os
 
@@ -149,8 +150,10 @@ class ViewBuilderV11(ViewBuilder):
 
     def _build_detail(self, inst):
         response = super(ViewBuilderV11, self)._build_detail(inst)
-        response['server']['created'] = inst['created_at']
-        response['server']['updated'] = inst['updated_at']
+        response['server']['created'] = \
+            self._convert_timeformat(inst['created_at'])
+        response['server']['updated'] = \
+            self._convert_timeformat(inst['updated_at'])
         if 'status' in response['server']:
             if response['server']['status'] == "ACTIVE":
                 response['server']['progress'] = 100
@@ -221,3 +224,11 @@ class ViewBuilderV11(ViewBuilder):
         """Create an url that refers to a specific flavor id."""
         return os.path.join(common.remove_version_from_href(self.base_url),
                             "servers", str(server_id))
+
+    def _convert_timeformat(self, date_time):
+        """Converts the given time into the common time format
+
+        :param date_time: the datetime object to convert
+
+        """
+        return date_time.strftime(utils.TIME_FORMAT)
@@ -165,12 +165,11 @@ class MetadataXMLDeserializer(XMLDeserializer):
 
     def extract_metadata(self, metadata_node):
         """Marshal the metadata attribute of a parsed request"""
-        if metadata_node is None:
-            return None
         metadata = {}
-        for meta_node in self.find_children_named(metadata_node, "meta"):
-            key = meta_node.getAttribute("key")
-            metadata[key] = self.extract_text(meta_node)
+        if metadata_node is not None:
+            for meta_node in self.find_children_named(metadata_node, "meta"):
+                key = meta_node.getAttribute("key")
+                metadata[key] = self.extract_text(meta_node)
         return metadata
 
 
@@ -127,7 +127,7 @@ class DbDriver(object):
 
         try:
             project = db.project_create(context.get_admin_context(), values)
-        except (exception.Duplicate, exception.DBError):
+        except exception.DBError:
            raise exception.ProjectExists(project=name)
 
         for member in members:
@@ -747,7 +747,7 @@ class ComputeManager(manager.SchedulerDependentManager):
                 local_gb=instance_type['local_gb'],
                 instance_type_id=instance_type['id']))
 
-        self.driver.revert_resize(instance_ref)
+        self.driver.revert_migration(instance_ref)
         self.db.migration_update(context, migration_id,
                 {'status': 'reverted'})
         usage_info = utils.usage_from_instance(instance_ref)
@@ -845,21 +845,26 @@ class ComputeManager(manager.SchedulerDependentManager):
 
         """
         migration_ref = self.db.migration_get(context, migration_id)
 
+        resize_instance = False
         instance_ref = self.db.instance_get_by_uuid(context,
                 migration_ref.instance_uuid)
-        instance_type = self.db.instance_type_get_by_flavor_id(context,
-                migration_ref['new_flavor_id'])
-        self.db.instance_update(context, instance_ref.uuid,
-                dict(instance_type_id=instance_type['id'],
-                     memory_mb=instance_type['memory_mb'],
-                     vcpus=instance_type['vcpus'],
-                     local_gb=instance_type['local_gb']))
+        if migration_ref['old_flavor_id'] != migration_ref['new_flavor_id']:
+            instance_type = self.db.instance_type_get_by_flavor_id(context,
+                    migration_ref['new_flavor_id'])
+            self.db.instance_update(context, instance_ref.uuid,
+                    dict(instance_type_id=instance_type['id'],
+                         memory_mb=instance_type['memory_mb'],
+                         vcpus=instance_type['vcpus'],
+                         local_gb=instance_type['local_gb']))
+            resize_instance = True
 
         instance_ref = self.db.instance_get_by_uuid(context,
                 instance_ref.uuid)
 
         network_info = self._get_instance_nw_info(context, instance_ref)
-        self.driver.finish_resize(context, instance_ref, disk_info,
-                                  network_info)
+        self.driver.finish_migration(context, instance_ref, disk_info,
+                                     network_info, resize_instance)
 
         self.db.migration_update(context, migration_id,
                 {'status': 'finished', })
@@ -20,6 +20,7 @@ Test suites for 'common' code used throughout the OpenStack HTTP API.
 """
 
 import webob.exc
+import xml.dom.minidom as minidom
 
 from webob import Request
 
@@ -265,3 +266,203 @@ class MiscFunctionsTest(test.TestCase):
         expected = '1.0'
         actual = common.get_version_from_href(fixture)
         self.assertEqual(actual, expected)
+
+
+class MetadataXMLDeserializationTest(test.TestCase):
+
+    deserializer = common.MetadataXMLDeserializer()
+
+    def test_create(self):
+        request_body = """
+            <metadata xmlns="http://docs.openstack.org/compute/api/v1.1">
+                <meta key='123'>asdf</meta>
+                <meta key='567'>jkl;</meta>
+            </metadata>"""
+        output = self.deserializer.deserialize(request_body, 'create')
+        expected = {"body": {"metadata": {"123": "asdf", "567": "jkl;"}}}
+        self.assertEquals(output, expected)
+
+    def test_create_empty(self):
+        request_body = """
+            <metadata xmlns="http://docs.openstack.org/compute/api/v1.1"/>"""
+        output = self.deserializer.deserialize(request_body, 'create')
+        expected = {"body": {"metadata": {}}}
+        self.assertEquals(output, expected)
+
+    def test_update_all(self):
+        request_body = """
+            <metadata xmlns="http://docs.openstack.org/compute/api/v1.1">
+                <meta key='123'>asdf</meta>
+                <meta key='567'>jkl;</meta>
+            </metadata>"""
+        output = self.deserializer.deserialize(request_body, 'update_all')
+        expected = {"body": {"metadata": {"123": "asdf", "567": "jkl;"}}}
+        self.assertEquals(output, expected)
+
+    def test_update(self):
+        request_body = """
+            <meta xmlns="http://docs.openstack.org/compute/api/v1.1"
+                  key='123'>asdf</meta>"""
+        output = self.deserializer.deserialize(request_body, 'update')
+        expected = {"body": {"meta": {"123": "asdf"}}}
+        self.assertEquals(output, expected)
+
+
+class MetadataXMLSerializationTest(test.TestCase):
+
+    def test_index(self):
+        serializer = common.MetadataXMLSerializer()
+        fixture = {
+            'metadata': {
+                'one': 'two',
+                'three': 'four',
+            },
+        }
+        output = serializer.serialize(fixture, 'index')
+        actual = minidom.parseString(output.replace(" ", ""))
+
+        expected = minidom.parseString("""
+            <metadata xmlns="http://docs.openstack.org/compute/api/v1.1">
+                <meta key="three">
+                    four
+                </meta>
+                <meta key="one">
+                    two
+                </meta>
+            </metadata>
+        """.replace(" ", ""))
+
+        self.assertEqual(expected.toxml(), actual.toxml())
+
+    def test_index_null(self):
+        serializer = common.MetadataXMLSerializer()
+        fixture = {
+            'metadata': {
+                None: None,
+            },
+        }
+        output = serializer.serialize(fixture, 'index')
+        actual = minidom.parseString(output.replace(" ", ""))
+
+        expected = minidom.parseString("""
+            <metadata xmlns="http://docs.openstack.org/compute/api/v1.1">
+                <meta key="None">
+                    None
+                </meta>
+            </metadata>
+        """.replace(" ", ""))
+
+        self.assertEqual(expected.toxml(), actual.toxml())
+
+    def test_index_unicode(self):
+        serializer = common.MetadataXMLSerializer()
+        fixture = {
+            'metadata': {
+                u'three': u'Jos\xe9',
+            },
+        }
+        output = serializer.serialize(fixture, 'index')
+        actual = minidom.parseString(output.replace(" ", ""))
+
+        expected = minidom.parseString(u"""
+            <metadata xmlns="http://docs.openstack.org/compute/api/v1.1">
+                <meta key="three">
+                    Jos\xe9
+                </meta>
+            </metadata>
+        """.encode("UTF-8").replace(" ", ""))
+
+        self.assertEqual(expected.toxml(), actual.toxml())
+
+    def test_show(self):
+        serializer = common.MetadataXMLSerializer()
+        fixture = {
+            'meta': {
+                'one': 'two',
+            },
+        }
+        output = serializer.serialize(fixture, 'show')
+        actual = minidom.parseString(output.replace(" ", ""))
+
+        expected = minidom.parseString("""
+            <meta xmlns="http://docs.openstack.org/compute/api/v1.1" key="one">
+                two
+            </meta>
+        """.replace(" ", ""))
+
+        self.assertEqual(expected.toxml(), actual.toxml())
+
+    def test_update_all(self):
+        serializer = common.MetadataXMLSerializer()
+        fixture = {
+            'metadata': {
+                'key6': 'value6',
+                'key4': 'value4',
+            },
+        }
+        output = serializer.serialize(fixture, 'update_all')
+        actual = minidom.parseString(output.replace(" ", ""))
+
+        expected = minidom.parseString("""
+            <metadata xmlns="http://docs.openstack.org/compute/api/v1.1">
+                <meta key="key6">
+                    value6
+                </meta>
+                <meta key="key4">
+                    value4
+                </meta>
+            </metadata>
+        """.replace(" ", ""))
+
+        self.assertEqual(expected.toxml(), actual.toxml())
+
+    def test_update_item(self):
+        serializer = common.MetadataXMLSerializer()
+        fixture = {
+            'meta': {
+                'one': 'two',
+            },
+        }
+        output = serializer.serialize(fixture, 'update')
+        actual = minidom.parseString(output.replace(" ", ""))
+
+        expected = minidom.parseString("""
+            <meta xmlns="http://docs.openstack.org/compute/api/v1.1" key="one">
+                two
+            </meta>
+        """.replace(" ", ""))
+
+        self.assertEqual(expected.toxml(), actual.toxml())
+
+    def test_create(self):
+        serializer = common.MetadataXMLSerializer()
+        fixture = {
+            'metadata': {
+                'key9': 'value9',
+                'key2': 'value2',
+                'key1': 'value1',
+            },
+        }
+        output = serializer.serialize(fixture, 'create')
+        actual = minidom.parseString(output.replace(" ", ""))
+
+        expected = minidom.parseString("""
+            <metadata xmlns="http://docs.openstack.org/compute/api/v1.1">
+                <meta key="key2">
+                    value2
+                </meta>
+                <meta key="key9">
+                    value9
+                </meta>
+                <meta key="key1">
+                    value1
+                </meta>
+            </metadata>
+        """.replace(" ", ""))
+
+        self.assertEqual(expected.toxml(), actual.toxml())
+
+    def test_delete(self):
+        serializer = common.MetadataXMLSerializer()
+        output = serializer.serialize(None, 'delete')
+        self.assertEqual(output, '')
@@ -109,8 +109,8 @@ class ExtensionControllerTest(unittest.TestCase):
             'updated': '2011-01-22T13:25:27-06:00',
             'description': 'The Fox In Socks Extension',
             'alias': 'FOXNSOX',
-            'links': []
-            }
+            'links': [],
+            },
         )
 
     def test_get_extension_json(self):
@@ -127,8 +127,8 @@ class ExtensionControllerTest(unittest.TestCase):
             "updated": "2011-01-22T13:25:27-06:00",
             "description": "The Fox In Socks Extension",
             "alias": "FOXNSOX",
-            "links": []
-            }
+            "links": [],
+            },
         )
 
     def test_list_extensions_xml(self):
@@ -342,15 +342,15 @@ class ExtensionsXMLSerializerTest(unittest.TestCase):
                     {
                         'rel': 'describedby',
                         'type': 'application/pdf',
-                        'href': 'http://docs.rack.com/servers/api/ext/cs.pdf'
+                        'href': 'http://docs.rack.com/servers/api/ext/cs.pdf',
                     },
                     {
                         'rel': 'describedby',
                         'type': 'application/vnd.sun.wadl+xml',
-                        'href': 'http://docs.rack.com/servers/api/ext/cs.wadl'
-                    }
-                ]
-            }
+                        'href': 'http://docs.rack.com/servers/api/ext/cs.wadl',
+                    },
+                ],
+            },
         }
 
         xml = serializer.serialize(data, 'show')
@@ -382,14 +382,14 @@ class ExtensionsXMLSerializerTest(unittest.TestCase):
                        {
                            "rel": "describedby",
                            "type": "application/pdf",
-                           "href": "http://foo.com/api/ext/cs-pie.pdf"
+                           "href": "http://foo.com/api/ext/cs-pie.pdf",
                        },
                        {
                            "rel": "describedby",
                            "type": "application/vnd.sun.wadl+xml",
-                           "href": "http://foo.com/api/ext/cs-pie.wadl"
-                       }
-                   ]
+                           "href": "http://foo.com/api/ext/cs-pie.wadl",
+                       },
+                   ],
                },
                {
                    "name": "Cloud Block Storage",
@@ -401,16 +401,16 @@ class ExtensionsXMLSerializerTest(unittest.TestCase):
                        {
                            "rel": "describedby",
                            "type": "application/pdf",
-                           "href": "http://foo.com/api/ext/cs-cbs.pdf"
+                           "href": "http://foo.com/api/ext/cs-cbs.pdf",
                        },
                        {
                            "rel": "describedby",
                            "type": "application/vnd.sun.wadl+xml",
-                           "href": "http://foo.com/api/ext/cs-cbs.wadl"
-                       }
-                   ]
-               }
-           ]
+                           "href": "http://foo.com/api/ext/cs-cbs.wadl",
+                       },
+                   ],
+               },
+           ],
        }
 
        xml = serializer.serialize(data, 'index')
@@ -19,7 +19,6 @@ import json
 import stubout
 import unittest
 import webob
-import xml.dom.minidom as minidom
 
 
 from nova import flags
@@ -252,203 +251,3 @@ class ImageMetaDataTest(test.TestCase):
         req.headers["content-type"] = "application/json"
         res = req.get_response(fakes.wsgi_app())
         self.assertEqual(400, res.status_int)
-
-
-class ImageMetadataXMLDeserializationTest(test.TestCase):
-
-    deserializer = openstack.image_metadata.ImageMetadataXMLDeserializer()
-
-    def test_create(self):
-        request_body = """
-            <metadata xmlns="http://docs.openstack.org/compute/api/v1.1">
-                <meta key='123'>asdf</meta>
-                <meta key='567'>jkl;</meta>
-            </metadata>"""
-        output = self.deserializer.deserialize(request_body, 'create')
-        expected = {"body": {"metadata": {"123": "asdf", "567": "jkl;"}}}
-        self.assertEquals(output, expected)
-
-    def test_create_empty(self):
-        request_body = """
-            <metadata xmlns="http://docs.openstack.org/compute/api/v1.1"/>"""
-        output = self.deserializer.deserialize(request_body, 'create')
-        expected = {"body": {"metadata": {}}}
-        self.assertEquals(output, expected)
-
-    def test_update_all(self):
-        request_body = """
-            <metadata xmlns="http://docs.openstack.org/compute/api/v1.1">
-                <meta key='123'>asdf</meta>
-                <meta key='567'>jkl;</meta>
-            </metadata>"""
-        output = self.deserializer.deserialize(request_body, 'update_all')
-        expected = {"body": {"metadata": {"123": "asdf", "567": "jkl;"}}}
-        self.assertEquals(output, expected)
-
-    def test_update(self):
-        request_body = """
-            <meta xmlns="http://docs.openstack.org/compute/api/v1.1"
-                  key='123'>asdf</meta>"""
-        output = self.deserializer.deserialize(request_body, 'update')
-        expected = {"body": {"meta": {"123": "asdf"}}}
-        self.assertEquals(output, expected)
-
-
-class ImageMetadataXMLSerializationTest(test.TestCase):
-
-    def test_index(self):
-        serializer = openstack.image_metadata.ImageMetadataXMLSerializer()
-        fixture = {
-            'metadata': {
-                'one': 'two',
-                'three': 'four',
-            },
-        }
-        output = serializer.serialize(fixture, 'index')
-        actual = minidom.parseString(output.replace(" ", ""))
-
-        expected = minidom.parseString("""
-            <metadata xmlns="http://docs.openstack.org/compute/api/v1.1">
-                <meta key="three">
-                    four
-                </meta>
-                <meta key="one">
-                    two
-                </meta>
-            </metadata>
-        """.replace(" ", ""))
-
-        self.assertEqual(expected.toxml(), actual.toxml())
-
-    def test_index_null(self):
-        serializer = openstack.image_metadata.ImageMetadataXMLSerializer()
-        fixture = {
-            'metadata': {
-                None: None,
-            },
-        }
-        output = serializer.serialize(fixture, 'index')
-        actual = minidom.parseString(output.replace(" ", ""))
-
-        expected = minidom.parseString("""
-            <metadata xmlns="http://docs.openstack.org/compute/api/v1.1">
-                <meta key="None">
-                    None
-                </meta>
-            </metadata>
-        """.replace(" ", ""))
-
-        self.assertEqual(expected.toxml(), actual.toxml())
-
-    def test_index_unicode(self):
-        serializer = openstack.image_metadata.ImageMetadataXMLSerializer()
-        fixture = {
-            'metadata': {
-                u'three': u'Jos\xe9',
-            },
-        }
-        output = serializer.serialize(fixture, 'index')
-        actual = minidom.parseString(output.replace(" ", ""))
-
-        expected = minidom.parseString(u"""
-            <metadata xmlns="http://docs.openstack.org/compute/api/v1.1">
-                <meta key="three">
-                    Jos\xe9
-                </meta>
-            </metadata>
-        """.encode("UTF-8").replace(" ", ""))
-
-        self.assertEqual(expected.toxml(), actual.toxml())
-
-    def test_show(self):
-        serializer = openstack.image_metadata.ImageMetadataXMLSerializer()
-        fixture = {
-            'meta': {
-                'one': 'two',
-            },
-        }
-        output = serializer.serialize(fixture, 'show')
-        actual = minidom.parseString(output.replace(" ", ""))
-
-        expected = minidom.parseString("""
-            <meta xmlns="http://docs.openstack.org/compute/api/v1.1" key="one">
-                two
-            </meta>
-        """.replace(" ", ""))
-
-        self.assertEqual(expected.toxml(), actual.toxml())
-
-    def test_update_all(self):
-        serializer = openstack.image_metadata.ImageMetadataXMLSerializer()
-        fixture = {
-            'metadata': {
-                'key6': 'value6',
-                'key4': 'value4',
-            },
-        }
-        output = serializer.serialize(fixture, 'update_all')
-        actual = minidom.parseString(output.replace(" ", ""))
-
-        expected = minidom.parseString("""
-            <metadata xmlns="http://docs.openstack.org/compute/api/v1.1">
-                <meta key="key6">
-                    value6
-                </meta>
-                <meta key="key4">
-                    value4
-                </meta>
-            </metadata>
-        """.replace(" ", ""))
-
-        self.assertEqual(expected.toxml(), actual.toxml())
-
-    def test_update_item(self):
-        serializer = openstack.image_metadata.ImageMetadataXMLSerializer()
-        fixture = {
-            'meta': {
-                'one': 'two',
-            },
-        }
-        output = serializer.serialize(fixture, 'update')
-        actual = minidom.parseString(output.replace(" ", ""))
-
-        expected = minidom.parseString("""
-            <meta xmlns="http://docs.openstack.org/compute/api/v1.1" key="one">
-                two
-            </meta>
-        """.replace(" ", ""))
-
-        self.assertEqual(expected.toxml(), actual.toxml())
-
-    def test_create(self):
-        serializer = openstack.image_metadata.ImageMetadataXMLSerializer()
-        fixture = {
-            'metadata': {
-                'key9': 'value9',
-                'key2': 'value2',
-                'key1': 'value1',
-            },
-        }
-        output = serializer.serialize(fixture, 'create')
-        actual = minidom.parseString(output.replace(" ", ""))
-
-        expected = minidom.parseString("""
-            <metadata xmlns="http://docs.openstack.org/compute/api/v1.1">
-                <meta key="key2">
-                    value2
-                </meta>
-                <meta key="key9">
-                    value9
-                </meta>
-                <meta key="key1">
-                    value1
-                </meta>
-            </metadata>
-        """.replace(" ", ""))
-
-        self.assertEqual(expected.toxml(), actual.toxml())
-
-    def test_delete(self):
-        serializer = openstack.image_metadata.ImageMetadataXMLSerializer()
-        output = serializer.serialize(None, 'delete')
-        self.assertEqual(output, '')
@@ -920,7 +920,7 @@ class LimitsViewBuilderV11Test(test.TestCase):
                 "verb": "POST",
                 "remaining": 2,
                 "unit": "MINUTE",
-                "resetTime": 1311272226
+                "resetTime": 1311272226,
             },
             {
                 "URI": "*/servers",
@@ -929,7 +929,7 @@ class LimitsViewBuilderV11Test(test.TestCase):
                 "verb": "POST",
                 "remaining": 10,
                 "unit": "DAY",
-                "resetTime": 1311272226
+                "resetTime": 1311272226,
             },
         ]
         self.absolute_limits = {
@@ -954,7 +954,7 @@ class LimitsViewBuilderV11Test(test.TestCase):
                         "verb": "POST",
                         "remaining": 2,
                         "unit": "MINUTE",
-                        "next-available": "2011-07-21T18:17:06Z"
+                        "next-available": "2011-07-21T18:17:06Z",
                     },
                 ]
             },
@@ -967,7 +967,7 @@ class LimitsViewBuilderV11Test(test.TestCase):
                         "verb": "POST",
                         "remaining": 10,
                         "unit": "DAY",
-                        "next-available": "2011-07-21T18:17:06Z"
+                        "next-available": "2011-07-21T18:17:06Z",
                     },
                 ]
             },
@@ -989,7 +989,7 @@ class LimitsViewBuilderV11Test(test.TestCase):
         expected_limits = {
             "limits": {
                 "rate": [],
-                "absolute": {}
+                "absolute": {},
             }
         }
 
@@ -1022,7 +1022,7 @@ class LimitsXMLSerializationTest(test.TestCase):
                         "verb": "POST",
                         "remaining": 2,
                         "unit": "MINUTE",
-                        "next-available": "2011-12-15T22:42:45Z"
+                        "next-available": "2011-12-15T22:42:45Z",
                     },
                 ]
             },
@@ -1083,7 +1083,7 @@ class LimitsXMLSerializationTest(test.TestCase):
         fixture = {
             "limits": {
                 "rate": [],
-                "absolute": {}
+                "absolute": {},
             }
         }
 
@@ -16,6 +16,7 @@
 # under the License.
 
 import base64
+import datetime
 import json
 import unittest
 from xml.dom import minidom
@@ -172,8 +173,8 @@ def stub_instance(id, user_id=1, private_address=None, public_addresses=None,
 
     instance = {
         "id": int(id),
-        "created_at": "2010-10-10T12:00:00Z",
-        "updated_at": "2010-11-11T11:00:00Z",
+        "created_at": datetime.datetime(2010, 10, 10, 12, 0, 0),
+        "updated_at": datetime.datetime(2010, 11, 11, 11, 0, 0),
         "admin_pass": "",
         "user_id": user_id,
        "project_id": "",
@@ -399,6 +400,78 @@ class ServersTest(test.TestCase):
 
         self.assertDictMatch(res_dict, expected_server)
 
+    def test_get_server_by_id_v1_1_xml(self):
+        image_bookmark = "http://localhost/images/10"
+        flavor_ref = "http://localhost/v1.1/flavors/1"
+        flavor_id = "1"
+        flavor_bookmark = "http://localhost/flavors/1"
+        server_href = "http://localhost/v1.1/servers/1"
+        server_bookmark = "http://localhost/servers/1"
+
+        public_ip = '192.168.0.3'
+        private_ip = '172.19.0.1'
+        interfaces = [
+            {
+                'network': {'label': 'public'},
+                'fixed_ips': [
+                    {'address': public_ip},
+                ],
+            },
+            {
+                'network': {'label': 'private'},
+                'fixed_ips': [
+                    {'address': private_ip},
+                ],
+            },
+        ]
+        new_return_server = return_server_with_attributes(
+            interfaces=interfaces)
+        self.stubs.Set(nova.db.api, 'instance_get', new_return_server)
+
+        req = webob.Request.blank('/v1.1/servers/1')
+        req.headers['Accept'] = 'application/xml'
+        res = req.get_response(fakes.wsgi_app())
+        actual = minidom.parseString(res.body.replace(' ', ''))
+        expected_uuid = FAKE_UUID
+        expected_updated = "2010-11-11T11:00:00Z"
+        expected_created = "2010-10-10T12:00:00Z"
+        expected = minidom.parseString("""
+        <server id="1"
+                uuid="%(expected_uuid)s"
+                xmlns="http://docs.openstack.org/compute/api/v1.1"
+                xmlns:atom="http://www.w3.org/2005/Atom"
+                name="server1"
+                updated="%(expected_updated)s"
+                created="%(expected_created)s"
+                hostId=""
+                status="BUILD"
+                progress="0">
+            <atom:link href="%(server_href)s" rel="self"/>
+            <atom:link href="%(server_bookmark)s" rel="bookmark"/>
+            <image id="10">
+                <atom:link rel="bookmark" href="%(image_bookmark)s"/>
+            </image>
+            <flavor id="1">
+                <atom:link rel="bookmark" href="%(flavor_bookmark)s"/>
+            </flavor>
+            <metadata>
+                <meta key="seq">
+                    1
+                </meta>
+            </metadata>
+            <addresses>
+                <network id="public">
+                    <ip version="4" addr="%(public_ip)s"/>
+                </network>
+                <network id="private">
+                    <ip version="4" addr="%(private_ip)s"/>
+                </network>
+            </addresses>
+        </server>
+        """.replace(" ", "") % (locals()))
+
+        self.assertEqual(expected.toxml(), actual.toxml())
+
     def test_get_server_with_active_status_by_id_v1_1(self):
         image_bookmark = "http://localhost/images/10"
         flavor_ref = "http://localhost/v1.1/flavors/1"
@@ -1048,8 +1121,8 @@ class ServersTest(test.TestCase):
             'uuid': FAKE_UUID,
             'instance_type': dict(inst_type),
             'image_ref': image_ref,
-            'created_at': '2010-10-10T12:00:00Z',
-            'updated_at': '2010-11-11T11:00:00Z',
+            "created_at": datetime.datetime(2010, 10, 10, 12, 0, 0),
+            "updated_at": datetime.datetime(2010, 11, 11, 11, 0, 0),
         }
 
         def server_update(context, id, params):
@@ -1099,6 +1172,7 @@ class ServersTest(test.TestCase):
 
         res = req.get_response(fakes.wsgi_app())
 
+        self.assertEqual(res.status_int, 200)
         server = json.loads(res.body)['server']
         self.assertEqual(16, len(server['adminPass']))
         self.assertEqual('server_test', server['name'])
@@ -1106,7 +1180,6 @@ class ServersTest(test.TestCase):
         self.assertEqual(2, server['flavorId'])
         self.assertEqual(3, server['imageId'])
         self.assertEqual(FAKE_UUID, server['uuid'])
-        self.assertEqual(res.status_int, 200)
 
     def test_create_instance(self):
         self._test_create_instance_helper()
@@ -1279,7 +1352,12 @@ class ServersTest(test.TestCase):
                     'hello': 'world',
                     'open': 'stack',
                 },
-                'personality': {},
+                'personality': [
+                    {
+                        "path": "/etc/banner.txt",
+                        "contents": "MQ==",
+                    },
+                ],
             },
         }
 
@@ -1293,11 +1371,11 @@ class ServersTest(test.TestCase):
         self.assertEqual(res.status_int, 200)
         server = json.loads(res.body)['server']
         self.assertEqual(16, len(server['adminPass']))
+        self.assertEqual(1, server['id'])
+        self.assertEqual(0, server['progress'])
         self.assertEqual('server_test', server['name'])
         self.assertEqual(expected_flavor, server['flavor'])
         self.assertEqual(expected_image, server['image'])
-        self.assertEqual(res.status_int, 200)
-        #self.assertEqual(1, server['id'])
 
     def test_create_instance_v1_1_invalid_flavor_href(self):
         self._setup_for_create_instance()
@@ -1351,7 +1429,7 @@ class ServersTest(test.TestCase):
         self._setup_for_create_instance()
 
         image_id = "2"
-        flavor_ref = 'http://localhost/flavors/3'
+        flavor_ref = 'http://localhost/v1.1/flavors/3'
         expected_flavor = {
             "id": "3",
             "links": [
@@ -1385,10 +1463,10 @@ class ServersTest(test.TestCase):
 
         res = req.get_response(fakes.wsgi_app())
 
+        self.assertEqual(res.status_int, 200)
         server = json.loads(res.body)['server']
         self.assertEqual(expected_flavor, server['flavor'])
         self.assertEqual(expected_image, server['image'])
-        self.assertEqual(res.status_int, 200)
 
     def test_create_instance_with_admin_pass_v1_0(self):
         self._setup_for_create_instance()
@@ -1411,7 +1489,7 @@ class ServersTest(test.TestCase):
         self.assertNotEqual(res['server']['adminPass'],
                             body['server']['adminPass'])
 
-    def test_create_instance_with_admin_pass_v1_1(self):
+    def test_create_instance_v1_1_admin_pass(self):
         self._setup_for_create_instance()
 
         image_href = 'http://localhost/v1.1/images/2'
@@ -1419,8 +1497,8 @@ class ServersTest(test.TestCase):
         body = {
             'server': {
                 'name': 'server_test',
-                'imageRef': image_href,
-                'flavorRef': flavor_ref,
+                'imageRef': 3,
+                'flavorRef': 3,
                 'adminPass': 'testpass',
            },
        }
@@ -1430,19 +1508,18 @@ class ServersTest(test.TestCase):
|
|||||||
req.body = json.dumps(body)
|
req.body = json.dumps(body)
|
||||||
req.headers['content-type'] = "application/json"
|
req.headers['content-type'] = "application/json"
|
||||||
res = req.get_response(fakes.wsgi_app())
|
res = req.get_response(fakes.wsgi_app())
|
||||||
|
self.assertEqual(res.status_int, 200)
|
||||||
server = json.loads(res.body)['server']
|
server = json.loads(res.body)['server']
|
||||||
self.assertEqual(server['adminPass'], body['server']['adminPass'])
|
self.assertEqual(server['adminPass'], body['server']['adminPass'])
|
||||||
|
|
||||||
def test_create_instance_with_empty_admin_pass_v1_1(self):
|
def test_create_instance_v1_1_admin_pass_empty(self):
|
||||||
self._setup_for_create_instance()
|
self._setup_for_create_instance()
|
||||||
|
|
||||||
image_href = 'http://localhost/v1.1/images/2'
|
|
||||||
flavor_ref = 'http://localhost/v1.1/flavors/3'
|
|
||||||
body = {
|
body = {
|
||||||
'server': {
|
'server': {
|
||||||
'name': 'server_test',
|
'name': 'server_test',
|
||||||
'imageRef': image_href,
|
'imageRef': 3,
|
||||||
'flavorRef': flavor_ref,
|
'flavorRef': 3,
|
||||||
'adminPass': '',
|
'adminPass': '',
|
||||||
},
|
},
|
||||||
}
|
}
|
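The v1.1 create tests above now assert the 200 status before decoding the response body, and several of them build near-identical requests. A hedged sketch of that request pattern, reusing the fakes.wsgi_app() helper these tests already call (the import path and URL here are assumptions):

import json

import webob

from nova.tests.api.openstack import fakes


def post_server_create(body, path='/v1.1/servers'):
    """Submit a server-create body the way the tests above do."""
    req = webob.Request.blank(path)
    req.method = 'POST'
    req.body = json.dumps(body)
    req.headers['content-type'] = 'application/json'
    return req.get_response(fakes.wsgi_app())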
||||||
@@ -2235,7 +2312,7 @@ class ServersTest(test.TestCase):
|
|||||||
self.assertEqual(res_dict['server']['status'], 'SHUTOFF')
|
self.assertEqual(res_dict['server']['status'], 'SHUTOFF')
|
||||||
|
|
||||||
|
|
||||||
class TestServerCreateRequestXMLDeserializer(unittest.TestCase):
|
class TestServerCreateRequestXMLDeserializerV10(unittest.TestCase):
|
||||||
|
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
self.deserializer = create_instance_helper.ServerXMLDeserializer()
|
self.deserializer = create_instance_helper.ServerXMLDeserializer()
|
||||||
@@ -2249,6 +2326,8 @@ class TestServerCreateRequestXMLDeserializer(unittest.TestCase):
|
|||||||
"name": "new-server-test",
|
"name": "new-server-test",
|
||||||
"imageId": "1",
|
"imageId": "1",
|
||||||
"flavorId": "1",
|
"flavorId": "1",
|
||||||
|
"metadata": {},
|
||||||
|
"personality": [],
|
||||||
}}
|
}}
|
||||||
self.assertEquals(request['body'], expected)
|
self.assertEquals(request['body'], expected)
|
||||||
|
|
||||||
@@ -2264,6 +2343,7 @@ class TestServerCreateRequestXMLDeserializer(unittest.TestCase):
|
|||||||
"imageId": "1",
|
"imageId": "1",
|
||||||
"flavorId": "1",
|
"flavorId": "1",
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
|
"personality": [],
|
||||||
}}
|
}}
|
||||||
self.assertEquals(request['body'], expected)
|
self.assertEquals(request['body'], expected)
|
||||||
|
|
||||||
@@ -2278,6 +2358,7 @@ class TestServerCreateRequestXMLDeserializer(unittest.TestCase):
|
|||||||
"name": "new-server-test",
|
"name": "new-server-test",
|
||||||
"imageId": "1",
|
"imageId": "1",
|
||||||
"flavorId": "1",
|
"flavorId": "1",
|
||||||
|
"metadata": {},
|
||||||
"personality": [],
|
"personality": [],
|
||||||
}}
|
}}
|
||||||
self.assertEquals(request['body'], expected)
|
self.assertEquals(request['body'], expected)
|
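The expected dictionaries in these deserializer tests now include empty "metadata" and "personality" defaults even when the request omits those sections. A small illustrative sketch (function name invented) of how a deserializer could guarantee such defaults:

def apply_server_defaults(server):
    """Ensure optional create-request sections are always present."""
    server.setdefault("metadata", {})
    server.setdefault("personality", [])
    return server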
||||||
@@ -2515,18 +2596,188 @@ b25zLiINCg0KLVJpY2hhcmQgQmFjaA==""",
|
|||||||
request = self.deserializer.deserialize(serial_request, 'create')
|
request = self.deserializer.deserialize(serial_request, 'create')
|
||||||
self.assertEqual(request['body'], expected)
|
self.assertEqual(request['body'], expected)
|
||||||
|
|
||||||
def test_request_xmlser_with_flavor_image_href(self):
|
|
||||||
|
class TestServerCreateRequestXMLDeserializerV11(unittest.TestCase):
|
||||||
|
|
||||||
|
def setUp(self):
|
||||||
|
self.deserializer = create_instance_helper.ServerXMLDeserializer()
|
||||||
|
|
||||||
|
def test_minimal_request(self):
|
||||||
serial_request = """
|
serial_request = """
|
||||||
<server xmlns="http://docs.openstack.org/compute/api/v1.1"
|
<server xmlns="http://docs.openstack.org/compute/api/v1.1"
|
||||||
name="new-server-test"
|
name="new-server-test"
|
||||||
imageRef="http://localhost:8774/v1.1/images/1"
|
imageRef="1"
|
||||||
flavorRef="http://localhost:8774/v1.1/flavors/1">
|
flavorRef="2"/>"""
|
||||||
</server>"""
|
|
||||||
request = self.deserializer.deserialize(serial_request, 'create')
|
request = self.deserializer.deserialize(serial_request, 'create')
|
||||||
self.assertEquals(request['body']["server"]["flavorRef"],
|
expected = {
|
||||||
"http://localhost:8774/v1.1/flavors/1")
|
"server": {
|
||||||
self.assertEquals(request['body']["server"]["imageRef"],
|
"name": "new-server-test",
|
||||||
"http://localhost:8774/v1.1/images/1")
|
"imageRef": "1",
|
||||||
|
"flavorRef": "2",
|
||||||
|
"metadata": {},
|
||||||
|
"personality": [],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
self.assertEquals(request['body'], expected)
|
||||||
|
|
||||||
|
def test_admin_pass(self):
|
||||||
|
serial_request = """
|
||||||
|
<server xmlns="http://docs.openstack.org/compute/api/v1.1"
|
||||||
|
name="new-server-test"
|
||||||
|
imageRef="1"
|
||||||
|
flavorRef="2"
|
||||||
|
adminPass="1234"/>"""
|
||||||
|
request = self.deserializer.deserialize(serial_request, 'create')
|
||||||
|
expected = {
|
||||||
|
"server": {
|
||||||
|
"name": "new-server-test",
|
||||||
|
"imageRef": "1",
|
||||||
|
"flavorRef": "2",
|
||||||
|
"adminPass": "1234",
|
||||||
|
"metadata": {},
|
||||||
|
"personality": [],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
self.assertEquals(request['body'], expected)
|
||||||
|
|
||||||
|
def test_image_link(self):
|
||||||
|
serial_request = """
|
||||||
|
<server xmlns="http://docs.openstack.org/compute/api/v1.1"
|
||||||
|
name="new-server-test"
|
||||||
|
imageRef="http://localhost:8774/v1.1/images/2"
|
||||||
|
flavorRef="3"/>"""
|
||||||
|
request = self.deserializer.deserialize(serial_request, 'create')
|
||||||
|
expected = {
|
||||||
|
"server": {
|
||||||
|
"name": "new-server-test",
|
||||||
|
"imageRef": "http://localhost:8774/v1.1/images/2",
|
||||||
|
"flavorRef": "3",
|
||||||
|
"metadata": {},
|
||||||
|
"personality": [],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
self.assertEquals(request['body'], expected)
|
||||||
|
|
||||||
|
def test_flavor_link(self):
|
||||||
|
serial_request = """
|
||||||
|
<server xmlns="http://docs.openstack.org/compute/api/v1.1"
|
||||||
|
name="new-server-test"
|
||||||
|
imageRef="1"
|
||||||
|
flavorRef="http://localhost:8774/v1.1/flavors/3"/>"""
|
||||||
|
request = self.deserializer.deserialize(serial_request, 'create')
|
||||||
|
expected = {
|
||||||
|
"server": {
|
||||||
|
"name": "new-server-test",
|
||||||
|
"imageRef": "1",
|
||||||
|
"flavorRef": "http://localhost:8774/v1.1/flavors/3",
|
||||||
|
"metadata": {},
|
||||||
|
"personality": [],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
self.assertEquals(request['body'], expected)
|
||||||
|
|
||||||
|
def test_empty_metadata_personality(self):
|
||||||
|
serial_request = """
|
||||||
|
<server xmlns="http://docs.openstack.org/compute/api/v1.1"
|
||||||
|
name="new-server-test"
|
||||||
|
imageRef="1"
|
||||||
|
flavorRef="2">
|
||||||
|
<metadata/>
|
||||||
|
<personality/>
|
||||||
|
</server>"""
|
||||||
|
request = self.deserializer.deserialize(serial_request, 'create')
|
||||||
|
expected = {
|
||||||
|
"server": {
|
||||||
|
"name": "new-server-test",
|
||||||
|
"imageRef": "1",
|
||||||
|
"flavorRef": "2",
|
||||||
|
"metadata": {},
|
||||||
|
"personality": [],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
self.assertEquals(request['body'], expected)
|
||||||
|
|
||||||
|
def test_multiple_metadata_items(self):
|
||||||
|
serial_request = """
|
||||||
|
<server xmlns="http://docs.openstack.org/compute/api/v1.1"
|
||||||
|
name="new-server-test"
|
||||||
|
imageRef="1"
|
||||||
|
flavorRef="2">
|
||||||
|
<metadata>
|
||||||
|
<meta key="one">two</meta>
|
||||||
|
<meta key="open">snack</meta>
|
||||||
|
</metadata>
|
||||||
|
</server>"""
|
||||||
|
request = self.deserializer.deserialize(serial_request, 'create')
|
||||||
|
expected = {
|
||||||
|
"server": {
|
||||||
|
"name": "new-server-test",
|
||||||
|
"imageRef": "1",
|
||||||
|
"flavorRef": "2",
|
||||||
|
"metadata": {"one": "two", "open": "snack"},
|
||||||
|
"personality": [],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
self.assertEquals(request['body'], expected)
|
||||||
|
|
||||||
|
def test_multiple_personality_files(self):
|
||||||
|
serial_request = """
|
||||||
|
<server xmlns="http://docs.openstack.org/compute/api/v1.1"
|
||||||
|
name="new-server-test"
|
||||||
|
imageRef="1"
|
||||||
|
flavorRef="2">
|
||||||
|
<personality>
|
||||||
|
<file path="/etc/banner.txt">MQ==</file>
|
||||||
|
<file path="/etc/hosts">Mg==</file>
|
||||||
|
</personality>
|
||||||
|
</server>"""
|
||||||
|
request = self.deserializer.deserialize(serial_request, 'create')
|
||||||
|
expected = {
|
||||||
|
"server": {
|
||||||
|
"name": "new-server-test",
|
||||||
|
"imageRef": "1",
|
||||||
|
"flavorRef": "2",
|
||||||
|
"metadata": {},
|
||||||
|
"personality": [
|
||||||
|
{"path": "/etc/banner.txt", "contents": "MQ=="},
|
||||||
|
{"path": "/etc/hosts", "contents": "Mg=="},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
self.assertEquals(request['body'], expected)
|
||||||
|
|
||||||
|
def test_spec_request(self):
|
||||||
|
image_bookmark_link = "http://servers.api.openstack.org/1234/" + \
|
||||||
|
"images/52415800-8b69-11e0-9b19-734f6f006e54"
|
||||||
|
serial_request = """
|
||||||
|
<server xmlns="http://docs.openstack.org/compute/api/v1.1"
|
||||||
|
imageRef="%s"
|
||||||
|
flavorRef="52415800-8b69-11e0-9b19-734f1195ff37"
|
||||||
|
name="new-server-test">
|
||||||
|
<metadata>
|
||||||
|
<meta key="My Server Name">Apache1</meta>
|
||||||
|
</metadata>
|
||||||
|
<personality>
|
||||||
|
<file path="/etc/banner.txt">Mg==</file>
|
||||||
|
</personality>
|
||||||
|
</server>""" % (image_bookmark_link)
|
||||||
|
request = self.deserializer.deserialize(serial_request, 'create')
|
||||||
|
expected = {
|
||||||
|
"server": {
|
||||||
|
"name": "new-server-test",
|
||||||
|
"imageRef": "http://servers.api.openstack.org/1234/" + \
|
||||||
|
"images/52415800-8b69-11e0-9b19-734f6f006e54",
|
||||||
|
"flavorRef": "52415800-8b69-11e0-9b19-734f1195ff37",
|
||||||
|
"metadata": {"My Server Name": "Apache1"},
|
||||||
|
"personality": [
|
||||||
|
{
|
||||||
|
"path": "/etc/banner.txt",
|
||||||
|
"contents": "Mg==",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
self.assertEquals(request['body'], expected)
|
||||||
|
|
||||||
|
|
||||||
class TextAddressesXMLSerialization(test.TestCase):
|
class TextAddressesXMLSerialization(test.TestCase):
|
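The new V11 deserializer tests above feed attribute-based <server> documents (imageRef/flavorRef, nested <metadata> and <personality>) through create_instance_helper.ServerXMLDeserializer. Purely as an illustration of where the expected values come from, here is a bare-bones minidom walk over the same XML shape; the real deserializer is more involved:

from xml.dom import minidom


def sketch_parse_server(xml_string):
    """Toy extraction of the v1.1 <server> attributes and children."""
    node = minidom.parseString(xml_string).documentElement
    server = dict((attr, node.getAttribute(attr))
                  for attr in ("name", "imageRef", "flavorRef", "adminPass")
                  if node.hasAttribute(attr))
    server["metadata"] = dict(
        (meta.getAttribute("key"),
         meta.firstChild.nodeValue if meta.firstChild else "")
        for meta in node.getElementsByTagName("meta"))
    server["personality"] = [
        {"path": f.getAttribute("path"),
         "contents": f.firstChild.nodeValue if f.firstChild else ""}
        for f in node.getElementsByTagName("file")]
    return {"server": server}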
||||||
@@ -2899,10 +3150,12 @@ class ServersViewBuilderV11Test(test.TestCase):
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
def _get_instance(self):
|
def _get_instance(self):
|
||||||
|
created_at = datetime.datetime(2010, 10, 10, 12, 0, 0)
|
||||||
|
updated_at = datetime.datetime(2010, 11, 11, 11, 0, 0)
|
||||||
instance = {
|
instance = {
|
||||||
"id": 1,
|
"id": 1,
|
||||||
"created_at": "2010-10-10T12:00:00Z",
|
"created_at": created_at,
|
||||||
"updated_at": "2010-11-11T11:00:00Z",
|
"updated_at": updated_at,
|
||||||
"admin_pass": "",
|
"admin_pass": "",
|
||||||
"user_id": "",
|
"user_id": "",
|
||||||
"project_id": "",
|
"project_id": "",
|
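The view-builder fixture above now stores created_at/updated_at as datetime objects instead of the pre-formatted strings it used to hard-code. If the ISO-8601 form is still needed, it can be recovered with strftime; a quick check of the equivalence:

import datetime

created_at = datetime.datetime(2010, 10, 10, 12, 0, 0)
assert created_at.strftime('%Y-%m-%dT%H:%M:%SZ') == "2010-10-10T12:00:00Z"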
||||||
@@ -2950,7 +3203,7 @@ class ServersViewBuilderV11Test(test.TestCase):
|
|||||||
address_builder,
|
address_builder,
|
||||||
flavor_builder,
|
flavor_builder,
|
||||||
image_builder,
|
image_builder,
|
||||||
base_url
|
base_url,
|
||||||
)
|
)
|
||||||
return view_builder
|
return view_builder
|
||||||
|
|
||||||
@@ -3106,12 +3359,12 @@ class ServersViewBuilderV11Test(test.TestCase):
|
|||||||
},
|
},
|
||||||
"flavor": {
|
"flavor": {
|
||||||
"id": "1",
|
"id": "1",
|
||||||
"links": [
|
"links": [
|
||||||
{
|
{
|
||||||
"rel": "bookmark",
|
"rel": "bookmark",
|
||||||
"href": flavor_bookmark,
|
"href": flavor_bookmark,
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
"addresses": {},
|
"addresses": {},
|
||||||
"metadata": {
|
"metadata": {
|
||||||
@@ -3133,3 +3386,505 @@ class ServersViewBuilderV11Test(test.TestCase):
|
|||||||
|
|
||||||
output = self.view_builder.build(self.instance, True)
|
output = self.view_builder.build(self.instance, True)
|
||||||
self.assertDictMatch(output, expected_server)
|
self.assertDictMatch(output, expected_server)
|
||||||
|
|
||||||
|
|
||||||
|
class ServerXMLSerializationTest(test.TestCase):
|
||||||
|
|
||||||
|
TIMESTAMP = "2010-10-11T10:30:22Z"
|
||||||
|
SERVER_HREF = 'http://localhost/v1.1/servers/123'
|
||||||
|
SERVER_BOOKMARK = 'http://localhost/servers/123'
|
||||||
|
IMAGE_BOOKMARK = 'http://localhost/images/5'
|
||||||
|
FLAVOR_BOOKMARK = 'http://localhost/flavors/1'
|
||||||
|
|
||||||
|
def setUp(self):
|
||||||
|
self.maxDiff = None
|
||||||
|
test.TestCase.setUp(self)
|
||||||
|
|
||||||
|
def test_show(self):
|
||||||
|
serializer = servers.ServerXMLSerializer()
|
||||||
|
|
||||||
|
fixture = {
|
||||||
|
"server": {
|
||||||
|
"id": 1,
|
||||||
|
"uuid": FAKE_UUID,
|
||||||
|
'created': self.TIMESTAMP,
|
||||||
|
'updated': self.TIMESTAMP,
|
||||||
|
"progress": 0,
|
||||||
|
"name": "test_server",
|
||||||
|
"status": "BUILD",
|
||||||
|
"hostId": 'e4d909c290d0fb1ca068ffaddf22cbd0',
|
||||||
|
"image": {
|
||||||
|
"id": "5",
|
||||||
|
"links": [
|
||||||
|
{
|
||||||
|
"rel": "bookmark",
|
||||||
|
"href": self.IMAGE_BOOKMARK,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"flavor": {
|
||||||
|
"id": "1",
|
||||||
|
"links": [
|
||||||
|
{
|
||||||
|
"rel": "bookmark",
|
||||||
|
"href": self.FLAVOR_BOOKMARK,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"addresses": {
|
||||||
|
"network_one": [
|
||||||
|
{
|
||||||
|
"version": 4,
|
||||||
|
"addr": "67.23.10.138",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"version": 6,
|
||||||
|
"addr": "::babe:67.23.10.138",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
"network_two": [
|
||||||
|
{
|
||||||
|
"version": 4,
|
||||||
|
"addr": "67.23.10.139",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"version": 6,
|
||||||
|
"addr": "::babe:67.23.10.139",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"metadata": {
|
||||||
|
"Open": "Stack",
|
||||||
|
"Number": "1",
|
||||||
|
},
|
||||||
|
'links': [
|
||||||
|
{
|
||||||
|
'href': self.SERVER_HREF,
|
||||||
|
'rel': 'self',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'href': self.SERVER_BOOKMARK,
|
||||||
|
'rel': 'bookmark',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
output = serializer.serialize(fixture, 'show')
|
||||||
|
actual = minidom.parseString(output.replace(" ", ""))
|
||||||
|
|
||||||
|
expected_server_href = self.SERVER_HREF
|
||||||
|
expected_server_bookmark = self.SERVER_BOOKMARK
|
||||||
|
expected_image_bookmark = self.IMAGE_BOOKMARK
|
||||||
|
expected_flavor_bookmark = self.FLAVOR_BOOKMARK
|
||||||
|
expected_now = self.TIMESTAMP
|
||||||
|
expected_uuid = FAKE_UUID
|
||||||
|
expected = minidom.parseString("""
|
||||||
|
<server id="1"
|
||||||
|
uuid="%(expected_uuid)s"
|
||||||
|
xmlns="http://docs.openstack.org/compute/api/v1.1"
|
||||||
|
xmlns:atom="http://www.w3.org/2005/Atom"
|
||||||
|
name="test_server"
|
||||||
|
updated="%(expected_now)s"
|
||||||
|
created="%(expected_now)s"
|
||||||
|
hostId="e4d909c290d0fb1ca068ffaddf22cbd0"
|
||||||
|
status="BUILD"
|
||||||
|
progress="0">
|
||||||
|
<atom:link href="%(expected_server_href)s" rel="self"/>
|
||||||
|
<atom:link href="%(expected_server_bookmark)s" rel="bookmark"/>
|
||||||
|
<image id="5">
|
||||||
|
<atom:link rel="bookmark" href="%(expected_image_bookmark)s"/>
|
||||||
|
</image>
|
||||||
|
<flavor id="1">
|
||||||
|
<atom:link rel="bookmark" href="%(expected_flavor_bookmark)s"/>
|
||||||
|
</flavor>
|
||||||
|
<metadata>
|
||||||
|
<meta key="Open">
|
||||||
|
Stack
|
||||||
|
</meta>
|
||||||
|
<meta key="Number">
|
||||||
|
1
|
||||||
|
</meta>
|
||||||
|
</metadata>
|
||||||
|
<addresses>
|
||||||
|
<network id="network_one">
|
||||||
|
<ip version="4" addr="67.23.10.138"/>
|
||||||
|
<ip version="6" addr="::babe:67.23.10.138"/>
|
||||||
|
</network>
|
||||||
|
<network id="network_two">
|
||||||
|
<ip version="4" addr="67.23.10.139"/>
|
||||||
|
<ip version="6" addr="::babe:67.23.10.139"/>
|
||||||
|
</network>
|
||||||
|
</addresses>
|
||||||
|
</server>
|
||||||
|
""".replace(" ", "") % (locals()))
|
||||||
|
|
||||||
|
self.assertEqual(expected.toxml(), actual.toxml())
|
||||||
|
|
||||||
|
def test_create(self):
|
||||||
|
serializer = servers.ServerXMLSerializer()
|
||||||
|
|
||||||
|
fixture = {
|
||||||
|
"server": {
|
||||||
|
"id": 1,
|
||||||
|
"uuid": FAKE_UUID,
|
||||||
|
'created': self.TIMESTAMP,
|
||||||
|
'updated': self.TIMESTAMP,
|
||||||
|
"progress": 0,
|
||||||
|
"name": "test_server",
|
||||||
|
"status": "BUILD",
|
||||||
|
"hostId": "e4d909c290d0fb1ca068ffaddf22cbd0",
|
||||||
|
"adminPass": "test_password",
|
||||||
|
"image": {
|
||||||
|
"id": "5",
|
||||||
|
"links": [
|
||||||
|
{
|
||||||
|
"rel": "bookmark",
|
||||||
|
"href": self.IMAGE_BOOKMARK,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"flavor": {
|
||||||
|
"id": "1",
|
||||||
|
"links": [
|
||||||
|
{
|
||||||
|
"rel": "bookmark",
|
||||||
|
"href": self.FLAVOR_BOOKMARK,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"addresses": {
|
||||||
|
"network_one": [
|
||||||
|
{
|
||||||
|
"version": 4,
|
||||||
|
"addr": "67.23.10.138",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"version": 6,
|
||||||
|
"addr": "::babe:67.23.10.138",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
"network_two": [
|
||||||
|
{
|
||||||
|
"version": 4,
|
||||||
|
"addr": "67.23.10.139",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"version": 6,
|
||||||
|
"addr": "::babe:67.23.10.139",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"metadata": {
|
||||||
|
"Open": "Stack",
|
||||||
|
"Number": "1",
|
||||||
|
},
|
||||||
|
'links': [
|
||||||
|
{
|
||||||
|
'href': self.SERVER_HREF,
|
||||||
|
'rel': 'self',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'href': self.SERVER_BOOKMARK,
|
||||||
|
'rel': 'bookmark',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
output = serializer.serialize(fixture, 'create')
|
||||||
|
actual = minidom.parseString(output.replace(" ", ""))
|
||||||
|
|
||||||
|
expected_server_href = self.SERVER_HREF
|
||||||
|
expected_server_bookmark = self.SERVER_BOOKMARK
|
||||||
|
expected_image_bookmark = self.IMAGE_BOOKMARK
|
||||||
|
expected_flavor_bookmark = self.FLAVOR_BOOKMARK
|
||||||
|
expected_now = self.TIMESTAMP
|
||||||
|
expected_uuid = FAKE_UUID
|
||||||
|
expected = minidom.parseString("""
|
||||||
|
<server id="1"
|
||||||
|
uuid="%(expected_uuid)s"
|
||||||
|
xmlns="http://docs.openstack.org/compute/api/v1.1"
|
||||||
|
xmlns:atom="http://www.w3.org/2005/Atom"
|
||||||
|
name="test_server"
|
||||||
|
updated="%(expected_now)s"
|
||||||
|
created="%(expected_now)s"
|
||||||
|
hostId="e4d909c290d0fb1ca068ffaddf22cbd0"
|
||||||
|
status="BUILD"
|
||||||
|
adminPass="test_password"
|
||||||
|
progress="0">
|
||||||
|
<atom:link href="%(expected_server_href)s" rel="self"/>
|
||||||
|
<atom:link href="%(expected_server_bookmark)s" rel="bookmark"/>
|
||||||
|
<image id="5">
|
||||||
|
<atom:link rel="bookmark" href="%(expected_image_bookmark)s"/>
|
||||||
|
</image>
|
||||||
|
<flavor id="1">
|
||||||
|
<atom:link rel="bookmark" href="%(expected_flavor_bookmark)s"/>
|
||||||
|
</flavor>
|
||||||
|
<metadata>
|
||||||
|
<meta key="Open">
|
||||||
|
Stack
|
||||||
|
</meta>
|
||||||
|
<meta key="Number">
|
||||||
|
1
|
||||||
|
</meta>
|
||||||
|
</metadata>
|
||||||
|
<addresses>
|
||||||
|
<network id="network_one">
|
||||||
|
<ip version="4" addr="67.23.10.138"/>
|
||||||
|
<ip version="6" addr="::babe:67.23.10.138"/>
|
||||||
|
</network>
|
||||||
|
<network id="network_two">
|
||||||
|
<ip version="4" addr="67.23.10.139"/>
|
||||||
|
<ip version="6" addr="::babe:67.23.10.139"/>
|
||||||
|
</network>
|
||||||
|
</addresses>
|
||||||
|
</server>
|
||||||
|
""".replace(" ", "") % (locals()))
|
||||||
|
|
||||||
|
self.assertEqual(expected.toxml(), actual.toxml())
|
||||||
|
|
||||||
|
def test_index(self):
|
||||||
|
serializer = servers.ServerXMLSerializer()
|
||||||
|
|
||||||
|
expected_server_href = 'http://localhost/v1.1/servers/1'
|
||||||
|
expected_server_bookmark = 'http://localhost/servers/1'
|
||||||
|
expected_server_href_2 = 'http://localhost/v1.1/servers/2'
|
||||||
|
expected_server_bookmark_2 = 'http://localhost/servers/2'
|
||||||
|
fixture = {"servers": [
|
||||||
|
{
|
||||||
|
"id": 1,
|
||||||
|
"name": "test_server",
|
||||||
|
'links': [
|
||||||
|
{
|
||||||
|
'href': expected_server_href,
|
||||||
|
'rel': 'self',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'href': expected_server_bookmark,
|
||||||
|
'rel': 'bookmark',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": 2,
|
||||||
|
"name": "test_server_2",
|
||||||
|
'links': [
|
||||||
|
{
|
||||||
|
'href': expected_server_href_2,
|
||||||
|
'rel': 'self',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'href': expected_server_bookmark_2,
|
||||||
|
'rel': 'bookmark',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
]}
|
||||||
|
|
||||||
|
output = serializer.serialize(fixture, 'index')
|
||||||
|
actual = minidom.parseString(output.replace(" ", ""))
|
||||||
|
|
||||||
|
expected = minidom.parseString("""
|
||||||
|
<servers xmlns="http://docs.openstack.org/compute/api/v1.1"
|
||||||
|
xmlns:atom="http://www.w3.org/2005/Atom">
|
||||||
|
<server id="1" name="test_server">
|
||||||
|
<atom:link href="%(expected_server_href)s" rel="self"/>
|
||||||
|
<atom:link href="%(expected_server_bookmark)s" rel="bookmark"/>
|
||||||
|
</server>
|
||||||
|
<server id="2" name="test_server_2">
|
||||||
|
<atom:link href="%(expected_server_href_2)s" rel="self"/>
|
||||||
|
<atom:link href="%(expected_server_bookmark_2)s" rel="bookmark"/>
|
||||||
|
</server>
|
||||||
|
</servers>
|
||||||
|
""".replace(" ", "") % (locals()))
|
||||||
|
|
||||||
|
self.assertEqual(expected.toxml(), actual.toxml())
|
||||||
|
|
||||||
|
def test_detail(self):
|
||||||
|
serializer = servers.ServerXMLSerializer()
|
||||||
|
|
||||||
|
expected_server_href = 'http://localhost/v1.1/servers/1'
|
||||||
|
expected_server_bookmark = 'http://localhost/servers/1'
|
||||||
|
expected_image_bookmark = self.IMAGE_BOOKMARK
|
||||||
|
expected_flavor_bookmark = self.FLAVOR_BOOKMARK
|
||||||
|
expected_now = self.TIMESTAMP
|
||||||
|
expected_uuid = FAKE_UUID
|
||||||
|
|
||||||
|
expected_server_href_2 = 'http://localhost/v1.1/servers/2'
|
||||||
|
expected_server_bookmark_2 = 'http://localhost/servers/2'
|
||||||
|
fixture = {"servers": [
|
||||||
|
{
|
||||||
|
"id": 1,
|
||||||
|
"uuid": FAKE_UUID,
|
||||||
|
'created': self.TIMESTAMP,
|
||||||
|
'updated': self.TIMESTAMP,
|
||||||
|
"progress": 0,
|
||||||
|
"name": "test_server",
|
||||||
|
"status": "BUILD",
|
||||||
|
"hostId": 'e4d909c290d0fb1ca068ffaddf22cbd0',
|
||||||
|
"image": {
|
||||||
|
"id": "5",
|
||||||
|
"links": [
|
||||||
|
{
|
||||||
|
"rel": "bookmark",
|
||||||
|
"href": expected_image_bookmark,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"flavor": {
|
||||||
|
"id": "1",
|
||||||
|
"links": [
|
||||||
|
{
|
||||||
|
"rel": "bookmark",
|
||||||
|
"href": expected_flavor_bookmark,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"addresses": {
|
||||||
|
"network_one": [
|
||||||
|
{
|
||||||
|
"version": 4,
|
||||||
|
"addr": "67.23.10.138",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"version": 6,
|
||||||
|
"addr": "::babe:67.23.10.138",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"metadata": {
|
||||||
|
"Number": "1",
|
||||||
|
},
|
||||||
|
"links": [
|
||||||
|
{
|
||||||
|
"href": expected_server_href,
|
||||||
|
"rel": "self",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"href": expected_server_bookmark,
|
||||||
|
"rel": "bookmark",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": 2,
|
||||||
|
"uuid": FAKE_UUID,
|
||||||
|
'created': self.TIMESTAMP,
|
||||||
|
'updated': self.TIMESTAMP,
|
||||||
|
"progress": 100,
|
||||||
|
"name": "test_server_2",
|
||||||
|
"status": "ACTIVE",
|
||||||
|
"hostId": 'e4d909c290d0fb1ca068ffaddf22cbd0',
|
||||||
|
"image": {
|
||||||
|
"id": "5",
|
||||||
|
"links": [
|
||||||
|
{
|
||||||
|
"rel": "bookmark",
|
||||||
|
"href": expected_image_bookmark,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"flavor": {
|
||||||
|
"id": "1",
|
||||||
|
"links": [
|
||||||
|
{
|
||||||
|
"rel": "bookmark",
|
||||||
|
"href": expected_flavor_bookmark,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"addresses": {
|
||||||
|
"network_one": [
|
||||||
|
{
|
||||||
|
"version": 4,
|
||||||
|
"addr": "67.23.10.138",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"version": 6,
|
||||||
|
"addr": "::babe:67.23.10.138",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"metadata": {
|
||||||
|
"Number": "2",
|
||||||
|
},
|
||||||
|
"links": [
|
||||||
|
{
|
||||||
|
"href": expected_server_href_2,
|
||||||
|
"rel": "self",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"href": expected_server_bookmark_2,
|
||||||
|
"rel": "bookmark",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
]}
|
||||||
|
|
||||||
|
output = serializer.serialize(fixture, 'detail')
|
||||||
|
actual = minidom.parseString(output.replace(" ", ""))
|
||||||
|
|
||||||
|
expected = minidom.parseString("""
|
||||||
|
<servers xmlns="http://docs.openstack.org/compute/api/v1.1"
|
||||||
|
xmlns:atom="http://www.w3.org/2005/Atom">
|
||||||
|
<server id="1"
|
||||||
|
uuid="%(expected_uuid)s"
|
||||||
|
name="test_server"
|
||||||
|
updated="%(expected_now)s"
|
||||||
|
created="%(expected_now)s"
|
||||||
|
hostId="e4d909c290d0fb1ca068ffaddf22cbd0"
|
||||||
|
status="BUILD"
|
||||||
|
progress="0">
|
||||||
|
<atom:link href="%(expected_server_href)s" rel="self"/>
|
||||||
|
<atom:link href="%(expected_server_bookmark)s" rel="bookmark"/>
|
||||||
|
<image id="5">
|
||||||
|
<atom:link rel="bookmark" href="%(expected_image_bookmark)s"/>
|
||||||
|
</image>
|
||||||
|
<flavor id="1">
|
||||||
|
<atom:link rel="bookmark" href="%(expected_flavor_bookmark)s"/>
|
||||||
|
</flavor>
|
||||||
|
<metadata>
|
||||||
|
<meta key="Number">
|
||||||
|
1
|
||||||
|
</meta>
|
||||||
|
</metadata>
|
||||||
|
<addresses>
|
||||||
|
<network id="network_one">
|
||||||
|
<ip version="4" addr="67.23.10.138"/>
|
||||||
|
<ip version="6" addr="::babe:67.23.10.138"/>
|
||||||
|
</network>
|
||||||
|
</addresses>
|
||||||
|
</server>
|
||||||
|
<server id="2"
|
||||||
|
uuid="%(expected_uuid)s"
|
||||||
|
name="test_server_2"
|
||||||
|
updated="%(expected_now)s"
|
||||||
|
created="%(expected_now)s"
|
||||||
|
hostId="e4d909c290d0fb1ca068ffaddf22cbd0"
|
||||||
|
status="ACTIVE"
|
||||||
|
progress="100">
|
||||||
|
<atom:link href="%(expected_server_href_2)s" rel="self"/>
|
||||||
|
<atom:link href="%(expected_server_bookmark_2)s" rel="bookmark"/>
|
||||||
|
<image id="5">
|
||||||
|
<atom:link rel="bookmark" href="%(expected_image_bookmark)s"/>
|
||||||
|
</image>
|
||||||
|
<flavor id="1">
|
||||||
|
<atom:link rel="bookmark" href="%(expected_flavor_bookmark)s"/>
|
||||||
|
</flavor>
|
||||||
|
<metadata>
|
||||||
|
<meta key="Number">
|
||||||
|
2
|
||||||
|
</meta>
|
||||||
|
</metadata>
|
||||||
|
<addresses>
|
||||||
|
<network id="network_one">
|
||||||
|
<ip version="4" addr="67.23.10.138"/>
|
||||||
|
<ip version="6" addr="::babe:67.23.10.138"/>
|
||||||
|
</network>
|
||||||
|
</addresses>
|
||||||
|
</server>
|
||||||
|
</servers>
|
||||||
|
""".replace(" ", "") % (locals()))
|
||||||
|
|
||||||
|
self.assertEqual(expected.toxml(), actual.toxml())
|
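Each serializer test above compares XML by stripping the pretty-printing indent from both documents and then comparing their canonical minidom renderings. The same trick, factored into a helper for clarity (the four-space default indent is an assumption):

from xml.dom import minidom


def xml_equal(expected_str, actual_str, indent='    '):
    """Whitespace-insensitive comparison mirroring the tests above."""
    expected = minidom.parseString(expected_str.replace(indent, ""))
    actual = minidom.parseString(actual_str.replace(indent, ""))
    return expected.toxml() == actual.toxml()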
||||||
|
|||||||
@@ -305,5 +305,6 @@ class ServersTest(integrated_helpers._IntegratedTestBase):
|
|||||||
# Cleanup
|
# Cleanup
|
||||||
self._delete_server(server_id)
|
self._delete_server(server_id)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
unittest.main()
|
unittest.main()
|
||||||
|
|||||||
@@ -420,7 +420,7 @@ class ComputeTestCase(test.TestCase):
|
|||||||
def fake(*args, **kwargs):
|
def fake(*args, **kwargs):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
self.stubs.Set(self.compute.driver, 'finish_resize', fake)
|
self.stubs.Set(self.compute.driver, 'finish_migration', fake)
|
||||||
self.stubs.Set(self.compute.network_api, 'get_instance_nw_info', fake)
|
self.stubs.Set(self.compute.network_api, 'get_instance_nw_info', fake)
|
||||||
context = self.context.elevated()
|
context = self.context.elevated()
|
||||||
instance_id = self._create_instance()
|
instance_id = self._create_instance()
|
||||||
@@ -531,8 +531,8 @@ class ComputeTestCase(test.TestCase):
|
|||||||
def fake(*args, **kwargs):
|
def fake(*args, **kwargs):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
self.stubs.Set(self.compute.driver, 'finish_resize', fake)
|
self.stubs.Set(self.compute.driver, 'finish_migration', fake)
|
||||||
self.stubs.Set(self.compute.driver, 'revert_resize', fake)
|
self.stubs.Set(self.compute.driver, 'revert_migration', fake)
|
||||||
self.stubs.Set(self.compute.network_api, 'get_instance_nw_info', fake)
|
self.stubs.Set(self.compute.network_api, 'get_instance_nw_info', fake)
|
||||||
|
|
||||||
self.compute.run_instance(self.context, instance_id)
|
self.compute.run_instance(self.context, instance_id)
|
||||||
|
|||||||
@@ -66,7 +66,7 @@ class DbApiTestCase(test.TestCase):
|
|||||||
result = db.fixed_ip_get_all(self.context)
|
result = db.fixed_ip_get_all(self.context)
|
||||||
values = {'instance_type_id': FLAGS.default_instance_type,
|
values = {'instance_type_id': FLAGS.default_instance_type,
|
||||||
'image_ref': FLAGS.vpn_image_id,
|
'image_ref': FLAGS.vpn_image_id,
|
||||||
'project_id': self.project.id
|
'project_id': self.project.id,
|
||||||
}
|
}
|
||||||
instance = db.instance_create(self.context, values)
|
instance = db.instance_create(self.context, values)
|
||||||
result = db.instance_get_project_vpn(self.context, self.project.id)
|
result = db.instance_get_project_vpn(self.context, self.project.id)
|
||||||
@@ -76,7 +76,7 @@ class DbApiTestCase(test.TestCase):
|
|||||||
result = db.fixed_ip_get_all(self.context)
|
result = db.fixed_ip_get_all(self.context)
|
||||||
values = {'instance_type_id': FLAGS.default_instance_type,
|
values = {'instance_type_id': FLAGS.default_instance_type,
|
||||||
'image_ref': FLAGS.vpn_image_id,
|
'image_ref': FLAGS.vpn_image_id,
|
||||||
'project_id': self.project.id
|
'project_id': self.project.id,
|
||||||
}
|
}
|
||||||
instance = db.instance_create(self.context, values)
|
instance = db.instance_create(self.context, values)
|
||||||
_setup_networking(instance.id)
|
_setup_networking(instance.id)
|
||||||
|
|||||||
@@ -239,7 +239,7 @@ class LibvirtConnTestCase(test.TestCase):
|
|||||||
'mac_address': 'fake',
|
'mac_address': 'fake',
|
||||||
'ip_address': 'fake',
|
'ip_address': 'fake',
|
||||||
'dhcp_server': 'fake',
|
'dhcp_server': 'fake',
|
||||||
'extra_params': 'fake'
|
'extra_params': 'fake',
|
||||||
}
|
}
|
||||||
|
|
||||||
# Creating mocks
|
# Creating mocks
|
||||||
|
|||||||
@@ -20,6 +20,7 @@ Test suite for VMWareAPI.
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
from nova import db
|
from nova import db
|
||||||
|
from nova import context
|
||||||
from nova import flags
|
from nova import flags
|
||||||
from nova import test
|
from nova import test
|
||||||
from nova import utils
|
from nova import utils
|
||||||
@@ -40,6 +41,7 @@ class VMWareAPIVMTestCase(test.TestCase):
|
|||||||
|
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
super(VMWareAPIVMTestCase, self).setUp()
|
super(VMWareAPIVMTestCase, self).setUp()
|
||||||
|
self.context = context.RequestContext('fake', 'fake', False)
|
||||||
self.flags(vmwareapi_host_ip='test_url',
|
self.flags(vmwareapi_host_ip='test_url',
|
||||||
vmwareapi_host_username='test_username',
|
vmwareapi_host_username='test_username',
|
||||||
vmwareapi_host_password='test_pass')
|
vmwareapi_host_password='test_pass')
|
||||||
@@ -97,7 +99,7 @@ class VMWareAPIVMTestCase(test.TestCase):
|
|||||||
"""Create and spawn the VM."""
|
"""Create and spawn the VM."""
|
||||||
self._create_instance_in_the_db()
|
self._create_instance_in_the_db()
|
||||||
self.type_data = db.instance_type_get_by_name(None, 'm1.large')
|
self.type_data = db.instance_type_get_by_name(None, 'm1.large')
|
||||||
self.conn.spawn(self.instance, self.network_info)
|
self.conn.spawn(self.context, self.instance, self.network_info)
|
||||||
self._check_vm_record()
|
self._check_vm_record()
|
||||||
|
|
||||||
def _check_vm_record(self):
|
def _check_vm_record(self):
|
||||||
@@ -159,14 +161,14 @@ class VMWareAPIVMTestCase(test.TestCase):
|
|||||||
self._create_vm()
|
self._create_vm()
|
||||||
info = self.conn.get_info(1)
|
info = self.conn.get_info(1)
|
||||||
self._check_vm_info(info, power_state.RUNNING)
|
self._check_vm_info(info, power_state.RUNNING)
|
||||||
self.conn.snapshot(self.instance, "Test-Snapshot")
|
self.conn.snapshot(self.context, self.instance, "Test-Snapshot")
|
||||||
info = self.conn.get_info(1)
|
info = self.conn.get_info(1)
|
||||||
self._check_vm_info(info, power_state.RUNNING)
|
self._check_vm_info(info, power_state.RUNNING)
|
||||||
|
|
||||||
def test_snapshot_non_existent(self):
|
def test_snapshot_non_existent(self):
|
||||||
self._create_instance_in_the_db()
|
self._create_instance_in_the_db()
|
||||||
self.assertRaises(Exception, self.conn.snapshot, self.instance,
|
self.assertRaises(Exception, self.conn.snapshot, self.context,
|
||||||
"Test-Snapshot")
|
self.instance, "Test-Snapshot")
|
||||||
|
|
||||||
def test_reboot(self):
|
def test_reboot(self):
|
||||||
self._create_vm()
|
self._create_vm()
|
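These VMware tests now build a RequestContext in setUp and thread it through the driver calls. A minimal sketch of that pattern, using the same constructor arguments as the setUp above:

from nova import context

ctxt = context.RequestContext('fake', 'fake', False)
# The driver entry points exercised above now take the context first, e.g.:
#     conn.spawn(ctxt, instance, network_info)
#     conn.snapshot(ctxt, instance, "Test-Snapshot")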
||||||
|
|||||||
@@ -786,8 +786,15 @@ class XenAPIMigrateInstance(test.TestCase):
|
|||||||
conn = xenapi_conn.get_connection(False)
|
conn = xenapi_conn.get_connection(False)
|
||||||
conn.migrate_disk_and_power_off(instance, '127.0.0.1')
|
conn.migrate_disk_and_power_off(instance, '127.0.0.1')
|
||||||
|
|
||||||
def test_finish_resize(self):
|
def test_finish_migrate(self):
|
||||||
instance = db.instance_create(self.context, self.values)
|
instance = db.instance_create(self.context, self.values)
|
||||||
|
self.called = False
|
||||||
|
|
||||||
|
def fake_vdi_resize(*args, **kwargs):
|
||||||
|
self.called = True
|
||||||
|
|
||||||
|
self.stubs.Set(stubs.FakeSessionForMigrationTests,
|
||||||
|
"VDI_resize_online", fake_vdi_resize)
|
||||||
stubs.stubout_session(self.stubs, stubs.FakeSessionForMigrationTests)
|
stubs.stubout_session(self.stubs, stubs.FakeSessionForMigrationTests)
|
||||||
stubs.stubout_loopingcall_start(self.stubs)
|
stubs.stubout_loopingcall_start(self.stubs)
|
||||||
conn = xenapi_conn.get_connection(False)
|
conn = xenapi_conn.get_connection(False)
|
||||||
@@ -805,9 +812,73 @@ class XenAPIMigrateInstance(test.TestCase):
|
|||||||
'label': 'fake',
|
'label': 'fake',
|
||||||
'mac': 'DE:AD:BE:EF:00:00',
|
'mac': 'DE:AD:BE:EF:00:00',
|
||||||
'rxtx_cap': 3})]
|
'rxtx_cap': 3})]
|
||||||
conn.finish_resize(self.context, instance,
|
conn.finish_migration(self.context, instance,
|
||||||
dict(base_copy='hurr', cow='durr'),
|
dict(base_copy='hurr', cow='durr'),
|
||||||
network_info)
|
network_info, resize_instance=True)
|
||||||
|
self.assertEqual(self.called, True)
|
||||||
|
|
||||||
|
def test_finish_migrate_no_local_storage(self):
|
||||||
|
tiny_type_id = \
|
||||||
|
instance_types.get_instance_type_by_name('m1.tiny')['id']
|
||||||
|
self.values.update({'instance_type_id': tiny_type_id, 'local_gb': 0})
|
||||||
|
instance = db.instance_create(self.context, self.values)
|
||||||
|
|
||||||
|
def fake_vdi_resize(*args, **kwargs):
|
||||||
|
raise Exception("This shouldn't be called")
|
||||||
|
|
||||||
|
self.stubs.Set(stubs.FakeSessionForMigrationTests,
|
||||||
|
"VDI_resize_online", fake_vdi_resize)
|
||||||
|
stubs.stubout_session(self.stubs, stubs.FakeSessionForMigrationTests)
|
||||||
|
stubs.stubout_loopingcall_start(self.stubs)
|
||||||
|
conn = xenapi_conn.get_connection(False)
|
||||||
|
network_info = [({'bridge': 'fa0', 'id': 0, 'injected': False},
|
||||||
|
{'broadcast': '192.168.0.255',
|
||||||
|
'dns': ['192.168.0.1'],
|
||||||
|
'gateway': '192.168.0.1',
|
||||||
|
'gateway6': 'dead:beef::1',
|
||||||
|
'ip6s': [{'enabled': '1',
|
||||||
|
'ip': 'dead:beef::dcad:beff:feef:0',
|
||||||
|
'netmask': '64'}],
|
||||||
|
'ips': [{'enabled': '1',
|
||||||
|
'ip': '192.168.0.100',
|
||||||
|
'netmask': '255.255.255.0'}],
|
||||||
|
'label': 'fake',
|
||||||
|
'mac': 'DE:AD:BE:EF:00:00',
|
||||||
|
'rxtx_cap': 3})]
|
||||||
|
conn.finish_migration(self.context, instance,
|
||||||
|
dict(base_copy='hurr', cow='durr'),
|
||||||
|
network_info, resize_instance=True)
|
||||||
|
|
||||||
|
def test_finish_migrate_no_resize_vdi(self):
|
||||||
|
instance = db.instance_create(self.context, self.values)
|
||||||
|
|
||||||
|
def fake_vdi_resize(*args, **kwargs):
|
||||||
|
raise Exception("This shouldn't be called")
|
||||||
|
|
||||||
|
self.stubs.Set(stubs.FakeSessionForMigrationTests,
|
||||||
|
"VDI_resize_online", fake_vdi_resize)
|
||||||
|
stubs.stubout_session(self.stubs, stubs.FakeSessionForMigrationTests)
|
||||||
|
stubs.stubout_loopingcall_start(self.stubs)
|
||||||
|
conn = xenapi_conn.get_connection(False)
|
||||||
|
network_info = [({'bridge': 'fa0', 'id': 0, 'injected': False},
|
||||||
|
{'broadcast': '192.168.0.255',
|
||||||
|
'dns': ['192.168.0.1'],
|
||||||
|
'gateway': '192.168.0.1',
|
||||||
|
'gateway6': 'dead:beef::1',
|
||||||
|
'ip6s': [{'enabled': '1',
|
||||||
|
'ip': 'dead:beef::dcad:beff:feef:0',
|
||||||
|
'netmask': '64'}],
|
||||||
|
'ips': [{'enabled': '1',
|
||||||
|
'ip': '192.168.0.100',
|
||||||
|
'netmask': '255.255.255.0'}],
|
||||||
|
'label': 'fake',
|
||||||
|
'mac': 'DE:AD:BE:EF:00:00',
|
||||||
|
'rxtx_cap': 3})]
|
||||||
|
|
||||||
|
# Resize instance would be determined by the compute call
|
||||||
|
conn.finish_migration(self.context, instance,
|
||||||
|
dict(base_copy='hurr', cow='durr'),
|
||||||
|
network_info, resize_instance=False)
|
||||||
|
|
||||||
|
|
||||||
class XenAPIDetermineDiskImageTestCase(test.TestCase):
|
class XenAPIDetermineDiskImageTestCase(test.TestCase):
|
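The new migration tests above lean on two complementary stubs for VDI_resize_online: one that records that a resize happened, and one that fails loudly if it is ever reached. A condensed, self-contained version of that pattern:

called = {'resize': False}


def fake_vdi_resize(*args, **kwargs):
    """Stub used on paths where the online resize is expected to run."""
    called['resize'] = True


def fail_vdi_resize(*args, **kwargs):
    """Stub used on paths where no resize should occur."""
    raise Exception("This shouldn't be called")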
||||||
|
|||||||
@@ -65,7 +65,7 @@ class ComputeDriver(object):
|
|||||||
# TODO(Vek): Need to pass context in for access to auth_token
|
# TODO(Vek): Need to pass context in for access to auth_token
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
def spawn(self, context, instance, network_info,
|
def spawn(self, cxt, instance, network_info,
|
||||||
block_device_mapping=None):
|
block_device_mapping=None):
|
||||||
"""Launch a VM for the specified instance"""
|
"""Launch a VM for the specified instance"""
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
@@ -131,15 +131,16 @@ class ComputeDriver(object):
|
|||||||
# TODO(Vek): Need to pass context in for access to auth_token
|
# TODO(Vek): Need to pass context in for access to auth_token
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
def snapshot(self, context, instance, image_id):
|
def snapshot(self, cxt, instance, image_id):
|
||||||
"""Create snapshot from a running VM instance."""
|
"""Create snapshot from a running VM instance."""
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
def finish_resize(self, context, instance, disk_info):
|
def finish_migration(self, cxt, instance, disk_info, network_info,
|
||||||
|
resize_instance):
|
||||||
"""Completes a resize, turning on the migrated instance"""
|
"""Completes a resize, turning on the migrated instance"""
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
def revert_resize(self, instance):
|
def revert_migration(self, instance):
|
||||||
"""Reverts a resize, powering back on the instance"""
|
"""Reverts a resize, powering back on the instance"""
|
||||||
# TODO(Vek): Need to pass context in for access to auth_token
|
# TODO(Vek): Need to pass context in for access to auth_token
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
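The driver contract above now exposes finish_migration (with network_info and an explicit resize_instance flag) and revert_migration in place of the resize-named hooks. A hypothetical no-op subclass, just to show the reshaped signatures; the import path is assumed to be nova.virt.driver:

from nova.virt import driver


class NoopDriver(driver.ComputeDriver):
    """Illustrative only: implements the renamed migration hooks as no-ops."""

    def finish_migration(self, cxt, instance, disk_info, network_info,
                         resize_instance):
        """Completes a resize, turning on the migrated instance"""
        pass

    def revert_migration(self, instance):
        """Reverts a resize, powering back on the instance"""
        pass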
||||||
|
|||||||
@@ -129,7 +129,7 @@ class FakeConnection(driver.ComputeDriver):
|
|||||||
info_list.append(self._map_to_instance_info(instance))
|
info_list.append(self._map_to_instance_info(instance))
|
||||||
return info_list
|
return info_list
|
||||||
|
|
||||||
def spawn(self, context, instance, network_info,
|
def spawn(self, cxt, instance, network_info,
|
||||||
block_device_mapping=None):
|
block_device_mapping=None):
|
||||||
"""
|
"""
|
||||||
Create a new instance/VM/domain on the virtualization platform.
|
Create a new instance/VM/domain on the virtualization platform.
|
||||||
@@ -154,7 +154,7 @@ class FakeConnection(driver.ComputeDriver):
|
|||||||
fake_instance = FakeInstance(name, state)
|
fake_instance = FakeInstance(name, state)
|
||||||
self.instances[name] = fake_instance
|
self.instances[name] = fake_instance
|
||||||
|
|
||||||
def snapshot(self, context, instance, name):
|
def snapshot(self, cxt, instance, name):
|
||||||
"""
|
"""
|
||||||
Snapshots the specified instance.
|
Snapshots the specified instance.
|
||||||
|
|
||||||
|
|||||||
@@ -139,7 +139,7 @@ class HyperVConnection(driver.ComputeDriver):
|
|||||||
|
|
||||||
return instance_infos
|
return instance_infos
|
||||||
|
|
||||||
def spawn(self, context, instance, network_info,
|
def spawn(self, cxt, instance, network_info,
|
||||||
block_device_mapping=None):
|
block_device_mapping=None):
|
||||||
""" Create a new VM and start it."""
|
""" Create a new VM and start it."""
|
||||||
vm = self._lookup(instance.name)
|
vm = self._lookup(instance.name)
|
||||||
|
|||||||
@@ -396,7 +396,7 @@ class LibvirtConnection(driver.ComputeDriver):
|
|||||||
virt_dom.detachDevice(xml)
|
virt_dom.detachDevice(xml)
|
||||||
|
|
||||||
@exception.wrap_exception()
|
@exception.wrap_exception()
|
||||||
def snapshot(self, context, instance, image_href):
|
def snapshot(self, cxt, instance, image_href):
|
||||||
"""Create snapshot from a running VM instance.
|
"""Create snapshot from a running VM instance.
|
||||||
|
|
||||||
This command only works with qemu 0.14+, the qemu_img flag is
|
This command only works with qemu 0.14+, the qemu_img flag is
|
||||||
@@ -592,7 +592,7 @@ class LibvirtConnection(driver.ComputeDriver):
|
|||||||
# NOTE(ilyaalekseyev): Implementation like in multinics
|
# NOTE(ilyaalekseyev): Implementation like in multinics
|
||||||
# for xenapi(tr3buchet)
|
# for xenapi(tr3buchet)
|
||||||
@exception.wrap_exception()
|
@exception.wrap_exception()
|
||||||
def spawn(self, context, instance, network_info,
|
def spawn(self, cxt, instance, network_info,
|
||||||
block_device_mapping=None):
|
block_device_mapping=None):
|
||||||
xml = self.to_xml(instance, False, network_info=network_info,
|
xml = self.to_xml(instance, False, network_info=network_info,
|
||||||
block_device_mapping=block_device_mapping)
|
block_device_mapping=block_device_mapping)
|
||||||
|
|||||||
@@ -89,7 +89,7 @@ class VMWareVMOps(object):
|
|||||||
LOG.debug(_("Got total of %s instances") % str(len(lst_vm_names)))
|
LOG.debug(_("Got total of %s instances") % str(len(lst_vm_names)))
|
||||||
return lst_vm_names
|
return lst_vm_names
|
||||||
|
|
||||||
def spawn(self, context, instance, network_info):
|
def spawn(self, cxt, instance, network_info):
|
||||||
"""
|
"""
|
||||||
Creates a VM instance.
|
Creates a VM instance.
|
||||||
|
|
||||||
@@ -329,7 +329,7 @@ class VMWareVMOps(object):
|
|||||||
LOG.debug(_("Powered on the VM instance %s") % instance.name)
|
LOG.debug(_("Powered on the VM instance %s") % instance.name)
|
||||||
_power_on_vm()
|
_power_on_vm()
|
||||||
|
|
||||||
def snapshot(self, context, instance, snapshot_name):
|
def snapshot(self, cxt, instance, snapshot_name):
|
||||||
"""
|
"""
|
||||||
Create snapshot from a running VM instance.
|
Create snapshot from a running VM instance.
|
||||||
Steps followed are:
|
Steps followed are:
|
||||||
|
|||||||
@@ -124,14 +124,14 @@ class VMWareESXConnection(driver.ComputeDriver):
|
|||||||
"""List VM instances."""
|
"""List VM instances."""
|
||||||
return self._vmops.list_instances()
|
return self._vmops.list_instances()
|
||||||
|
|
||||||
def spawn(self, context, instance, network_info,
|
def spawn(self, cxt, instance, network_info,
|
||||||
block_device_mapping=None):
|
block_device_mapping=None):
|
||||||
"""Create VM instance."""
|
"""Create VM instance."""
|
||||||
self._vmops.spawn(context, instance, network_info)
|
self._vmops.spawn(cxt, instance, network_info)
|
||||||
|
|
||||||
def snapshot(self, context, instance, name):
|
def snapshot(self, cxt, instance, name):
|
||||||
"""Create snapshot from a running VM instance."""
|
"""Create snapshot from a running VM instance."""
|
||||||
self._vmops.snapshot(context, instance, name)
|
self._vmops.snapshot(cxt, instance, name)
|
||||||
|
|
||||||
def reboot(self, instance, network_info):
|
def reboot(self, instance, network_info):
|
||||||
"""Reboot VM instance."""
|
"""Reboot VM instance."""
|
||||||
|
|||||||
@@ -359,7 +359,7 @@ class VMHelper(HelperBase):
|
|||||||
return os.path.join(FLAGS.xenapi_sr_base_path, sr_uuid)
|
return os.path.join(FLAGS.xenapi_sr_base_path, sr_uuid)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def upload_image(cls, context, session, instance, vdi_uuids, image_id):
|
def upload_image(cls, ctx, session, instance, vdi_uuids, image_id):
|
||||||
""" Requests that the Glance plugin bundle the specified VDIs and
|
""" Requests that the Glance plugin bundle the specified VDIs and
|
||||||
push them into Glance using the specified human-friendly name.
|
push them into Glance using the specified human-friendly name.
|
||||||
"""
|
"""
|
||||||
@@ -378,14 +378,14 @@ class VMHelper(HelperBase):
|
|||||||
'glance_port': glance_port,
|
'glance_port': glance_port,
|
||||||
'sr_path': cls.get_sr_path(session),
|
'sr_path': cls.get_sr_path(session),
|
||||||
'os_type': os_type,
|
'os_type': os_type,
|
||||||
'auth_token': getattr(context, 'auth_token', None)}
|
'auth_token': getattr(ctx, 'auth_token', None)}
|
||||||
|
|
||||||
kwargs = {'params': pickle.dumps(params)}
|
kwargs = {'params': pickle.dumps(params)}
|
||||||
task = session.async_call_plugin('glance', 'upload_vhd', kwargs)
|
task = session.async_call_plugin('glance', 'upload_vhd', kwargs)
|
||||||
session.wait_for_task(task, instance.id)
|
session.wait_for_task(task, instance.id)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def fetch_image(cls, context, session, instance_id, image, user, project,
|
def fetch_image(cls, ctx, session, instance_id, image, user, project,
|
||||||
image_type):
|
image_type):
|
||||||
"""
|
"""
|
||||||
image_type is interpreted as an ImageType instance
|
image_type is interpreted as an ImageType instance
|
||||||
@@ -400,7 +400,7 @@ class VMHelper(HelperBase):
|
|||||||
access = AuthManager().get_access_key(user, project)
|
access = AuthManager().get_access_key(user, project)
|
||||||
|
|
||||||
if FLAGS.xenapi_image_service == 'glance':
|
if FLAGS.xenapi_image_service == 'glance':
|
||||||
return cls._fetch_image_glance(context, session, instance_id,
|
return cls._fetch_image_glance(ctx, session, instance_id,
|
||||||
image, access, image_type)
|
image, access, image_type)
|
||||||
else:
|
else:
|
||||||
return cls._fetch_image_objectstore(session, instance_id, image,
|
return cls._fetch_image_objectstore(session, instance_id, image,
|
||||||
@@ -408,7 +408,7 @@ class VMHelper(HelperBase):
|
|||||||
image_type)
|
image_type)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def _fetch_image_glance_vhd(cls, context, session, instance_id, image,
|
def _fetch_image_glance_vhd(cls, ctx, session, instance_id, image,
|
||||||
access, image_type):
|
access, image_type):
|
||||||
"""Tell glance to download an image and put the VHDs into the SR
|
"""Tell glance to download an image and put the VHDs into the SR
|
||||||
|
|
||||||
@@ -431,7 +431,7 @@ class VMHelper(HelperBase):
|
|||||||
'glance_port': glance_port,
|
'glance_port': glance_port,
|
||||||
'uuid_stack': uuid_stack,
|
'uuid_stack': uuid_stack,
|
||||||
'sr_path': cls.get_sr_path(session),
|
'sr_path': cls.get_sr_path(session),
|
||||||
'auth_token': getattr(context, 'auth_token', None)}
|
'auth_token': getattr(ctx, 'auth_token', None)}
|
||||||
|
|
||||||
kwargs = {'params': pickle.dumps(params)}
|
kwargs = {'params': pickle.dumps(params)}
|
||||||
task = session.async_call_plugin('glance', 'download_vhd', kwargs)
|
task = session.async_call_plugin('glance', 'download_vhd', kwargs)
|
||||||
@@ -457,7 +457,7 @@ class VMHelper(HelperBase):
|
|||||||
return vdis
|
return vdis
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def _fetch_image_glance_disk(cls, context, session, instance_id, image,
|
def _fetch_image_glance_disk(cls, ctx, session, instance_id, image,
|
||||||
access, image_type):
|
access, image_type):
|
||||||
"""Fetch the image from Glance
|
"""Fetch the image from Glance
|
||||||
|
|
||||||
@@ -477,7 +477,7 @@ class VMHelper(HelperBase):
|
|||||||
sr_ref = safe_find_sr(session)
|
sr_ref = safe_find_sr(session)
|
||||||
|
|
||||||
glance_client, image_id = nova.image.get_glance_client(image)
|
glance_client, image_id = nova.image.get_glance_client(image)
|
||||||
glance_client.set_auth_token(getattr(context, 'auth_token', None))
|
glance_client.set_auth_token(getattr(ctx, 'auth_token', None))
|
||||||
meta, image_file = glance_client.get_image(image_id)
|
meta, image_file = glance_client.get_image(image_id)
|
||||||
virtual_size = int(meta['size'])
|
virtual_size = int(meta['size'])
|
||||||
vdi_size = virtual_size
|
vdi_size = virtual_size
|
||||||
@@ -592,7 +592,7 @@ class VMHelper(HelperBase):
|
|||||||
return image_type
|
return image_type
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def _fetch_image_glance(cls, context, session, instance_id, image, access,
|
def _fetch_image_glance(cls, ctx, session, instance_id, image, access,
|
||||||
image_type):
|
image_type):
|
||||||
"""Fetch image from glance based on image type.
|
"""Fetch image from glance based on image type.
|
||||||
|
|
||||||
@@ -600,10 +600,10 @@ class VMHelper(HelperBase):
|
|||||||
A list of dictionaries that describe VDIs, otherwise
|
A list of dictionaries that describe VDIs, otherwise
|
||||||
"""
|
"""
|
||||||
if image_type == ImageType.DISK_VHD:
|
if image_type == ImageType.DISK_VHD:
|
||||||
return cls._fetch_image_glance_vhd(context,
|
return cls._fetch_image_glance_vhd(ctx,
|
||||||
session, instance_id, image, access, image_type)
|
session, instance_id, image, access, image_type)
|
||||||
else:
|
else:
|
||||||
return cls._fetch_image_glance_disk(context,
|
return cls._fetch_image_glance_disk(ctx,
|
||||||
session, instance_id, image, access, image_type)
|
session, instance_id, image, access, image_type)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
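A short note on the parameter renames running through these helpers: with a module-level `from nova import context` import, a parameter that is also named `context` shadows that module inside the method, leaving its helpers unreachable there. Renaming the parameter keeps both usable; a sketch, with the function name invented and nova.context.get_admin_context assumed to be available:

from nova import context


def upload_sketch(ctx, session):
    token = getattr(ctx, 'auth_token', None)   # per-request context object
    admin_ctx = context.get_admin_context()    # module-level helper still visible
    return token, admin_ctx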
||||||
|
|||||||
@@ -110,17 +110,19 @@ class VMOps(object):
|
|||||||
instance_infos.append(instance_info)
|
instance_infos.append(instance_info)
|
||||||
return instance_infos
|
return instance_infos
|
||||||
|
|
||||||
def revert_resize(self, instance):
|
def revert_migration(self, instance):
|
||||||
vm_ref = VMHelper.lookup(self._session, instance.name)
|
vm_ref = VMHelper.lookup(self._session, instance.name)
|
||||||
self._start(instance, vm_ref)
|
self._start(instance, vm_ref)
|
||||||
|
|
||||||
def finish_resize(self, context, instance, disk_info, network_info):
|
def finish_migration(self, ctx, instance, disk_info, network_info,
|
||||||
|
resize_instance):
|
||||||
vdi_uuid = self.link_disks(instance, disk_info['base_copy'],
|
vdi_uuid = self.link_disks(instance, disk_info['base_copy'],
|
||||||
disk_info['cow'])
|
disk_info['cow'])
|
||||||
vm_ref = self._create_vm(context, instance,
|
vm_ref = self._create_vm(ctx, instance,
|
||||||
[dict(vdi_type='os', vdi_uuid=vdi_uuid)],
|
[dict(vdi_type='os', vdi_uuid=vdi_uuid)],
|
||||||
network_info)
|
network_info)
|
||||||
self.resize_instance(instance, vdi_uuid)
|
if resize_instance:
|
||||||
|
self.resize_instance(instance, vdi_uuid)
|
||||||
self._spawn(instance, vm_ref)
|
self._spawn(instance, vm_ref)
|
||||||
|
|
||||||
def _start(self, instance, vm_ref=None):
|
def _start(self, instance, vm_ref=None):
|
||||||
@@ -133,20 +135,20 @@ class VMOps(object):
|
|||||||
LOG.debug(_("Starting instance %s"), instance.name)
|
LOG.debug(_("Starting instance %s"), instance.name)
|
||||||
self._session.call_xenapi('VM.start', vm_ref, False, False)
|
self._session.call_xenapi('VM.start', vm_ref, False, False)
|
||||||
|
|
||||||
def _create_disks(self, context, instance):
|
def _create_disks(self, ctx, instance):
|
||||||
user = AuthManager().get_user(instance.user_id)
|
user = AuthManager().get_user(instance.user_id)
|
||||||
project = AuthManager().get_project(instance.project_id)
|
project = AuthManager().get_project(instance.project_id)
|
||||||
disk_image_type = VMHelper.determine_disk_image_type(instance)
|
disk_image_type = VMHelper.determine_disk_image_type(instance)
|
||||||
vdis = VMHelper.fetch_image(context, self._session,
|
vdis = VMHelper.fetch_image(ctx, self._session,
|
||||||
instance.id, instance.image_ref, user, project,
|
instance.id, instance.image_ref, user, project,
|
||||||
disk_image_type)
|
disk_image_type)
|
||||||
return vdis
|
return vdis
|
||||||
|
|
||||||
def spawn(self, context, instance, network_info):
|
def spawn(self, ctx, instance, network_info):
|
||||||
vdis = None
|
vdis = None
|
||||||
try:
|
try:
|
||||||
vdis = self._create_disks(context, instance)
|
vdis = self._create_disks(ctx, instance)
|
||||||
vm_ref = self._create_vm(context, instance, vdis, network_info)
|
vm_ref = self._create_vm(ctx, instance, vdis, network_info)
|
||||||
self._spawn(instance, vm_ref)
|
self._spawn(instance, vm_ref)
|
||||||
except (self.XenAPI.Failure, OSError, IOError) as spawn_error:
|
except (self.XenAPI.Failure, OSError, IOError) as spawn_error:
|
||||||
LOG.exception(_("instance %s: Failed to spawn"),
|
LOG.exception(_("instance %s: Failed to spawn"),
|
||||||
@@ -160,7 +162,7 @@ class VMOps(object):
|
|||||||
"""Spawn a rescue instance."""
|
"""Spawn a rescue instance."""
|
||||||
self.spawn(instance)
|
self.spawn(instance)
|
||||||
|
|
||||||
def _create_vm(self, context, instance, vdis, network_info):
|
def _create_vm(self, ctx, instance, vdis, network_info):
|
||||||
"""Create VM instance."""
|
"""Create VM instance."""
|
||||||
instance_name = instance.name
|
instance_name = instance.name
|
||||||
vm_ref = VMHelper.lookup(self._session, instance_name)
|
vm_ref = VMHelper.lookup(self._session, instance_name)
|
||||||
@@ -184,11 +186,11 @@ class VMOps(object):
|
|||||||
ramdisk = None
|
ramdisk = None
|
||||||
try:
|
try:
|
||||||
if instance.kernel_id:
|
if instance.kernel_id:
|
||||||
kernel = VMHelper.fetch_image(context, self._session,
|
kernel = VMHelper.fetch_image(ctx, self._session,
|
||||||
instance.id, instance.kernel_id, user, project,
|
instance.id, instance.kernel_id, user, project,
|
||||||
ImageType.KERNEL)[0]
|
ImageType.KERNEL)[0]
|
||||||
if instance.ramdisk_id:
|
if instance.ramdisk_id:
|
||||||
ramdisk = VMHelper.fetch_image(context, self._session,
|
ramdisk = VMHelper.fetch_image(ctx, self._session,
|
||||||
instance.id, instance.ramdisk_id, user, project,
|
instance.id, instance.ramdisk_id, user, project,
|
||||||
ImageType.RAMDISK)[0]
|
ImageType.RAMDISK)[0]
|
||||||
# Create the VM ref and attach the first disk
|
# Create the VM ref and attach the first disk
|
||||||
@@ -440,10 +442,10 @@ class VMOps(object):
|
|||||||
vm,
|
vm,
|
||||||
"start")
|
"start")
|
||||||
|
|
||||||
def snapshot(self, context, instance, image_id):
|
def snapshot(self, ctx, instance, image_id):
|
||||||
"""Create snapshot from a running VM instance.
|
"""Create snapshot from a running VM instance.
|
||||||
|
|
||||||
:param context: request context
|
:param ctx: request context
|
||||||
:param instance: instance to be snapshotted
|
:param instance: instance to be snapshotted
|
||||||
:param image_id: id of image to upload to
|
:param image_id: id of image to upload to
|
||||||
|
|
||||||
@@ -468,7 +470,7 @@ class VMOps(object):
         try:
             template_vm_ref, template_vdi_uuids = self._get_snapshot(instance)
             # call plugin to ship snapshot off to glance
-            VMHelper.upload_image(context,
+            VMHelper.upload_image(ctx,
                     self._session, instance, template_vdi_uuids, image_id)
         finally:
             if template_vm_ref:
@@ -569,18 +571,22 @@ class VMOps(object):
         return new_cow_uuid

     def resize_instance(self, instance, vdi_uuid):
-        """Resize a running instance by changing it's RAM and disk size."""
+        """Resize a running instance by changing its RAM and disk size."""
         #TODO(mdietz): this will need to be adjusted for swap later
         #The new disk size must be in bytes

-        new_disk_size = str(instance.local_gb * 1024 * 1024 * 1024)
-        instance_name = instance.name
-        instance_local_gb = instance.local_gb
-        LOG.debug(_("Resizing VDI %(vdi_uuid)s for instance %(instance_name)s."
-                    " Expanding to %(instance_local_gb)d GB") % locals())
-        vdi_ref = self._session.call_xenapi('VDI.get_by_uuid', vdi_uuid)
-        self._session.call_xenapi('VDI.resize_online', vdi_ref, new_disk_size)
-        LOG.debug(_("Resize instance %s complete") % (instance.name))
+        new_disk_size = instance.local_gb * 1024 * 1024 * 1024
+        if new_disk_size > 0:
+            instance_name = instance.name
+            instance_local_gb = instance.local_gb
+            LOG.debug(_("Resizing VDI %(vdi_uuid)s for instance"
+                        "%(instance_name)s. Expanding to %(instance_local_gb)d"
+                        " GB") % locals())
+            vdi_ref = self._session.call_xenapi('VDI.get_by_uuid', vdi_uuid)
+            # for an instance with no local storage
+            self._session.call_xenapi('VDI.resize_online', vdi_ref,
+                    str(new_disk_size))
+            LOG.debug(_("Resize instance %s complete") % (instance.name))

     def reboot(self, instance):
         """Reboot VM instance."""
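(The resize_instance hunk above now computes the target size as an integer and skips the XenAPI call entirely when the instance has no local storage, i.e. when local_gb is 0. A standalone sketch of just that guard, where `call_resize_online` stands in for the real `self._session.call_xenapi('VDI.resize_online', ...)` call and is not an actual nova/XenAPI helper:)

    # Sketch of the guard introduced above; `call_resize_online` is a
    # placeholder for the XenAPI session call.
    def maybe_resize_disk(local_gb, call_resize_online):
        # XenAPI expects the new size in bytes, passed as a string
        new_disk_size = local_gb * 1024 * 1024 * 1024
        if new_disk_size > 0:
            call_resize_online(str(new_disk_size))
            return True
        # flavors with no local storage (local_gb == 0) skip the resize
        return False

    # maybe_resize_disk(20, print)  -> prints '21474836480', returns True
    # maybe_resize_disk(0, print)   -> returns False without touching XenAPI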
@@ -187,22 +187,24 @@ class XenAPIConnection(driver.ComputeDriver):
     def list_instances_detail(self):
         return self._vmops.list_instances_detail()

-    def spawn(self, context, instance, network_info,
+    def spawn(self, cxt, instance, network_info,
               block_device_mapping=None):
         """Create VM instance"""
-        self._vmops.spawn(context, instance, network_info)
+        self._vmops.spawn(cxt, instance, network_info)

-    def revert_resize(self, instance):
+    def revert_migration(self, instance):
         """Reverts a resize, powering back on the instance"""
         self._vmops.revert_resize(instance)

-    def finish_resize(self, context, instance, disk_info, network_info):
+    def finish_migration(self, cxt, instance, disk_info, network_info,
+                         resize_instance=False):
         """Completes a resize, turning on the migrated instance"""
-        self._vmops.finish_resize(context, instance, disk_info, network_info)
+        self._vmops.finish_migration(cxt, instance, disk_info,
+                                     network_info, resize_instance)

-    def snapshot(self, context, instance, image_id):
+    def snapshot(self, cxt, instance, image_id):
         """ Create snapshot from a running VM instance """
-        self._vmops.snapshot(context, instance, image_id)
+        self._vmops.snapshot(cxt, instance, image_id)

     def reboot(self, instance, network_info):
         """Reboot VM instance"""
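(With the driver-level renames above, callers invoke revert_migration/finish_migration instead of revert_resize/finish_resize, and finish_migration gains an optional resize_instance flag defaulting to False. A hedged sketch of a caller, where `driver`, `ctx`, `instance`, `disk_info` and `network_info` are whatever the compute layer already holds; this is not the actual compute-manager code:)

    # Sketch only: invoking the renamed driver entry points.
    def complete_or_rollback(driver, ctx, instance, disk_info, network_info,
                             migration_succeeded, grow_disk):
        if migration_succeeded:
            # resize_instance=True also asks the driver to resize the disk
            driver.finish_migration(ctx, instance, disk_info, network_info,
                                    resize_instance=grow_disk)
        else:
            # power the original instance back on
            driver.revert_migration(instance)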