Merge w/ trunk.
@@ -537,7 +537,7 @@ class FloatingIpCommands(object):
         for floating_ip in floating_ips:
             instance = None
             if floating_ip['fixed_ip']:
-                instance = floating_ip['fixed_ip']['instance']['ec2_id']
+                instance = floating_ip['fixed_ip']['instance']['hostname']
             print "%s\t%s\t%s" % (floating_ip['host'],
                                   floating_ip['address'],
                                   instance)
@@ -296,6 +296,7 @@ DEFINE_bool('fake_network', False,
             'should we use fake network devices and addresses')
 DEFINE_string('rabbit_host', 'localhost', 'rabbit host')
 DEFINE_integer('rabbit_port', 5672, 'rabbit port')
+DEFINE_bool('rabbit_use_ssl', False, 'connect over SSL')
 DEFINE_string('rabbit_userid', 'guest', 'rabbit userid')
 DEFINE_string('rabbit_password', 'guest', 'rabbit password')
 DEFINE_string('rabbit_virtual_host', '/', 'rabbit virtual host')
@@ -65,6 +65,7 @@ class Connection(carrot_connection.BrokerConnection):
         if new or not hasattr(cls, '_instance'):
             params = dict(hostname=FLAGS.rabbit_host,
                           port=FLAGS.rabbit_port,
+                          ssl=FLAGS.rabbit_use_ssl,
                           userid=FLAGS.rabbit_userid,
                           password=FLAGS.rabbit_password,
                           virtual_host=FLAGS.rabbit_virtual_host)
@@ -226,7 +226,7 @@ class JsonFilter(HostFilter):
         required_disk = instance_type['local_gb']
         query = ['and',
                  ['>=', '$compute.host_memory_free', required_ram],
-                 ['>=', '$compute.disk_available', required_disk]
+                 ['>=', '$compute.disk_available', required_disk],
                  ]
         return (self._full_name(), json.dumps(query))
 
@@ -133,13 +133,14 @@ class HostFilterTestCase(test.TestCase):
         raw = ['or',
                ['and',
                 ['<', '$compute.host_memory_free', 30],
-                ['<', '$compute.disk_available', 300]
+                ['<', '$compute.disk_available', 300],
                 ],
                ['and',
                 ['>', '$compute.host_memory_free', 70],
-                ['>', '$compute.disk_available', 700]
-                ]
+                ['>', '$compute.disk_available', 700],
+                ],
                ]
 
         cooked = json.dumps(raw)
         hosts = hf.filter_hosts(self.zone_manager, cooked)
 
@@ -183,13 +184,11 @@ class HostFilterTestCase(test.TestCase):
         self.assertTrue(hf.filter_hosts(self.zone_manager, json.dumps([])))
         self.assertTrue(hf.filter_hosts(self.zone_manager, json.dumps({})))
         self.assertTrue(hf.filter_hosts(self.zone_manager, json.dumps(
-                ['not', True, False, True, False]
-            )))
+                ['not', True, False, True, False])))
 
         try:
             hf.filter_hosts(self.zone_manager, json.dumps(
-                'not', True, False, True, False
-            ))
+                'not', True, False, True, False))
             self.fail("Should give KeyError")
         except KeyError, e:
             pass
@@ -18,6 +18,7 @@ import eventlet
 import mox
 import os
 import re
+import shutil
 import sys
 
 from xml.etree.ElementTree import fromstring as xml_to_tree
@@ -645,6 +646,8 @@ class LibvirtConnTestCase(test.TestCase):
         except Exception, e:
             count = (0 <= str(e.message).find('Unexpected method call'))
 
+        shutil.rmtree(os.path.join(FLAGS.instances_path, instance.name))
+
         self.assertTrue(count)
 
     def test_get_host_ip_addr(self):
@@ -38,16 +38,16 @@ class FakeZoneAwareScheduler(zone_aware_scheduler.ZoneAwareScheduler):
 class FakeZoneManager(zone_manager.ZoneManager):
     def __init__(self):
         self.service_states = {
             'host1': {
-                'compute': {'ram': 1000}
+                'compute': {'ram': 1000},
             },
             'host2': {
-                'compute': {'ram': 2000}
+                'compute': {'ram': 2000},
             },
             'host3': {
-                'compute': {'ram': 3000}
-            }
+                'compute': {'ram': 3000},
+            },
         }
 
 
 class FakeEmptyZoneManager(zone_manager.ZoneManager):
nova/wsgi.py (252 lines changed)
@@ -85,36 +85,7 @@ class Server(object):
 
 
 class Request(webob.Request):
-
-    def best_match_content_type(self):
-        """Determine the most acceptable content-type.
-
-        Based on the query extension then the Accept header.
-
-        """
-        parts = self.path.rsplit('.', 1)
-
-        if len(parts) > 1:
-            format = parts[1]
-            if format in ['json', 'xml']:
-                return 'application/{0}'.format(parts[1])
-
-        ctypes = ['application/json', 'application/xml']
-        bm = self.accept.best_match(ctypes)
-
-        return bm or 'application/json'
-
-    def get_content_type(self):
-        allowed_types = ("application/xml", "application/json")
-        if not "Content-Type" in self.headers:
-            msg = _("Missing Content-Type")
-            LOG.debug(msg)
-            raise webob.exc.HTTPBadRequest(msg)
-        type = self.content_type
-        if type in allowed_types:
-            return type
-        LOG.debug(_("Wrong Content-Type: %s") % type)
-        raise webob.exc.HTTPBadRequest("Invalid content type")
+    pass
 
 
 class Application(object):
@@ -289,8 +260,8 @@ class Router(object):
 
         Each route in `mapper` must specify a 'controller', which is a
         WSGI app to call.  You'll probably want to specify an 'action' as
-        well and have your controller be a wsgi.Controller, who will route
-        the request to the action method.
+        well and have your controller be an object that can route
+        the request to the action-specific method.
 
         Examples:
           mapper = routes.Mapper()
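(Aside, not part of the commit: a minimal sketch of the routing convention this docstring describes, using the Routes and WebOb libraries that nova.wsgi already imports. The PingController class and the /ping route are hypothetical names used only for illustration.)

    import routes
    import webob
    import webob.dec


    class PingController(object):
        """Hypothetical controller: dispatches requests to its action methods."""

        @webob.dec.wsgify
        def __call__(self, req):
            # RoutesMiddleware (set up by nova.wsgi.Router) records the matched
            # route variables, including 'action', in wsgiorg.routing_args.
            args = req.environ['wsgiorg.routing_args'][1]
            return getattr(self, args['action'])(req)

        def index(self, req):
            return webob.Response(body='pong')


    mapper = routes.Mapper()
    # Each route names a 'controller' (a WSGI app) and, usually, an 'action'.
    mapper.connect('/ping', controller=PingController(), action='index')
    # nova.wsgi.Router(mapper) would then dispatch matching requests to it.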
@@ -338,223 +309,6 @@ class Router(object):
         return app
 
 
-class Controller(object):
-    """WSGI app that dispatched to methods.
-
-    WSGI app that reads routing information supplied by RoutesMiddleware
-    and calls the requested action method upon itself. All action methods
-    must, in addition to their normal parameters, accept a 'req' argument
-    which is the incoming wsgi.Request. They raise a webob.exc exception,
-    or return a dict which will be serialized by requested content type.
-
-    """
-
-    @webob.dec.wsgify(RequestClass=Request)
-    def __call__(self, req):
-        """Call the method specified in req.environ by RoutesMiddleware."""
-        arg_dict = req.environ['wsgiorg.routing_args'][1]
-        action = arg_dict['action']
-        method = getattr(self, action)
-        LOG.debug("%s %s" % (req.method, req.url))
-        del arg_dict['controller']
-        del arg_dict['action']
-        if 'format' in arg_dict:
-            del arg_dict['format']
-        arg_dict['req'] = req
-        result = method(**arg_dict)
-
-        if type(result) is dict:
-            content_type = req.best_match_content_type()
-            default_xmlns = self.get_default_xmlns(req)
-            body = self._serialize(result, content_type, default_xmlns)
-
-            response = webob.Response()
-            response.headers['Content-Type'] = content_type
-            response.body = body
-            msg_dict = dict(url=req.url, status=response.status_int)
-            msg = _("%(url)s returned with HTTP %(status)d") % msg_dict
-            LOG.debug(msg)
-            return response
-        else:
-            return result
-
-    def _serialize(self, data, content_type, default_xmlns):
-        """Serialize the given dict to the provided content_type.
-
-        Uses self._serialization_metadata if it exists, which is a dict mapping
-        MIME types to information needed to serialize to that type.
-
-        """
-        _metadata = getattr(type(self), '_serialization_metadata', {})
-
-        serializer = Serializer(_metadata, default_xmlns)
-        try:
-            return serializer.serialize(data, content_type)
-        except exception.InvalidContentType:
-            raise webob.exc.HTTPNotAcceptable()
-
-    def _deserialize(self, data, content_type):
-        """Deserialize the request body to the specefied content type.
-
-        Uses self._serialization_metadata if it exists, which is a dict mapping
-        MIME types to information needed to serialize to that type.
-
-        """
-        _metadata = getattr(type(self), '_serialization_metadata', {})
-        serializer = Serializer(_metadata)
-        return serializer.deserialize(data, content_type)
-
-    def get_default_xmlns(self, req):
-        """Provide the XML namespace to use if none is otherwise specified."""
-        return None
-
-
-class Serializer(object):
-    """Serializes and deserializes dictionaries to certain MIME types."""
-
-    def __init__(self, metadata=None, default_xmlns=None):
-        """Create a serializer based on the given WSGI environment.
-
-        'metadata' is an optional dict mapping MIME types to information
-        needed to serialize a dictionary to that type.
-
-        """
-        self.metadata = metadata or {}
-        self.default_xmlns = default_xmlns
-
-    def _get_serialize_handler(self, content_type):
-        handlers = {
-            'application/json': self._to_json,
-            'application/xml': self._to_xml,
-        }
-
-        try:
-            return handlers[content_type]
-        except Exception:
-            raise exception.InvalidContentType(content_type=content_type)
-
-    def serialize(self, data, content_type):
-        """Serialize a dictionary into the specified content type."""
-        return self._get_serialize_handler(content_type)(data)
-
-    def deserialize(self, datastring, content_type):
-        """Deserialize a string to a dictionary.
-
-        The string must be in the format of a supported MIME type.
-
-        """
-        return self.get_deserialize_handler(content_type)(datastring)
-
-    def get_deserialize_handler(self, content_type):
-        handlers = {
-            'application/json': self._from_json,
-            'application/xml': self._from_xml,
-        }
-
-        try:
-            return handlers[content_type]
-        except Exception:
-            raise exception.InvalidContentType(content_type=content_type)
-
-    def _from_json(self, datastring):
-        return utils.loads(datastring)
-
-    def _from_xml(self, datastring):
-        xmldata = self.metadata.get('application/xml', {})
-        plurals = set(xmldata.get('plurals', {}))
-        node = minidom.parseString(datastring).childNodes[0]
-        return {node.nodeName: self._from_xml_node(node, plurals)}
-
-    def _from_xml_node(self, node, listnames):
-        """Convert a minidom node to a simple Python type.
-
-        listnames is a collection of names of XML nodes whose subnodes should
-        be considered list items.
-
-        """
-        if len(node.childNodes) == 1 and node.childNodes[0].nodeType == 3:
-            return node.childNodes[0].nodeValue
-        elif node.nodeName in listnames:
-            return [self._from_xml_node(n, listnames) for n in node.childNodes]
-        else:
-            result = dict()
-            for attr in node.attributes.keys():
-                result[attr] = node.attributes[attr].nodeValue
-            for child in node.childNodes:
-                if child.nodeType != node.TEXT_NODE:
-                    result[child.nodeName] = self._from_xml_node(child,
-                                                                 listnames)
-            return result
-
-    def _to_json(self, data):
-        return utils.dumps(data)
-
-    def _to_xml(self, data):
-        metadata = self.metadata.get('application/xml', {})
-        # We expect data to contain a single key which is the XML root.
-        root_key = data.keys()[0]
-        doc = minidom.Document()
-        node = self._to_xml_node(doc, metadata, root_key, data[root_key])
-
-        xmlns = node.getAttribute('xmlns')
-        if not xmlns and self.default_xmlns:
-            node.setAttribute('xmlns', self.default_xmlns)
-
-        return node.toprettyxml(indent='    ')
-
-    def _to_xml_node(self, doc, metadata, nodename, data):
-        """Recursive method to convert data members to XML nodes."""
-        result = doc.createElement(nodename)
-
-        # Set the xml namespace if one is specified
-        # TODO(justinsb): We could also use prefixes on the keys
-        xmlns = metadata.get('xmlns', None)
-        if xmlns:
-            result.setAttribute('xmlns', xmlns)
-
-        if type(data) is list:
-            collections = metadata.get('list_collections', {})
-            if nodename in collections:
-                metadata = collections[nodename]
-                for item in data:
-                    node = doc.createElement(metadata['item_name'])
-                    node.setAttribute(metadata['item_key'], str(item))
-                    result.appendChild(node)
-                return result
-            singular = metadata.get('plurals', {}).get(nodename, None)
-            if singular is None:
-                if nodename.endswith('s'):
-                    singular = nodename[:-1]
-                else:
-                    singular = 'item'
-            for item in data:
-                node = self._to_xml_node(doc, metadata, singular, item)
-                result.appendChild(node)
-        elif type(data) is dict:
-            collections = metadata.get('dict_collections', {})
-            if nodename in collections:
-                metadata = collections[nodename]
-                for k, v in data.items():
-                    node = doc.createElement(metadata['item_name'])
-                    node.setAttribute(metadata['item_key'], str(k))
-                    text = doc.createTextNode(str(v))
-                    node.appendChild(text)
-                    result.appendChild(node)
-                return result
-            attrs = metadata.get('attributes', {}).get(nodename, {})
-            for k, v in data.items():
-                if k in attrs:
-                    result.setAttribute(k, str(v))
-                else:
-                    node = self._to_xml_node(doc, metadata, k, v)
-                    result.appendChild(node)
-        else:
-            # Type is atom
-            node = doc.createTextNode(str(data))
-            result.appendChild(node)
-        return result
-
-
 def paste_config_file(basename):
     """Find the best location in the system for a paste config file.
 