Merged trunk

This commit is contained in:
Chuck Short
2011-03-09 21:33:01 -05:00
10 changed files with 278 additions and 75 deletions

View File

@@ -545,6 +545,15 @@ class NetworkCommands(object):
network.dhcp_start,
network.dns)
def delete(self, fixed_range):
"""Deletes a network"""
network = db.network_get_by_cidr(context.get_admin_context(), \
fixed_range)
if network.project_id is not None:
raise ValueError(_('Network must be disassociated from project %s'
' before delete' % network.project_id))
db.network_delete_safe(context.get_admin_context(), network.id)
class ServiceCommands(object):
"""Enable and disable running services"""

View File

@@ -8,5 +8,6 @@ from nova import utils
def setup(app):
rootdir = os.path.abspath(app.srcdir + '/..')
print "**Autodocumenting from %s" % rootdir
rv = utils.execute('cd %s && ./generate_autodoc_index.sh' % rootdir)
os.chdir(rootdir)
rv = utils.execute('./generate_autodoc_index.sh')
print rv[0]

View File

@@ -88,6 +88,10 @@ class InvalidInputException(Error):
pass
class InvalidContentType(Error):
pass
class TimeoutException(Error):
pass

View File

@@ -321,6 +321,8 @@ DEFINE_integer('auth_token_ttl', 3600, 'Seconds for auth tokens to linger')
DEFINE_string('state_path', os.path.join(os.path.dirname(__file__), '../'),
"Top-level directory for maintaining nova's state")
DEFINE_string('lock_path', os.path.join(os.path.dirname(__file__), '../'),
"Directory for lock files")
DEFINE_string('logdir', None, 'output to a per-service log file in named '
'directory')

View File

@@ -59,6 +59,7 @@ class DirectTestCase(test.TestCase):
req.headers['X-OpenStack-User'] = 'user1'
req.headers['X-OpenStack-Project'] = 'proj1'
resp = req.get_response(self.auth_router)
self.assertEqual(resp.status_int, 200)
data = json.loads(resp.body)
self.assertEqual(data['user'], 'user1')
self.assertEqual(data['project'], 'proj1')
@@ -69,6 +70,7 @@ class DirectTestCase(test.TestCase):
req.method = 'POST'
req.body = 'json=%s' % json.dumps({'data': 'foo'})
resp = req.get_response(self.router)
self.assertEqual(resp.status_int, 200)
resp_parsed = json.loads(resp.body)
self.assertEqual(resp_parsed['data'], 'foo')
@@ -78,6 +80,7 @@ class DirectTestCase(test.TestCase):
req.method = 'POST'
req.body = 'data=foo'
resp = req.get_response(self.router)
self.assertEqual(resp.status_int, 200)
resp_parsed = json.loads(resp.body)
self.assertEqual(resp_parsed['data'], 'foo')

View File

@@ -14,10 +14,12 @@
# License for the specific language governing permissions and limitations
# under the License.
import errno
import os
import select
from nova import test
from nova.utils import parse_mailmap, str_dict_replace
from nova.utils import parse_mailmap, str_dict_replace, synchronized
class ProjectTestCase(test.TestCase):
@@ -55,3 +57,47 @@ class ProjectTestCase(test.TestCase):
'%r not listed in Authors' % missing)
finally:
tree.unlock()
class LockTestCase(test.TestCase):
def test_synchronized_wrapped_function_metadata(self):
@synchronized('whatever')
def foo():
"""Bar"""
pass
self.assertEquals(foo.__doc__, 'Bar', "Wrapped function's docstring "
"got lost")
self.assertEquals(foo.__name__, 'foo', "Wrapped function's name "
"got mangled")
def test_synchronized(self):
rpipe1, wpipe1 = os.pipe()
rpipe2, wpipe2 = os.pipe()
@synchronized('testlock')
def f(rpipe, wpipe):
try:
os.write(wpipe, "foo")
except OSError, e:
self.assertEquals(e.errno, errno.EPIPE)
return
rfds, _, __ = select.select([rpipe], [], [], 1)
self.assertEquals(len(rfds), 0, "The other process, which was"
" supposed to be locked, "
"wrote on its end of the "
"pipe")
os.close(rpipe)
pid = os.fork()
if pid > 0:
os.close(wpipe1)
os.close(rpipe2)
f(rpipe1, wpipe2)
else:
os.close(rpipe1)
os.close(wpipe2)
f(rpipe2, wpipe1)
os._exit(0)

View File

@@ -343,13 +343,13 @@ def lease_ip(private_ip):
private_ip)
instance_ref = db.fixed_ip_get_instance(context.get_admin_context(),
private_ip)
cmd = "%s add %s %s fake" % (binpath('nova-dhcpbridge'),
instance_ref['mac_address'],
private_ip)
cmd = (binpath('nova-dhcpbridge'), 'add',
instance_ref['mac_address'],
private_ip, 'fake')
env = {'DNSMASQ_INTERFACE': network_ref['bridge'],
'TESTING': '1',
'FLAGFILE': FLAGS.dhcpbridge_flagfile}
(out, err) = utils.execute(cmd, addl_env=env)
(out, err) = utils.execute(*cmd, addl_env=env)
LOG.debug("ISSUE_IP: %s, %s ", out, err)
@@ -359,11 +359,11 @@ def release_ip(private_ip):
private_ip)
instance_ref = db.fixed_ip_get_instance(context.get_admin_context(),
private_ip)
cmd = "%s del %s %s fake" % (binpath('nova-dhcpbridge'),
instance_ref['mac_address'],
private_ip)
cmd = (binpath('nova-dhcpbridge'), 'del',
instance_ref['mac_address'],
private_ip, 'fake')
env = {'DNSMASQ_INTERFACE': network_ref['bridge'],
'TESTING': '1',
'FLAGFILE': FLAGS.dhcpbridge_flagfile}
(out, err) = utils.execute(cmd, addl_env=env)
(out, err) = utils.execute(*cmd, addl_env=env)
LOG.debug("RELEASE_IP: %s, %s ", out, err)

View File

@@ -14,6 +14,9 @@
# License for the specific language governing permissions and limitations
# under the License.
import os
import eventlet
from xml.etree.ElementTree import fromstring as xml_to_tree
from xml.dom.minidom import parseString as xml_to_dom
@@ -30,6 +33,70 @@ FLAGS = flags.FLAGS
flags.DECLARE('instances_path', 'nova.compute.manager')
def _concurrency(wait, done, target):
wait.wait()
done.send()
class CacheConcurrencyTestCase(test.TestCase):
def setUp(self):
super(CacheConcurrencyTestCase, self).setUp()
def fake_exists(fname):
basedir = os.path.join(FLAGS.instances_path, '_base')
if fname == basedir:
return True
return False
def fake_execute(*args, **kwargs):
pass
self.stubs.Set(os.path, 'exists', fake_exists)
self.stubs.Set(utils, 'execute', fake_execute)
def test_same_fname_concurrency(self):
"""Ensures that the same fname cache runs sequentially"""
conn = libvirt_conn.LibvirtConnection
wait1 = eventlet.event.Event()
done1 = eventlet.event.Event()
eventlet.spawn(conn._cache_image, _concurrency,
'target', 'fname', False, wait1, done1)
wait2 = eventlet.event.Event()
done2 = eventlet.event.Event()
eventlet.spawn(conn._cache_image, _concurrency,
'target', 'fname', False, wait2, done2)
wait2.send()
eventlet.sleep(0)
try:
self.assertFalse(done2.ready())
self.assertTrue('fname' in conn._image_sems)
finally:
wait1.send()
done1.wait()
eventlet.sleep(0)
self.assertTrue(done2.ready())
self.assertFalse('fname' in conn._image_sems)
def test_different_fname_concurrency(self):
"""Ensures that two different fname caches are concurrent"""
conn = libvirt_conn.LibvirtConnection
wait1 = eventlet.event.Event()
done1 = eventlet.event.Event()
eventlet.spawn(conn._cache_image, _concurrency,
'target', 'fname2', False, wait1, done1)
wait2 = eventlet.event.Event()
done2 = eventlet.event.Event()
eventlet.spawn(conn._cache_image, _concurrency,
'target', 'fname1', False, wait2, done2)
wait2.send()
eventlet.sleep(0)
try:
self.assertTrue(done2.ready())
finally:
wait1.send()
eventlet.sleep(0)
class LibvirtConnTestCase(test.TestCase):
def setUp(self):
super(LibvirtConnTestCase, self).setUp()
@@ -318,15 +385,16 @@ class IptablesFirewallTestCase(test.TestCase):
instance_ref = db.instance_get(admin_ctxt, instance_ref['id'])
# self.fw.add_instance(instance_ref)
def fake_iptables_execute(cmd, process_input=None):
if cmd == 'sudo ip6tables-save -t filter':
def fake_iptables_execute(*cmd, **kwargs):
process_input = kwargs.get('process_input', None)
if cmd == ('sudo', 'ip6tables-save', '-t', 'filter'):
return '\n'.join(self.in6_rules), None
if cmd == 'sudo iptables-save -t filter':
if cmd == ('sudo', 'iptables-save', '-t', 'filter'):
return '\n'.join(self.in_rules), None
if cmd == 'sudo iptables-restore':
if cmd == ('sudo', 'iptables-restore'):
self.out_rules = process_input.split('\n')
return '', ''
if cmd == 'sudo ip6tables-restore':
if cmd == ('sudo', 'ip6tables-restore'):
self.out6_rules = process_input.split('\n')
return '', ''
self.fw.execute = fake_iptables_execute

View File

@@ -23,10 +23,14 @@ System-level utilities and helper functions.
import base64
import datetime
import functools
import inspect
import json
import lockfile
import netaddr
import os
import random
import re
import socket
import string
import struct
@@ -34,20 +38,20 @@ import sys
import time
import types
from xml.sax import saxutils
import re
import netaddr
from eventlet import event
from eventlet import greenthread
from eventlet.green import subprocess
None
from nova import exception
from nova.exception import ProcessExecutionError
from nova import flags
from nova import log as logging
LOG = logging.getLogger("nova.utils")
TIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
FLAGS = flags.FLAGS
def import_class(import_str):
@@ -125,16 +129,24 @@ def fetchfile(url, target):
# c.perform()
# c.close()
# fp.close()
execute("curl --fail %s -o %s" % (url, target))
execute("curl", "--fail", url, "-o", target)
def execute(cmd, process_input=None, addl_env=None, check_exit_code=True):
LOG.debug(_("Running cmd (subprocess): %s"), cmd)
def execute(*cmd, **kwargs):
process_input = kwargs.get('process_input', None)
addl_env = kwargs.get('addl_env', None)
check_exit_code = kwargs.get('check_exit_code', 0)
stdin = kwargs.get('stdin', subprocess.PIPE)
stdout = kwargs.get('stdout', subprocess.PIPE)
stderr = kwargs.get('stderr', subprocess.PIPE)
cmd = map(str, cmd)
LOG.debug(_("Running cmd (subprocess): %s"), ' '.join(cmd))
env = os.environ.copy()
if addl_env:
env.update(addl_env)
obj = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
obj = subprocess.Popen(cmd, stdin=stdin,
stdout=stdout, stderr=stderr, env=env)
result = None
if process_input != None:
result = obj.communicate(process_input)
@@ -143,12 +155,13 @@ def execute(cmd, process_input=None, addl_env=None, check_exit_code=True):
obj.stdin.close()
if obj.returncode:
LOG.debug(_("Result was %s") % obj.returncode)
if check_exit_code and obj.returncode != 0:
if type(check_exit_code) == types.IntType \
and obj.returncode != check_exit_code:
(stdout, stderr) = result
raise ProcessExecutionError(exit_code=obj.returncode,
stdout=stdout,
stderr=stderr,
cmd=cmd)
cmd=' '.join(cmd))
# NOTE(termie): this appears to be necessary to let the subprocess call
# clean something up in between calls, without it two
# execute calls in a row hangs the second one
@@ -158,7 +171,7 @@ def execute(cmd, process_input=None, addl_env=None, check_exit_code=True):
def ssh_execute(ssh, cmd, process_input=None,
addl_env=None, check_exit_code=True):
LOG.debug(_("Running cmd (SSH): %s"), cmd)
LOG.debug(_("Running cmd (SSH): %s"), ' '.join(cmd))
if addl_env:
raise exception.Error("Environment not supported over SSH")
@@ -187,7 +200,7 @@ def ssh_execute(ssh, cmd, process_input=None,
raise exception.ProcessExecutionError(exit_code=exit_status,
stdout=stdout,
stderr=stderr,
cmd=cmd)
cmd=' '.join(cmd))
return (stdout, stderr)
@@ -220,9 +233,9 @@ def debug(arg):
return arg
def runthis(prompt, cmd, check_exit_code=True):
LOG.debug(_("Running %s"), (cmd))
rv, err = execute(cmd, check_exit_code=check_exit_code)
def runthis(prompt, *cmd, **kwargs):
LOG.debug(_("Running %s"), (" ".join(cmd)))
rv, err = execute(*cmd, **kwargs)
def generate_uid(topic, size=8):
@@ -254,7 +267,7 @@ def last_octet(address):
def get_my_linklocal(interface):
try:
if_str = execute("ip -f inet6 -o addr show %s" % interface)
if_str = execute("ip", "-f", "inet6", "-o", "addr", "show", interface)
condition = "\s+inet6\s+([0-9a-f:]+)/\d+\s+scope\s+link"
links = [re.search(condition, x) for x in if_str[0].split('\n')]
address = [w.group(1) for w in links if w is not None]
@@ -491,6 +504,18 @@ def loads(s):
return json.loads(s)
def synchronized(name):
def wrap(f):
@functools.wraps(f)
def inner(*args, **kwargs):
lock = lockfile.FileLock(os.path.join(FLAGS.lock_path,
'nova-%s.lock' % name))
with lock:
return f(*args, **kwargs)
return inner
return wrap
def ensure_b64_encoding(val):
"""Safety method to ensure that values expected to be base64-encoded
actually are. If they are, the value is returned unchanged. Otherwise,

View File

@@ -36,6 +36,7 @@ import webob.exc
from paste import deploy
from nova import exception
from nova import flags
from nova import log as logging
from nova import utils
@@ -82,6 +83,35 @@ class Server(object):
log=WritableLogger(logger))
class Request(webob.Request):
def best_match_content_type(self):
"""
Determine the most acceptable content-type based on the
query extension then the Accept header
"""
parts = self.path.rsplit(".", 1)
if len(parts) > 1:
format = parts[1]
if format in ["json", "xml"]:
return "application/{0}".format(parts[1])
ctypes = ["application/json", "application/xml"]
bm = self.accept.best_match(ctypes)
return bm or "application/json"
def get_content_type(self):
try:
ct = self.headers["Content-Type"]
assert ct in ("application/xml", "application/json")
return ct
except Exception:
raise webob.exc.HTTPBadRequest("Invalid content type")
class Application(object):
"""Base WSGI application wrapper. Subclasses need to implement __call__."""
@@ -113,7 +143,7 @@ class Application(object):
def __call__(self, environ, start_response):
r"""Subclasses will probably want to implement __call__ like this:
@webob.dec.wsgify
@webob.dec.wsgify(RequestClass=Request)
def __call__(self, req):
# Any of the following objects work as responses:
@@ -199,7 +229,7 @@ class Middleware(Application):
"""Do whatever you'd like to the response."""
return response
@webob.dec.wsgify
@webob.dec.wsgify(RequestClass=Request)
def __call__(self, req):
response = self.process_request(req)
if response:
@@ -212,7 +242,7 @@ class Debug(Middleware):
"""Helper class that can be inserted into any WSGI application chain
to get information about the request and response."""
@webob.dec.wsgify
@webob.dec.wsgify(RequestClass=Request)
def __call__(self, req):
print ("*" * 40) + " REQUEST ENVIRON"
for key, value in req.environ.items():
@@ -276,7 +306,7 @@ class Router(object):
self._router = routes.middleware.RoutesMiddleware(self._dispatch,
self.map)
@webob.dec.wsgify
@webob.dec.wsgify(RequestClass=Request)
def __call__(self, req):
"""
Route the incoming request to a controller based on self.map.
@@ -285,7 +315,7 @@ class Router(object):
return self._router
@staticmethod
@webob.dec.wsgify
@webob.dec.wsgify(RequestClass=Request)
def _dispatch(req):
"""
Called by self._router after matching the incoming request to a route
@@ -304,11 +334,11 @@ class Controller(object):
WSGI app that reads routing information supplied by RoutesMiddleware
and calls the requested action method upon itself. All action methods
must, in addition to their normal parameters, accept a 'req' argument
which is the incoming webob.Request. They raise a webob.exc exception,
which is the incoming wsgi.Request. They raise a webob.exc exception,
or return a dict which will be serialized by requested content type.
"""
@webob.dec.wsgify
@webob.dec.wsgify(RequestClass=Request)
def __call__(self, req):
"""
Call the method specified in req.environ by RoutesMiddleware.
@@ -318,32 +348,45 @@ class Controller(object):
method = getattr(self, action)
del arg_dict['controller']
del arg_dict['action']
if 'format' in arg_dict:
del arg_dict['format']
arg_dict['req'] = req
result = method(**arg_dict)
if type(result) is dict:
return self._serialize(result, req)
content_type = req.best_match_content_type()
body = self._serialize(result, content_type)
response = webob.Response()
response.headers["Content-Type"] = content_type
response.body = body
return response
else:
return result
def _serialize(self, data, request):
def _serialize(self, data, content_type):
"""
Serialize the given dict to the response type requested in request.
Serialize the given dict to the provided content_type.
Uses self._serialization_metadata if it exists, which is a dict mapping
MIME types to information needed to serialize to that type.
"""
_metadata = getattr(type(self), "_serialization_metadata", {})
serializer = Serializer(request.environ, _metadata)
return serializer.to_content_type(data)
serializer = Serializer(_metadata)
try:
return serializer.serialize(data, content_type)
except exception.InvalidContentType:
raise webob.exc.HTTPNotAcceptable()
def _deserialize(self, data, request):
def _deserialize(self, data, content_type):
"""
Deserialize the request body to the response type requested in request.
Deserialize the request body to the specified content type.
Uses self._serialization_metadata if it exists, which is a dict mapping
MIME types to information needed to serialize to that type.
"""
_metadata = getattr(type(self), "_serialization_metadata", {})
serializer = Serializer(request.environ, _metadata)
return serializer.deserialize(data)
serializer = Serializer(_metadata)
return serializer.deserialize(data, content_type)
class Serializer(object):
@@ -351,50 +394,52 @@ class Serializer(object):
Serializes and deserializes dictionaries to certain MIME types.
"""
def __init__(self, environ, metadata=None):
def __init__(self, metadata=None):
"""
Create a serializer.
'metadata' is an optional dict mapping MIME types to information
needed to serialize a dictionary to that type.
"""
self.metadata = metadata or {}
req = webob.Request.blank('', environ)
suffix = req.path_info.split('.')[-1].lower()
if suffix == 'json':
self.handler = self._to_json
elif suffix == 'xml':
self.handler = self._to_xml
elif 'application/json' in req.accept:
self.handler = self._to_json
elif 'application/xml' in req.accept:
self.handler = self._to_xml
else:
# This is the default
self.handler = self._to_json
def to_content_type(self, data):
def _get_serialize_handler(self, content_type):
handlers = {
"application/json": self._to_json,
"application/xml": self._to_xml,
}
try:
return handlers[content_type]
except Exception:
raise exception.InvalidContentType()
def serialize(self, data, content_type):
"""
Serialize a dictionary into a string.
The format of the string will be decided based on the Content Type
requested in self.environ: by Accept: header, or by URL suffix.
Serialize a dictionary into a string of the specified content type.
"""
return self.handler(data)
return self._get_serialize_handler(content_type)(data)
def deserialize(self, datastring):
def deserialize(self, datastring, content_type):
"""
Deserialize a string to a dictionary.
The string must be in the format of a supported MIME type.
"""
datastring = datastring.strip()
return self.get_deserialize_handler(content_type)(datastring)
def get_deserialize_handler(self, content_type):
handlers = {
"application/json": self._from_json,
"application/xml": self._from_xml,
}
try:
is_xml = (datastring[0] == '<')
if not is_xml:
return utils.loads(datastring)
return self._from_xml(datastring)
except:
return None
return handlers[content_type]
except Exception:
raise exception.InvalidContentType()
def _from_json(self, datastring):
return utils.loads(datastring)
def _from_xml(self, datastring):
xmldata = self.metadata.get('application/xml', {})