Merge from trunk
@@ -29,4 +29,4 @@ if __name__ == '__main__':
twistd.serve(__file__)

if __name__ == '__builtin__':
application = service.ComputeService.create()
application = service.ComputeService.create() # pylint: disable-msg=C0103

@@ -40,29 +40,29 @@ from nova.network import service
FLAGS = flags.FLAGS


def add_lease(_mac, ip, _hostname, _interface):
def add_lease(_mac, ip_address, _hostname, _interface):
"""Set the IP that was assigned by the DHCP server."""
if FLAGS.fake_rabbit:
service.VlanNetworkService().lease_ip(ip)
service.VlanNetworkService().lease_ip(ip_address)
else:
rpc.cast("%s.%s" % (FLAGS.network_topic, FLAGS.node_name),
{"method": "lease_ip",
"args": {"fixed_ip": ip}})
"args": {"fixed_ip": ip_address}})


def old_lease(_mac, _ip, _hostname, _interface):
def old_lease(_mac, _ip_address, _hostname, _interface):
"""Do nothing, just an old lease update."""
logging.debug("Adopted old lease or got a change of mac/hostname")


def del_lease(_mac, ip, _hostname, _interface):
def del_lease(_mac, ip_address, _hostname, _interface):
"""Called when a lease expires."""
if FLAGS.fake_rabbit:
service.VlanNetworkService().release_ip(ip)
service.VlanNetworkService().release_ip(ip_address)
else:
rpc.cast("%s.%s" % (FLAGS.network_topic, FLAGS.node_name),
{"method": "release_ip",
"args": {"fixed_ip": ip}})
"args": {"fixed_ip": ip_address}})


def init_leases(interface):

@@ -35,12 +35,12 @@ from nova.objectstore import image

FLAGS = flags.FLAGS

api_url = 'https://imagestore.canonical.com/api/dashboard'
API_URL = 'https://imagestore.canonical.com/api/dashboard'


def get_images():
"""Get a list of the images from the imagestore URL."""
images = json.load(urllib2.urlopen(api_url))['images']
images = json.load(urllib2.urlopen(API_URL))['images']
images = [img for img in images if img['title'].find('amd64') > -1]
return images

@@ -56,21 +56,21 @@ def download(img):
for f in img['files']:
if f['kind'] == 'kernel':
dest = os.path.join(tempdir, 'kernel')
subprocess.call(['curl', f['url'], '-o', dest])
subprocess.call(['curl', '--fail', f['url'], '-o', dest])
kernel_id = image.Image.add(dest,
description='kernel/' + img['title'], kernel=True)

for f in img['files']:
if f['kind'] == 'ramdisk':
dest = os.path.join(tempdir, 'ramdisk')
subprocess.call(['curl', f['url'], '-o', dest])
subprocess.call(['curl', '--fail', f['url'], '-o', dest])
ramdisk_id = image.Image.add(dest,
description='ramdisk/' + img['title'], ramdisk=True)

for f in img['files']:
if f['kind'] == 'image':
dest = os.path.join(tempdir, 'image')
subprocess.call(['curl', f['url'], '-o', dest])
subprocess.call(['curl', '--fail', f['url'], '-o', dest])
ramdisk_id = image.Image.add(dest,
description=img['title'], kernel=kernel_id, ramdisk=ramdisk_id)

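The repeated change here adds `--fail` so that curl exits non-zero on an HTTP error (for example a 404) instead of silently writing the error page to the destination file. A minimal sketch, not part of the commit, of how that exit status could be checked; the helper name is illustrative:

    import subprocess

    def fetch(url, dest):
        # --fail makes curl return a non-zero status on an HTTP error
        # instead of saving the error body to dest.
        returncode = subprocess.call(['curl', '--fail', url, '-o', dest])
        if returncode != 0:
            raise RuntimeError('download of %s failed with status %d'
                               % (url, returncode))
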
@@ -35,9 +35,10 @@ if __name__ == '__main__':

if __name__ == '__builtin__':
logging.warn('Starting instance monitor')
m = monitor.InstanceMonitor()
# pylint: disable-msg=C0103
monitor = monitor.InstanceMonitor()

# This is the parent service that twistd will be looking for when it
# parses this file, return it so that we can get it into globals below
application = service.Application('nova-instancemonitor')
m.setServiceParent(application)
monitor.setServiceParent(application)

@@ -56,7 +56,8 @@ class VpnCommands(object):
vpn = self._vpn_for(project.id)
if vpn:
command = "ping -c1 -w1 %s > /dev/null; echo $?"
out, _err = utils.execute(command % vpn['private_dns_name'])
out, _err = utils.execute( command % vpn['private_dns_name'],
check_exit_code=False)
if out.strip() == '0':
net = 'up'
else:
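The new `check_exit_code=False` keyword stops `utils.execute` from treating ping's non-zero exit as an error when the VPN is unreachable, so the code can read the echoed "$?" itself. A rough sketch of the calling pattern, with a placeholder address:

    # Sketch only; the address is a placeholder.
    command = "ping -c1 -w1 %s > /dev/null; echo $?" % '10.0.0.2'
    out, _err = utils.execute(command, check_exit_code=False)
    vpn_state = 'up' if out.strip() == '0' else 'down'
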
@@ -211,7 +212,7 @@ class ProjectCommands(object):
f.write(zip_file)


categories = [
CATEGORIES = [
('user', UserCommands),
('project', ProjectCommands),
('role', RoleCommands),
@@ -258,11 +259,11 @@ def main():
if len(argv) < 1:
print script_name + " category action [<args>]"
print "Available categories:"
for k, _ in categories:
for k, _ in CATEGORIES:
print "\t%s" % k
sys.exit(2)
category = argv.pop(0)
matches = lazy_match(category, categories)
matches = lazy_match(category, CATEGORIES)
# instantiate the command group object
category, fn = matches[0]
command_object = fn()

@@ -33,4 +33,5 @@ if __name__ == '__main__':
twistd.serve(__file__)

if __name__ == '__builtin__':
# pylint: disable-msg=C0103
application = service.type_to_class(FLAGS.network_type).create()

@@ -35,4 +35,4 @@ if __name__ == '__main__':

if __name__ == '__builtin__':
utils.default_flagfile()
application = handler.get_application()
application = handler.get_application() # pylint: disable-msg=C0103

@@ -29,4 +29,4 @@ if __name__ == '__main__':
twistd.serve(__file__)

if __name__ == '__builtin__':
application = service.VolumeService.create()
application = service.VolumeService.create() # pylint: disable-msg=C0103

@@ -30,20 +30,23 @@ from nova import datastore

SCOPE_BASE = 0
SCOPE_ONELEVEL = 1 # not implemented
SCOPE_SUBTREE = 2
SCOPE_SUBTREE = 2
MOD_ADD = 0
MOD_DELETE = 1


class NO_SUCH_OBJECT(Exception):
class NO_SUCH_OBJECT(Exception): # pylint: disable-msg=C0103
"""Duplicate exception class from real LDAP module."""
pass


class OBJECT_CLASS_VIOLATION(Exception):
class OBJECT_CLASS_VIOLATION(Exception): # pylint: disable-msg=C0103
"""Duplicate exception class from real LDAP module."""
pass


def initialize(uri):
def initialize(_uri):
"""Opens a fake connection with an LDAP server."""
return FakeLDAP()


@@ -68,7 +71,7 @@ def _match_query(query, attrs):
# cut off the ! and the nested parentheses
return not _match_query(query[2:-1], attrs)

(k, sep, v) = inner.partition('=')
(k, _sep, v) = inner.partition('=')
return _match(k, v, attrs)


@@ -85,20 +88,20 @@ def _paren_groups(source):
if source[pos] == ')':
count -= 1
if count == 0:
result.append(source[start:pos+1])
result.append(source[start:pos + 1])
return result


def _match(k, v, attrs):
def _match(key, value, attrs):
"""Match a given key and value against an attribute list."""
if k not in attrs:
if key not in attrs:
return False
if k != "objectclass":
return v in attrs[k]
if key != "objectclass":
return value in attrs[key]
# it is an objectclass check, so check subclasses
values = _subs(v)
for value in values:
if value in attrs[k]:
values = _subs(value)
for v in values:
if v in attrs[key]:
return True
return False

@@ -145,6 +148,7 @@ def _to_json(unencoded):
class FakeLDAP(object):
#TODO(vish): refactor this class to use a wrapper instead of accessing
# redis directly
"""Fake LDAP connection."""

def simple_bind_s(self, dn, password):
"""This method is ignored, but provided for compatibility."""
@@ -207,6 +211,7 @@ class FakeLDAP(object):
# get the attributes from redis
attrs = redis.hgetall(key)
# turn the values from redis into lists
# pylint: disable-msg=E1103
attrs = dict([(k, _from_json(v))
for k, v in attrs.iteritems()])
# filter the objects by query
@@ -215,12 +220,12 @@ class FakeLDAP(object):
attrs = dict([(k, v) for k, v in attrs.iteritems()
if not fields or k in fields])
objects.append((key[len(self.__redis_prefix):], attrs))
# pylint: enable-msg=E1103
if objects == []:
raise NO_SUCH_OBJECT()
return objects

@property
def __redis_prefix(self):
def __redis_prefix(self): # pylint: disable-msg=R0201
"""Get the prefix to use for all redis keys."""
return 'ldap:'


@@ -34,7 +34,7 @@ from nova import flags
FLAGS = flags.FLAGS
flags.DEFINE_string('ldap_url', 'ldap://localhost',
'Point this at your ldap server')
flags.DEFINE_string('ldap_password', 'changeme', 'LDAP password')
flags.DEFINE_string('ldap_password', 'changeme', 'LDAP password')
flags.DEFINE_string('ldap_user_dn', 'cn=Manager,dc=example,dc=com',
'DN of admin user')
flags.DEFINE_string('ldap_user_unit', 'Users', 'OID for Users')
@@ -63,14 +63,18 @@ flags.DEFINE_string('ldap_developer',
# to define a set interface for AuthDrivers. I'm delaying
# creating this now because I'm expecting an auth refactor
# in which we may want to change the interface a bit more.


class LdapDriver(object):
"""Ldap Auth driver

Defines enter and exit and therefore supports the with/as syntax.
"""

def __init__(self):
"""Imports the LDAP module"""
self.ldap = __import__('ldap')
self.conn = None

def __enter__(self):
"""Creates the connection to LDAP"""
@@ -78,7 +82,7 @@ class LdapDriver(object):
self.conn.simple_bind_s(FLAGS.ldap_user_dn, FLAGS.ldap_password)
return self

def __exit__(self, type, value, traceback):
def __exit__(self, exc_type, exc_value, traceback):
"""Destroys the connection to LDAP"""
self.conn.unbind_s()
return False
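Because the driver defines __enter__ and __exit__, callers only hold a bound LDAP connection inside a with block, which is exactly how AuthManager uses it later in this diff. A minimal sketch of the intended calling pattern (the uid is a placeholder):

    # Sketch only; 'testuser' is a placeholder uid.
    with LdapDriver() as drv:
        user = drv.get_user('testuser')
        projects = drv.get_projects('testuser')
    # Leaving the block runs __exit__, which unbinds the LDAP connection.
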
@@ -123,11 +127,11 @@ class LdapDriver(object):

def get_projects(self, uid=None):
"""Retrieve list of projects"""
filter = '(objectclass=novaProject)'
pattern = '(objectclass=novaProject)'
if uid:
filter = "(&%s(member=%s))" % (filter, self.__uid_to_dn(uid))
pattern = "(&%s(member=%s))" % (pattern, self.__uid_to_dn(uid))
attrs = self.__find_objects(FLAGS.ldap_project_subtree,
filter)
pattern)
return [self.__to_project(attr) for attr in attrs]

def create_user(self, name, access_key, secret_key, is_admin):
@@ -194,8 +198,7 @@ class LdapDriver(object):
('cn', [name]),
('description', [description]),
('projectManager', [manager_dn]),
('member', members)
]
('member', members)]
self.conn.add_s('cn=%s,%s' % (name, FLAGS.ldap_project_subtree), attr)
return self.__to_project(dict(attr))

@@ -287,7 +290,6 @@ class LdapDriver(object):

def __key_pair_exists(self, uid, key_name):
"""Check if key pair exists"""
return self.get_user(uid) != None
return self.get_key_pair(uid, key_name) != None

def __project_exists(self, project_id):
@@ -310,7 +312,7 @@ class LdapDriver(object):
except self.ldap.NO_SUCH_OBJECT:
return []
# just return the DNs
return [dn for dn, attributes in res]
return [dn for dn, _attributes in res]

def __find_objects(self, dn, query=None, scope=None):
"""Find objects by query"""
@@ -346,7 +348,8 @@ class LdapDriver(object):
for key in keys:
self.delete_key_pair(uid, key['name'])

def __role_to_dn(self, role, project_id=None):
@staticmethod
def __role_to_dn(role, project_id=None):
"""Convert role to corresponding dn"""
if project_id == None:
return FLAGS.__getitem__("ldap_%s" % role).value
@@ -356,7 +359,7 @@ class LdapDriver(object):
FLAGS.ldap_project_subtree)

def __create_group(self, group_dn, name, uid,
description, member_uids = None):
description, member_uids=None):
"""Create a group"""
if self.__group_exists(group_dn):
raise exception.Duplicate("Group can't be created because "
@@ -375,8 +378,7 @@ class LdapDriver(object):
('objectclass', ['groupOfNames']),
('cn', [name]),
('description', [description]),
('member', members)
]
('member', members)]
self.conn.add_s(group_dn, attr)

def __is_in_group(self, uid, group_dn):
@@ -402,9 +404,7 @@ class LdapDriver(object):
if self.__is_in_group(uid, group_dn):
raise exception.Duplicate("User %s is already a member of "
"the group %s" % (uid, group_dn))
attr = [
(self.ldap.MOD_ADD, 'member', self.__uid_to_dn(uid))
]
attr = [(self.ldap.MOD_ADD, 'member', self.__uid_to_dn(uid))]
self.conn.modify_s(group_dn, attr)

def __remove_from_group(self, uid, group_dn):
@@ -432,7 +432,7 @@ class LdapDriver(object):
self.conn.modify_s(group_dn, attr)
except self.ldap.OBJECT_CLASS_VIOLATION:
logging.debug("Attempted to remove the last member of a group. "
"Deleting the group at %s instead." % group_dn )
"Deleting the group at %s instead.", group_dn)
self.__delete_group(group_dn)

def __remove_from_all(self, uid):
@@ -440,7 +440,6 @@ class LdapDriver(object):
if not self.__user_exists(uid):
raise exception.NotFound("User %s can't be removed from all "
"because the user doesn't exist" % (uid,))
dn = self.__uid_to_dn(uid)
role_dns = self.__find_group_dns_with_member(
FLAGS.role_project_subtree, uid)
for role_dn in role_dns:
@@ -448,7 +447,7 @@ class LdapDriver(object):
project_dns = self.__find_group_dns_with_member(
FLAGS.ldap_project_subtree, uid)
for project_dn in project_dns:
self.__safe_remove_from_group(uid, role_dn)
self.__safe_remove_from_group(uid, project_dn)

def __delete_group(self, group_dn):
"""Delete Group"""
@@ -461,7 +460,8 @@ class LdapDriver(object):
for role_dn in self.__find_role_dns(project_dn):
self.__delete_group(role_dn)

def __to_user(self, attr):
@staticmethod
def __to_user(attr):
"""Convert ldap attributes to User object"""
if attr == None:
return None
@@ -470,10 +470,10 @@ class LdapDriver(object):
'name': attr['cn'][0],
'access': attr['accessKey'][0],
'secret': attr['secretKey'][0],
'admin': (attr['isAdmin'][0] == 'TRUE')
}
'admin': (attr['isAdmin'][0] == 'TRUE')}

def __to_key_pair(self, owner, attr):
@staticmethod
def __to_key_pair(owner, attr):
"""Convert ldap attributes to KeyPair object"""
if attr == None:
return None
@@ -482,8 +482,7 @@ class LdapDriver(object):
'name': attr['cn'][0],
'owner_id': owner,
'public_key': attr['sshPublicKey'][0],
'fingerprint': attr['keyFingerprint'][0],
}
'fingerprint': attr['keyFingerprint'][0]}

def __to_project(self, attr):
"""Convert ldap attributes to Project object"""
@@ -495,21 +494,22 @@ class LdapDriver(object):
'name': attr['cn'][0],
'project_manager_id': self.__dn_to_uid(attr['projectManager'][0]),
'description': attr.get('description', [None])[0],
'member_ids': [self.__dn_to_uid(x) for x in member_dns]
}
'member_ids': [self.__dn_to_uid(x) for x in member_dns]}

def __dn_to_uid(self, dn):
@staticmethod
def __dn_to_uid(dn):
"""Convert user dn to uid"""
return dn.split(',')[0].split('=')[1]

def __uid_to_dn(self, dn):
@staticmethod
def __uid_to_dn(dn):
"""Convert uid to dn"""
return 'uid=%s,%s' % (dn, FLAGS.ldap_user_subtree)


class FakeLdapDriver(LdapDriver):
"""Fake Ldap Auth driver"""
def __init__(self):

def __init__(self): # pylint: disable-msg=W0231
__import__('nova.auth.fakeldap')
self.ldap = sys.modules['nova.auth.fakeldap']

@@ -23,7 +23,7 @@ Nova authentication management
import logging
import os
import shutil
import string
import string # pylint: disable-msg=W0402
import tempfile
import uuid
import zipfile
@@ -194,12 +194,12 @@ class Project(AuthBase):

@property
def vpn_ip(self):
ip, port = AuthManager().get_project_vpn_data(self)
ip, _port = AuthManager().get_project_vpn_data(self)
return ip

@property
def vpn_port(self):
ip, port = AuthManager().get_project_vpn_data(self)
_ip, port = AuthManager().get_project_vpn_data(self)
return port

def has_manager(self, user):
@@ -221,11 +221,9 @@ class Project(AuthBase):
return AuthManager().get_credentials(user, self)

def __repr__(self):
return "Project('%s', '%s', '%s', '%s', %s)" % (self.id,
self.name,
self.project_manager_id,
self.description,
self.member_ids)
return "Project('%s', '%s', '%s', '%s', %s)" % \
(self.id, self.name, self.project_manager_id, self.description,
self.member_ids)


class AuthManager(object):
@@ -297,7 +295,7 @@ class AuthManager(object):
@return: User and project that the request represents.
"""
# TODO(vish): check for valid timestamp
(access_key, sep, project_id) = access.partition(':')
(access_key, _sep, project_id) = access.partition(':')

logging.info('Looking up user: %r', access_key)
user = self.get_user_from_access_key(access_key)
@@ -320,7 +318,8 @@ class AuthManager(object):
raise exception.NotFound('User %s is not a member of project %s' %
(user.id, project.id))
if check_type == 's3':
expected_signature = signer.Signer(user.secret.encode()).s3_authorization(headers, verb, path)
sign = signer.Signer(user.secret.encode())
expected_signature = sign.s3_authorization(headers, verb, path)
logging.debug('user.secret: %s', user.secret)
logging.debug('expected_signature: %s', expected_signature)
logging.debug('signature: %s', signature)
@@ -465,7 +464,8 @@ class AuthManager(object):
with self.driver() as drv:
drv.remove_role(User.safe_id(user), role, Project.safe_id(project))

def get_roles(self, project_roles=True):
@staticmethod
def get_roles(project_roles=True):
"""Get list of allowed roles"""
if project_roles:
return list(set(FLAGS.allowed_roles) - set(FLAGS.global_roles))
@@ -518,10 +518,10 @@ class AuthManager(object):
if member_users:
member_users = [User.safe_id(u) for u in member_users]
with self.driver() as drv:
project_dict = drv.create_project(name,
User.safe_id(manager_user),
description,
member_users)
project_dict = drv.create_project(name,
User.safe_id(manager_user),
description,
member_users)
if project_dict:
return Project(**project_dict)

@@ -549,7 +549,8 @@ class AuthManager(object):
return drv.remove_from_project(User.safe_id(user),
Project.safe_id(project))

def get_project_vpn_data(self, project):
@staticmethod
def get_project_vpn_data(project):
"""Gets vpn ip and port for project

@type project: Project or project_id
@@ -613,8 +614,10 @@ class AuthManager(object):
@rtype: User
@return: The new user.
"""
if access == None: access = str(uuid.uuid4())
if secret == None: secret = str(uuid.uuid4())
if access == None:
access = str(uuid.uuid4())
if secret == None:
secret = str(uuid.uuid4())
with self.driver() as drv:
user_dict = drv.create_user(name, access, secret, admin)
if user_dict:
@@ -656,10 +659,10 @@ class AuthManager(object):
def create_key_pair(self, user, key_name, public_key, fingerprint):
"""Creates a key pair for user"""
with self.driver() as drv:
kp_dict = drv.create_key_pair(User.safe_id(user),
key_name,
public_key,
fingerprint)
kp_dict = drv.create_key_pair(User.safe_id(user),
key_name,
public_key,
fingerprint)
if kp_dict:
return KeyPair(**kp_dict)

@@ -702,7 +705,7 @@ class AuthManager(object):

network_data = vpn.NetworkData.lookup(pid)
if network_data:
configfile = open(FLAGS.vpn_client_template,"r")
configfile = open(FLAGS.vpn_client_template, "r")
s = string.Template(configfile.read())
configfile.close()
config = s.substitute(keyfile=FLAGS.credential_key_file,
@@ -717,10 +720,10 @@ class AuthManager(object):
zippy.writestr(FLAGS.ca_file, crypto.fetch_ca(user.id))
zippy.close()
with open(zf, 'rb') as f:
buffer = f.read()
read_buffer = f.read()

shutil.rmtree(tmpdir)
return buffer
return read_buffer

def get_environment_rc(self, user, project=None):
"""Get credential zip for user in project"""
@@ -731,18 +734,18 @@ class AuthManager(object):
pid = Project.safe_id(project)
return self.__generate_rc(user.access, user.secret, pid)

def __generate_rc(self, access, secret, pid):
@staticmethod
def __generate_rc(access, secret, pid):
"""Generate rc file for user"""
rc = open(FLAGS.credentials_template).read()
rc = rc % { 'access': access,
'project': pid,
'secret': secret,
'ec2': FLAGS.ec2_url,
's3': 'http://%s:%s' % (FLAGS.s3_host, FLAGS.s3_port),
'nova': FLAGS.ca_file,
'cert': FLAGS.credential_cert_file,
'key': FLAGS.credential_key_file,
}
rc = rc % {'access': access,
'project': pid,
'secret': secret,
'ec2': FLAGS.ec2_url,
's3': 'http://%s:%s' % (FLAGS.s3_host, FLAGS.s3_port),
'nova': FLAGS.ca_file,
'cert': FLAGS.credential_cert_file,
'key': FLAGS.credential_key_file}
return rc

def _generate_x509_cert(self, uid, pid):
@@ -753,6 +756,7 @@ class AuthManager(object):
signed_cert = crypto.sign_csr(csr, pid)
return (private_key, signed_cert)

def __cert_subject(self, uid):
@staticmethod
def __cert_subject(uid):
"""Helper to generate cert subject"""
return FLAGS.credential_cert_subject % (uid, utils.isotime())

@@ -16,40 +16,54 @@
# License for the specific language governing permissions and limitations
# under the License.

"""Role-based access control decorators to use fpr wrapping other
|
||||
methods with."""
|
||||

from nova import exception
from nova.auth import manager


def allow(*roles):
def wrap(f):
def wrapped_f(self, context, *args, **kwargs):
"""Allow the given roles access the wrapped function."""

def wrap(func): # pylint: disable-msg=C0111

def wrapped_func(self, context, *args,
**kwargs): # pylint: disable-msg=C0111
if context.user.is_superuser():
return f(self, context, *args, **kwargs)
return func(self, context, *args, **kwargs)
for role in roles:
if __matches_role(context, role):
return f(self, context, *args, **kwargs)
return func(self, context, *args, **kwargs)
raise exception.NotAuthorized()
return wrapped_f

return wrapped_func

return wrap


def deny(*roles):
def wrap(f):
def wrapped_f(self, context, *args, **kwargs):
"""Deny the given roles access the wrapped function."""

def wrap(func): # pylint: disable-msg=C0111

def wrapped_func(self, context, *args,
**kwargs): # pylint: disable-msg=C0111
if context.user.is_superuser():
return f(self, context, *args, **kwargs)
return func(self, context, *args, **kwargs)
for role in roles:
if __matches_role(context, role):
raise exception.NotAuthorized()
return f(self, context, *args, **kwargs)
return wrapped_f
return func(self, context, *args, **kwargs)

return wrapped_func

return wrap


def __matches_role(context, role):
"""Check if a role is allowed."""
if role == 'all':
return True
if role == 'none':
return False
return context.project.has_role(context.user.id, role)


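These decorators assume they wrap methods whose second positional argument is the request context; superusers bypass the role check entirely. A hypothetical illustration of applying them, where the class and method names are made up and only the decorator semantics come from the code above:

    class InstanceCommands(object):
        """Hypothetical controller, used only to illustrate the decorators."""

        @allow('projectmanager', 'sysadmin')
        def reboot(self, context, instance_id):
            # runs if context.user is a superuser or holds one of the roles
            pass

        @deny('none')
        def describe(self, context):
            # 'none' never matches a role, so this deny() never raises
            pass
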
@@ -50,15 +50,15 @@ import logging
import urllib

# NOTE(vish): for new boto
import boto
import boto
# NOTE(vish): for old boto
import boto.utils
import boto.utils

from nova.exception import Error


class Signer(object):
""" hacked up code from boto/connection.py """
"""Hacked up code from boto/connection.py"""

def __init__(self, secret_key):
self.hmac = hmac.new(secret_key, digestmod=hashlib.sha1)
@@ -66,22 +66,27 @@ class Signer(object):
self.hmac_256 = hmac.new(secret_key, digestmod=hashlib.sha256)

def s3_authorization(self, headers, verb, path):
"""Generate S3 authorization string."""
c_string = boto.utils.canonical_string(verb, path, headers)
hmac = self.hmac.copy()
hmac.update(c_string)
b64_hmac = base64.encodestring(hmac.digest()).strip()
hmac_copy = self.hmac.copy()
hmac_copy.update(c_string)
b64_hmac = base64.encodestring(hmac_copy.digest()).strip()
return b64_hmac

def generate(self, params, verb, server_string, path):
"""Generate auth string according to what SignatureVersion is given."""
if params['SignatureVersion'] == '0':
return self._calc_signature_0(params)
if params['SignatureVersion'] == '1':
return self._calc_signature_1(params)
if params['SignatureVersion'] == '2':
return self._calc_signature_2(params, verb, server_string, path)
raise Error('Unknown Signature Version: %s' % self.SignatureVersion)
raise Error('Unknown Signature Version: %s' %
params['SignatureVersion'])

def _get_utf8_value(self, value):
@staticmethod
def _get_utf8_value(value):
"""Get the UTF8-encoded version of a value."""
if not isinstance(value, str) and not isinstance(value, unicode):
value = str(value)
if isinstance(value, unicode):
@@ -90,10 +95,11 @@ class Signer(object):
return value

def _calc_signature_0(self, params):
"""Generate AWS signature version 0 string."""
s = params['Action'] + params['Timestamp']
self.hmac.update(s)
keys = params.keys()
keys.sort(cmp = lambda x, y: cmp(x.lower(), y.lower()))
keys.sort(cmp=lambda x, y: cmp(x.lower(), y.lower()))
pairs = []
for key in keys:
val = self._get_utf8_value(params[key])
@@ -101,8 +107,9 @@ class Signer(object):
return base64.b64encode(self.hmac.digest())

def _calc_signature_1(self, params):
"""Generate AWS signature version 1 string."""
keys = params.keys()
keys.sort(cmp = lambda x, y: cmp(x.lower(), y.lower()))
keys.sort(cmp=lambda x, y: cmp(x.lower(), y.lower()))
pairs = []
for key in keys:
self.hmac.update(key)
@@ -112,30 +119,34 @@ class Signer(object):
return base64.b64encode(self.hmac.digest())

def _calc_signature_2(self, params, verb, server_string, path):
"""Generate AWS signature version 2 string."""
logging.debug('using _calc_signature_2')
string_to_sign = '%s\n%s\n%s\n' % (verb, server_string, path)
if self.hmac_256:
hmac = self.hmac_256
current_hmac = self.hmac_256
params['SignatureMethod'] = 'HmacSHA256'
else:
hmac = self.hmac
current_hmac = self.hmac
params['SignatureMethod'] = 'HmacSHA1'
keys = params.keys()
keys.sort()
pairs = []
for key in keys:
val = self._get_utf8_value(params[key])
pairs.append(urllib.quote(key, safe='') + '=' + urllib.quote(val, safe='-_~'))
val = urllib.quote(val, safe='-_~')
pairs.append(urllib.quote(key, safe='') + '=' + val)
qs = '&'.join(pairs)
logging.debug('query string: %s' % qs)
logging.debug('query string: %s', qs)
string_to_sign += qs
logging.debug('string_to_sign: %s' % string_to_sign)
hmac.update(string_to_sign)
b64 = base64.b64encode(hmac.digest())
logging.debug('len(b64)=%d' % len(b64))
logging.debug('base64 encoded digest: %s' % b64)
logging.debug('string_to_sign: %s', string_to_sign)
current_hmac.update(string_to_sign)
b64 = base64.b64encode(current_hmac.digest())
logging.debug('len(b64)=%d', len(b64))
logging.debug('base64 encoded digest: %s', b64)
return b64


if __name__ == '__main__':
print Signer('foo').generate({"SignatureMethod": 'HmacSHA256', 'SignatureVersion': '2'}, "get", "server", "/foo")
print Signer('foo').generate({'SignatureMethod': 'HmacSHA256',
'SignatureVersion': '2'},
'get', 'server', '/foo')

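Besides the query-signing path exercised in __main__, the S3 path now works on a copy of the stored HMAC (hmac_copy), so one Signer can authorize many requests without corrupting its state. A rough usage sketch, assuming the old boto.utils.canonical_string helper imported above; the key and header values are placeholders:

    signer = Signer('secret-key')
    headers = {'Date': 'Thu, 17 Nov 2005 18:49:58 GMT',
               'Content-Type': 'text/html'}
    # Each call copies self.hmac, so repeated calls give the same signature.
    sig_one = signer.s3_authorization(headers, 'GET', '/bucket/object')
    sig_two = signer.s3_authorization(headers, 'GET', '/bucket/object')
    assert sig_one == sig_two
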
nova/process.py (169 changed lines)
@@ -2,6 +2,7 @@

# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2010 FathomDB Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -20,17 +21,12 @@
Process pool, still buggy right now.
"""

import logging
import multiprocessing
import StringIO

from twisted.internet import defer
from twisted.internet import error
from twisted.internet import process
from twisted.internet import protocol
from twisted.internet import reactor
from twisted.internet import threads
from twisted.python import failure

from nova import flags

@@ -55,111 +51,100 @@ class UnexpectedErrorOutput(IOError):
IOError.__init__(self, "got stdout: %r\nstderr: %r" % (stdout, stderr))


# NOTE(termie): this too
class _BackRelay(protocol.ProcessProtocol):
# This is based on _BackRelay from twister.internal.utils, but modified to
# capture both stdout and stderr, without odd stderr handling, and also to
# handle stdin
class BackRelayWithInput(protocol.ProcessProtocol):
"""
Trivial protocol for communicating with a process and turning its output
into the result of a L{Deferred}.

@ivar deferred: A L{Deferred} which will be called back with all of stdout
and, if C{errortoo} is true, all of stderr as well (mixed together in
one string). If C{errortoo} is false and any bytes are received over
stderr, this will fire with an L{_UnexpectedErrorOutput} instance and
the attribute will be set to C{None}.
and all of stderr as well (as a tuple). C{terminate_on_stderr} is true
and any bytes are received over stderr, this will fire with an
L{_UnexpectedErrorOutput} instance and the attribute will be set to
C{None}.

@ivar onProcessEnded: If C{errortoo} is false and bytes are received over
stderr, this attribute will refer to a L{Deferred} which will be called
back when the process ends. This C{Deferred} is also associated with
the L{_UnexpectedErrorOutput} which C{deferred} fires with earlier in
this case so that users can determine when the process has actually
ended, in addition to knowing when bytes have been received via stderr.
@ivar onProcessEnded: If C{terminate_on_stderr} is false and bytes are
received over stderr, this attribute will refer to a L{Deferred} which
will be called back when the process ends. This C{Deferred} is also
associated with the L{_UnexpectedErrorOutput} which C{deferred} fires
with earlier in this case so that users can determine when the process
has actually ended, in addition to knowing when bytes have been received
via stderr.
"""

def __init__(self, deferred, errortoo=0):
def __init__(self, deferred, started_deferred=None,
terminate_on_stderr=False, check_exit_code=True,
process_input=None):
self.deferred = deferred
self.s = StringIO.StringIO()
if errortoo:
self.errReceived = self.errReceivedIsGood
else:
self.errReceived = self.errReceivedIsBad

def errReceivedIsBad(self, text):
if self.deferred is not None:
self.onProcessEnded = defer.Deferred()
err = UnexpectedErrorOutput(text, self.onProcessEnded)
self.deferred.errback(failure.Failure(err))
self.stdout = StringIO.StringIO()
self.stderr = StringIO.StringIO()
self.started_deferred = started_deferred
self.terminate_on_stderr = terminate_on_stderr
self.check_exit_code = check_exit_code
self.process_input = process_input
self.on_process_ended = None

def errReceived(self, text):
self.stderr.write(text)
if self.terminate_on_stderr and (self.deferred is not None):
self.on_process_ended = defer.Deferred()
self.deferred.errback(UnexpectedErrorOutput(
stdout=self.stdout.getvalue(),
stderr=self.stderr.getvalue()))
self.deferred = None
self.transport.loseConnection()

def errReceivedIsGood(self, text):
self.s.write(text)

def outReceived(self, text):
self.s.write(text)
self.stdout.write(text)

def processEnded(self, reason):
if self.deferred is not None:
self.deferred.callback(self.s.getvalue())
elif self.onProcessEnded is not None:
self.onProcessEnded.errback(reason)


class BackRelayWithInput(_BackRelay):
def __init__(self, deferred, startedDeferred=None, error_ok=0,
input=None):
# Twisted doesn't use new-style classes in most places :(
_BackRelay.__init__(self, deferred, errortoo=error_ok)
self.error_ok = error_ok
self.input = input
self.stderr = StringIO.StringIO()
self.startedDeferred = startedDeferred

def errReceivedIsBad(self, text):
self.stderr.write(text)
self.transport.loseConnection()

def errReceivedIsGood(self, text):
self.stderr.write(text)

def connectionMade(self):
if self.startedDeferred:
self.startedDeferred.callback(self)
if self.input:
self.transport.write(self.input)
self.transport.closeStdin()

def processEnded(self, reason):
if self.deferred is not None:
stdout, stderr = self.s.getvalue(), self.stderr.getvalue()
stdout, stderr = self.stdout.getvalue(), self.stderr.getvalue()
try:
# NOTE(termie): current behavior means if error_ok is True
# we won't throw an error even if the process
# exited with a non-0 status, so you can't be
# okay with stderr output and not with bad exit
# codes.
if not self.error_ok:
if self.check_exit_code:
reason.trap(error.ProcessDone)
self.deferred.callback((stdout, stderr))
except:
# NOTE(justinsb): This logic is a little suspicious to me...
# If the callback throws an exception, then errback will be
# called also. However, this is what the unit tests test for...
self.deferred.errback(UnexpectedErrorOutput(stdout, stderr))
elif self.on_process_ended is not None:
self.on_process_ended.errback(reason)


def getProcessOutput(executable, args=None, env=None, path=None, reactor=None,
error_ok=0, input=None, startedDeferred=None):
if reactor is None:
from twisted.internet import reactor
def connectionMade(self):
if self.started_deferred:
self.started_deferred.callback(self)
if self.process_input:
self.transport.write(self.process_input)
self.transport.closeStdin()

def get_process_output(executable, args=None, env=None, path=None,
process_reactor=None, check_exit_code=True,
process_input=None, started_deferred=None,
terminate_on_stderr=False):
if process_reactor is None:
process_reactor = reactor
args = args and args or ()
env = env and env and {}
d = defer.Deferred()
p = BackRelayWithInput(
d, startedDeferred=startedDeferred, error_ok=error_ok, input=input)
deferred = defer.Deferred()
process_handler = BackRelayWithInput(
deferred,
started_deferred=started_deferred,
check_exit_code=check_exit_code,
process_input=process_input,
terminate_on_stderr=terminate_on_stderr)
# NOTE(vish): commands come in as unicode, but self.executes needs
# strings or process.spawn raises a deprecation warning
executable = str(executable)
if not args is None:
args = [str(x) for x in args]
reactor.spawnProcess(p, executable, (executable,)+tuple(args), env, path)
return d
process_reactor.spawnProcess( process_handler, executable,
(executable,)+tuple(args), env, path)
return deferred


class ProcessPool(object):
@@ -185,26 +170,26 @@ class ProcessPool(object):
return self.execute(executable, args, **kw)

def execute(self, *args, **kw):
d = self._pool.acquire()
deferred = self._pool.acquire()

def _associateProcess(proto):
d.process = proto.transport
def _associate_process(proto):
deferred.process = proto.transport
return proto.transport

started = defer.Deferred()
started.addCallback(_associateProcess)
kw.setdefault('startedDeferred', started)
started.addCallback(_associate_process)
kw.setdefault('started_deferred', started)

d.process = None
d.started = started
deferred.process = None
deferred.started = started

d.addCallback(lambda _: getProcessOutput(*args, **kw))
d.addBoth(self._release)
return d
deferred.addCallback(lambda _: get_process_output(*args, **kw))
deferred.addBoth(self._release)
return deferred

def _release(self, rv=None):
def _release(self, retval=None):
self._pool.release()
return rv
return retval


class SharedPool(object):

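With this rewrite, callers of the renamed get_process_output get a Deferred that fires with an (stdout, stderr) tuple when the child exits, and check_exit_code=True turns a non-zero exit into an errback carrying UnexpectedErrorOutput. A hedged sketch of driving it from Twisted code, not taken from the commit; the commands are arbitrary and a running reactor is assumed:

    # Sketch only; assumes a running Twisted reactor.
    d = get_process_output('/bin/echo', ['hello'], check_exit_code=True)

    def _show(result):
        stdout, stderr = result
        logging.debug('stdout=%r stderr=%r', stdout, stderr)
    d.addCallback(_show)

    # The pooled form used elsewhere in nova wraps the same call:
    pool = ProcessPool(2)
    d2 = pool.simple_execute('cat BAD_FILE', check_exit_code=False)
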
@@ -166,7 +166,6 @@ class NetworkTestCase(test.TrialTestCase):
release_ip(mac3, address3, hostname, net.bridge_name)
net = model.get_project_network(self.projects[0].id, "default")
self.service.deallocate_fixed_ip(firstaddress)
release_ip(mac, firstaddress, hostname, net.bridge_name)

def test_vpn_ip_and_port_looks_valid(self):
"""Ensure the vpn ip and port are reasonable"""

@@ -48,7 +48,7 @@ class ProcessTestCase(test.TrialTestCase):

def test_execute_stderr(self):
pool = process.ProcessPool(2)
d = pool.simple_execute('cat BAD_FILE', error_ok=1)
d = pool.simple_execute('cat BAD_FILE', check_exit_code=False)
def _check(rv):
self.assertEqual(rv[0], '')
self.assert_('No such file' in rv[1])

nova/wsgi_test.py (new file, 96 lines)
@@ -0,0 +1,96 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2010 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Test WSGI basics and provide some helper functions for other WSGI tests.
"""

import unittest

import routes
import webob

from nova import wsgi


class Test(unittest.TestCase):

def test_debug(self):

class Application(wsgi.Application):
"""Dummy application to test debug."""

def __call__(self, environ, start_response):
start_response("200", [("X-Test", "checking")])
return ['Test result']

application = wsgi.Debug(Application())
result = webob.Request.blank('/').get_response(application)
self.assertEqual(result.body, "Test result")

def test_router(self):

class Application(wsgi.Application):
"""Test application to call from router."""

def __call__(self, environ, start_response):
start_response("200", [])
return ['Router result']

class Router(wsgi.Router):
"""Test router."""

def __init__(self):
mapper = routes.Mapper()
mapper.connect("/test", controller=Application())
super(Router, self).__init__(mapper)

result = webob.Request.blank('/test').get_response(Router())
self.assertEqual(result.body, "Router result")
result = webob.Request.blank('/bad').get_response(Router())
self.assertNotEqual(result.body, "Router result")

def test_controller(self):

class Controller(wsgi.Controller):
"""Test controller to call from router."""
test = self

def show(self, req, id): # pylint: disable-msg=W0622,C0103
"""Default action called for requests with an ID."""
self.test.assertEqual(req.path_info, '/tests/123')
self.test.assertEqual(id, '123')
return id

class Router(wsgi.Router):
"""Test router."""

def __init__(self):
mapper = routes.Mapper()
mapper.resource("test", "tests", controller=Controller())
super(Router, self).__init__(mapper)

result = webob.Request.blank('/tests/123').get_response(Router())
self.assertEqual(result.body, "123")
result = webob.Request.blank('/test/123').get_response(Router())
self.assertNotEqual(result.body, "123")

def test_serializer(self):
# TODO(eday): Placeholder for serializer testing.
pass
