Merge trunk and apply some sphinx love.
commit b76286c5ef

doc/source/_templates/.DS_Store (new binary file, not shown)
@@ -27,7 +27,13 @@ extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.todo',
 todo_include_todos = True
 
 # Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+# Changing the path so that the Hudson build output contains GA code and the source
+# docs do not contain the code so local, offline sphinx builds are "clean."
+templates_path = []
+if os.getenv('HUDSON_PUBLISH_DOCS'):
+    templates_path = ['_ga', '_templates']
+else:
+    templates_path = ['_templates']
 
 # The suffix of source filenames.
 source_suffix = '.rst'
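The hunk above keys the Sphinx template path off a single environment variable. A small illustrative sketch of the behaviour it relies on (pick_templates_path is a hypothetical helper, not part of the commit): any non-empty HUDSON_PUBLISH_DOCS selects the GA-enabled template set, while local offline builds stay "clean".

    import os

    # Hypothetical helper mirroring the conf.py logic above.
    def pick_templates_path():
        if os.getenv('HUDSON_PUBLISH_DOCS'):
            return ['_ga', '_templates']
        return ['_templates']

    # Assuming the variable is unset in a local build environment:
    assert pick_templates_path() == ['_templates']
    os.environ['HUDSON_PUBLISH_DOCS'] = '1'   # what a Hudson publish job might export
    assert pick_templates_path() == ['_ga', '_templates']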
@@ -33,14 +33,15 @@ DEFAULT_SECRET_KEY = 'admin'
 
 class UserInfo(object):
     """
-    Information about a Nova user, as parsed through SAX
-    fields include:
-        username
-        accesskey
-        secretkey
-
-    and an optional field containing a zip with X509 cert & rc
-    file
+    Information about a Nova user, as parsed through SAX.
+
+    **Fields Include**
+
+    * username
+    * accesskey
+    * secretkey
+    * file (optional) containing zip of X509 cert & rc file
+
     """
 
     def __init__(self, connection=None, username=None, endpoint=None):
@@ -68,9 +69,13 @@ class UserInfo(object):
 class UserRole(object):
     """
     Information about a Nova user's role, as parsed through SAX.
-    Fields include:
-        role
+
+    **Fields include**
+
+    * role
+
     """
+
     def __init__(self, connection=None):
         self.connection = connection
         self.role = None
@@ -90,12 +95,15 @@ class UserRole(object):
 
 class ProjectInfo(object):
     """
-    Information about a Nova project, as parsed through SAX
-    Fields include:
-        projectname
-        description
-        projectManagerId
-        memberIds
+    Information about a Nova project, as parsed through SAX.
+
+    **Fields include**
+
+    * projectname
+    * description
+    * projectManagerId
+    * memberIds
+
     """
 
     def __init__(self, connection=None):
@@ -127,8 +135,11 @@ class ProjectInfo(object):
 class ProjectMember(object):
     """
     Information about a Nova project member, as parsed through SAX.
-    Fields include:
-        memberId
+
+    **Fields include**
+
+    * memberId
+
     """
 
     def __init__(self, connection=None):
@@ -150,14 +161,18 @@ class ProjectMember(object):
 
 class HostInfo(object):
     """
-    Information about a Nova Host, as parsed through SAX:
-        Disk stats
-        Running Instances
-        Memory stats
-        CPU stats
-        Network address info
-        Firewall info
-        Bridge and devices
+    Information about a Nova Host, as parsed through SAX.
+
+    **Fields Include**
+
+    * Disk stats
+    * Running Instances
+    * Memory stats
+    * CPU stats
+    * Network address info
+    * Firewall info
+    * Bridge and devices
+
     """
 
     def __init__(self, connection=None):
@@ -257,9 +272,12 @@ class NovaAdminClient(object):
                                  [('item', UserRole)])
 
     def get_user_roles(self, user, project=None):
-        """Returns a list of roles for the given user. Omitting project will
-        return any global roles that the user has. Specifying project will
-        return only project specific roles."""
+        """Returns a list of roles for the given user.
+
+        Omitting project will return any global roles that the user has.
+        Specifying project will return only project specific roles.
+
+        """
        params = {'User': user}
        if project:
            params['Project'] = project
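A brief usage sketch of the signature documented above. FakeAdminClient is a stand-in for NovaAdminClient (whose real constructor arguments are omitted here), and the user and project names are made up.

    class FakeAdminClient(object):
        """Stand-in for NovaAdminClient; only the call shape matters here."""
        def get_user_roles(self, user, project=None):
            # The real client issues an admin API request; this just echoes the shape.
            return ['cloudadmin'] if project is None else ['projectmanager']

    client = FakeAdminClient()
    print(client.get_user_roles('alice'))                    # global roles only
    print(client.get_user_roles('alice', project='proj1'))   # project-specific roles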
@@ -99,6 +99,7 @@ class CloudController(object):
     """
     def __init__(self):
         self.network_manager = utils.import_object(FLAGS.network_manager)
+        self.compute_manager = utils.import_object(FLAGS.compute_manager)
         self.setup()
 
     def __str__(self):
@@ -835,21 +836,21 @@ class CloudController(object):
         elevated = context.elevated()
 
         for num in range(num_instances):
-            instance_ref = db.instance_create(context, base_options)
+            instance_ref = self.compute_manager.create_instance(context,
+                                            security_groups,
+                                            mac_address=utils.generate_mac(),
+                                            launch_index=num,
+                                            **base_options)
             inst_id = instance_ref['id']
 
-            for security_group_id in security_groups:
-                db.instance_add_security_group(elevated,
-                                               inst_id,
-                                               security_group_id)
-
-            inst = {}
-            inst['mac_address'] = utils.generate_mac()
-            inst['launch_index'] = num
             internal_id = instance_ref['internal_id']
             ec2_id = internal_id_to_ec2_id(internal_id)
-            inst['hostname'] = ec2_id
-            db.instance_update(context, inst_id, inst)
+            self.compute_manager.update_instance(context,
+                                                 inst_id,
+                                                 hostname=ec2_id)
 
             # TODO(vish): This probably should be done in the scheduler
             #             or in compute as a call. The network should be
             #             allocated after the host is assigned and setup
|
|||||||
id_str)
|
id_str)
|
||||||
continue
|
continue
|
||||||
now = datetime.datetime.utcnow()
|
now = datetime.datetime.utcnow()
|
||||||
db.instance_update(context,
|
self.compute_manager.update_instance(context,
|
||||||
instance_ref['id'],
|
instance_ref['id'],
|
||||||
{'state_description': 'terminating',
|
state_description='terminating',
|
||||||
'state': 0,
|
state=0,
|
||||||
'terminated_at': now})
|
terminated_at=now)
|
||||||
|
|
||||||
# FIXME(ja): where should network deallocate occur?
|
# FIXME(ja): where should network deallocate occur?
|
||||||
address = db.instance_get_floating_address(context,
|
address = db.instance_get_floating_address(context,
|
||||||
instance_ref['id'])
|
instance_ref['id'])
|
||||||
|
@@ -95,6 +95,7 @@ class Controller(wsgi.Controller):
         db_driver = FLAGS.db_driver
         self.db_driver = utils.import_object(db_driver)
         self.network_manager = utils.import_object(FLAGS.network_manager)
+        self.compute_manager = utils.import_object(FLAGS.compute_manager)
         super(Controller, self).__init__()
 
     def index(self, req):
@@ -242,34 +243,30 @@ class Controller(wsgi.Controller):
         inst['memory_mb'] = flavor['memory_mb']
         inst['vcpus'] = flavor['vcpus']
         inst['local_gb'] = flavor['local_gb']
 
-        ref = self.db_driver.instance_create(ctxt, inst)
-        inst['id'] = ref.internal_id
-
         inst['mac_address'] = utils.generate_mac()
 
-        #TODO(dietz) is this necessary?
         inst['launch_index'] = 0
 
-        inst['hostname'] = str(ref.internal_id)
-        self.db_driver.instance_update(ctxt, inst['id'], inst)
+        ref = self.compute_manager.create_instance(ctxt, **inst)
+        inst['id'] = ref['internal_id']
 
-        network_manager = utils.import_object(FLAGS.network_manager)
-        address = network_manager.allocate_fixed_ip(ctxt,
-                                                    inst['id'])
+        inst['hostname'] = str(ref['internal_id'])
+        self.compute_manager.update_instance(ctxt, inst['id'], **inst)
+
+        address = self.network_manager.allocate_fixed_ip(ctxt,
+                                                         inst['id'])
 
         # TODO(vish): This probably should be done in the scheduler
         #             network is setup when host is assigned
-        network_topic = self._get_network_topic(ctxt, network_manager)
+        network_topic = self._get_network_topic(ctxt)
         rpc.call(ctxt,
                  network_topic,
                  {"method": "setup_fixed_ip",
                   "args": {"address": address}})
         return inst
 
-    def _get_network_topic(self, context, network_manager):
+    def _get_network_topic(self, context):
         """Retrieves the network host for a project"""
-        network_ref = network_manager.get_network(context)
+        network_ref = self.network_manager.get_network(context)
         host = network_ref['host']
         if not host:
             host = rpc.call(context,
@@ -83,6 +83,46 @@ class ComputeManager(manager.Manager):
         """This call passes straight through to the virtualization driver."""
         yield self.driver.refresh_security_group(security_group_id)
 
+    def create_instance(self, context, security_groups=[], **kwargs):
+        """Creates the instance in the datastore and returns the
+        new instance as a mapping
+
+        :param context: The security context
+        :param security_groups: list of security group ids to
+                                attach to the instance
+        :param kwargs: All additional keyword args are treated
+                       as data fields of the instance to be
+                       created
+
+        :retval Returns a mapping of the instance information
+                that has just been created
+
+        """
+        instance_ref = self.db.instance_create(context, kwargs)
+        inst_id = instance_ref['id']
+
+        elevated = context.elevated()
+        security_groups = kwargs.get('security_groups', [])
+        for security_group_id in security_groups:
+            self.db.instance_add_security_group(elevated,
+                                                inst_id,
+                                                security_group_id)
+        return instance_ref
+
+    def update_instance(self, context, instance_id, **kwargs):
+        """Updates the instance in the datastore.
+
+        :param context: The security context
+        :param instance_id: ID of the instance to update
+        :param kwargs: All additional keyword args are treated
+                       as data fields of the instance to be
+                       updated
+
+        :retval None
+
+        """
+        self.db.instance_update(context, instance_id, kwargs)
+
     @defer.inlineCallbacks
     @exception.wrap_exception
     def run_instance(self, context, instance_id, **_kwargs):
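A rough, self-contained sketch of the calling convention the two new manager methods document. FakeDB and FakeComputeManager are stand-ins (not part of the commit), and the field names passed through **kwargs are only examples.

    class FakeDB(object):
        """Stand-in for the nova.db layer used by ComputeManager."""
        def instance_create(self, context, values):
            return dict(values, id=1, internal_id=1)

        def instance_add_security_group(self, context, instance_id, group_id):
            print('attach security group %s to instance %s' % (group_id, instance_id))

        def instance_update(self, context, instance_id, values):
            print('update instance %s with %s' % (instance_id, values))

    class FakeComputeManager(object):
        """Mirrors the create_instance/update_instance signatures added above."""
        db = FakeDB()

        def create_instance(self, context, security_groups=[], **kwargs):
            instance_ref = self.db.instance_create(context, kwargs)
            for group_id in security_groups:
                self.db.instance_add_security_group(context,
                                                    instance_ref['id'],
                                                    group_id)
            return instance_ref

        def update_instance(self, context, instance_id, **kwargs):
            self.db.instance_update(context, instance_id, kwargs)

    manager = FakeComputeManager()
    ref = manager.create_instance('ctx', [42], image_id='ami-1234', launch_index=0)
    manager.update_instance('ctx', ref['id'], hostname='i-00000001')

This is the same shape of call the EC2 and OpenStack API hunks above switch to, in place of direct db.instance_create/db.instance_update calls.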
@@ -15,10 +15,11 @@
 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 # License for the specific language governing permissions and limitations
 # under the License.
 
 """
-Wrappers around standard crypto, including root and intermediate CAs,
-SSH key_pairs and x509 certificates.
+Wrappers around standard crypto data elements.
+
+Includes root and intermediate CAs, SSH key_pairs and x509 certificates.
 """
 
 import base64
@@ -227,12 +228,12 @@ def mkcacert(subject='nova', years=1):
 
 def compute_md5(fp):
     """
-    @type fp: file
-    @param fp: File pointer to the file to MD5 hash. The file pointer will be
+    :type fp: file
+    :param fp: File pointer to the file to MD5 hash. The file pointer will be
                reset to the beginning of the file before the method returns.
 
-    @rtype: tuple
-    @return: the hex digest version of the MD5 hash
+    :rtype: tuple
+    :return: the hex digest version of the MD5 hash
     """
     m = hashlib.md5()
     fp.seek(0)
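The hunk above swaps epydoc-style @fields for Sphinx-style :fields:. A toy example of the same convention applied to an unrelated, made-up helper (checksum_length is not in the tree):

    import hashlib

    def checksum_length(data):
        """Return the length of the hex MD5 digest of *data*.

        :type data: bytes
        :param data: bytes to hash
        :rtype: int
        :return: length of the hexadecimal digest (always 32 for MD5)
        """
        return len(hashlib.md5(data).hexdigest())

    assert checksum_length(b'nova') == 32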
@@ -236,8 +236,7 @@ def service_get_by_args(context, host, binary):
 @require_admin_context
 def service_create(context, values):
     service_ref = models.Service()
-    for (key, value) in values.iteritems():
-        service_ref[key] = value
+    service_ref.update(values)
     service_ref.save()
     return service_ref
 
@@ -247,8 +246,7 @@ def service_update(context, service_id, values):
     session = get_session()
     with session.begin():
         service_ref = service_get(context, service_id, session=session)
-        for (key, value) in values.iteritems():
-            service_ref[key] = value
+        service_ref.update(values)
         service_ref.save(session=session)
 
 
@@ -279,8 +277,7 @@ def floating_ip_allocate_address(context, host, project_id):
 @require_context
 def floating_ip_create(context, values):
     floating_ip_ref = models.FloatingIp()
-    for (key, value) in values.iteritems():
-        floating_ip_ref[key] = value
+    floating_ip_ref.update(values)
     floating_ip_ref.save()
     return floating_ip_ref['address']
 
@@ -451,8 +448,7 @@ def fixed_ip_associate_pool(context, network_id, instance_id):
 @require_context
 def fixed_ip_create(_context, values):
     fixed_ip_ref = models.FixedIp()
-    for (key, value) in values.iteritems():
-        fixed_ip_ref[key] = value
+    fixed_ip_ref.update(values)
     fixed_ip_ref.save()
     return fixed_ip_ref['address']
 
@@ -523,8 +519,7 @@ def fixed_ip_update(context, address, values):
         fixed_ip_ref = fixed_ip_get_by_address(context,
                                                address,
                                                session=session)
-        for (key, value) in values.iteritems():
-            fixed_ip_ref[key] = value
+        fixed_ip_ref.update(values)
         fixed_ip_ref.save(session=session)
 
 
@@ -537,8 +532,7 @@ def fixed_ip_update(context, address, values):
 @require_context
 def instance_create(context, values):
     instance_ref = models.Instance()
-    for (key, value) in values.iteritems():
-        instance_ref[key] = value
+    instance_ref.update(values)
 
     session = get_session()
     with session.begin():
@@ -731,8 +725,7 @@ def instance_update(context, instance_id, values):
     session = get_session()
     with session.begin():
         instance_ref = instance_get(context, instance_id, session=session)
-        for (key, value) in values.iteritems():
-            instance_ref[key] = value
+        instance_ref.update(values)
         instance_ref.save(session=session)
 
 
@@ -754,8 +747,7 @@ def instance_add_security_group(context, instance_id, security_group_id):
 @require_context
 def key_pair_create(context, values):
     key_pair_ref = models.KeyPair()
-    for (key, value) in values.iteritems():
-        key_pair_ref[key] = value
+    key_pair_ref.update(values)
     key_pair_ref.save()
     return key_pair_ref
 
@@ -870,8 +862,7 @@ def network_count_reserved_ips(context, network_id):
 @require_admin_context
 def network_create_safe(context, values):
     network_ref = models.Network()
-    for (key, value) in values.iteritems():
-        network_ref[key] = value
+    network_ref.update(values)
     try:
         network_ref.save()
         return network_ref
@@ -980,8 +971,7 @@ def network_update(context, network_id, values):
     session = get_session()
     with session.begin():
         network_ref = network_get(context, network_id, session=session)
-        for (key, value) in values.iteritems():
-            network_ref[key] = value
+        network_ref.update(values)
         network_ref.save(session=session)
 
 
@@ -1031,8 +1021,7 @@ def export_device_count(context):
 @require_admin_context
 def export_device_create_safe(context, values):
     export_device_ref = models.ExportDevice()
-    for (key, value) in values.iteritems():
-        export_device_ref[key] = value
+    export_device_ref.update(values)
     try:
         export_device_ref.save()
         return export_device_ref
@@ -1060,8 +1049,7 @@ def auth_get_token(_context, token_hash):
 
 def auth_create_token(_context, token):
     tk = models.AuthToken()
-    for k, v in token.iteritems():
-        tk[k] = v
+    tk.update(token)
     tk.save()
     return tk
 
@@ -1087,8 +1075,7 @@ def quota_get(context, project_id, session=None):
 @require_admin_context
 def quota_create(context, values):
     quota_ref = models.Quota()
-    for (key, value) in values.iteritems():
-        quota_ref[key] = value
+    quota_ref.update(values)
     quota_ref.save()
     return quota_ref
 
@@ -1098,8 +1085,7 @@ def quota_update(context, project_id, values):
     session = get_session()
     with session.begin():
         quota_ref = quota_get(context, project_id, session=session)
-        for (key, value) in values.iteritems():
-            quota_ref[key] = value
+        quota_ref.update(values)
         quota_ref.save(session=session)
 
 
@@ -1148,8 +1134,7 @@ def volume_attached(context, volume_id, instance_id, mountpoint):
 @require_context
 def volume_create(context, values):
     volume_ref = models.Volume()
-    for (key, value) in values.iteritems():
-        volume_ref[key] = value
+    volume_ref.update(values)
 
     session = get_session()
     with session.begin():
@@ -1306,8 +1291,7 @@ def volume_update(context, volume_id, values):
     session = get_session()
     with session.begin():
         volume_ref = volume_get(context, volume_id, session=session)
-        for (key, value) in values.iteritems():
-            volume_ref[key] = value
+        volume_ref.update(values)
         volume_ref.save(session=session)
 
 
@@ -1400,8 +1384,7 @@ def security_group_create(context, values):
     # FIXME(devcamcar): Unless I do this, rules fails with lazy load exception
    # once save() is called. This will get cleaned up in next orm pass.
     security_group_ref.rules
-    for (key, value) in values.iteritems():
-        security_group_ref[key] = value
+    security_group_ref.update(values)
     security_group_ref.save()
     return security_group_ref
 
@@ -1455,8 +1438,7 @@ def security_group_rule_get(context, security_group_rule_id, session=None):
 @require_context
 def security_group_rule_create(context, values):
     security_group_rule_ref = models.SecurityGroupIngressRule()
-    for (key, value) in values.iteritems():
-        security_group_rule_ref[key] = value
+    security_group_rule_ref.update(values)
     security_group_rule_ref.save()
     return security_group_rule_ref
 
@@ -1508,8 +1490,7 @@ def user_get_by_access_key(context, access_key, session=None):
 @require_admin_context
 def user_create(_context, values):
     user_ref = models.User()
-    for (key, value) in values.iteritems():
-        user_ref[key] = value
+    user_ref.update(values)
     user_ref.save()
     return user_ref
 
@@ -1537,8 +1518,7 @@ def user_get_all(context):
 
 def project_create(_context, values):
     project_ref = models.Project()
-    for (key, value) in values.iteritems():
-        project_ref[key] = value
+    project_ref.update(values)
     project_ref.save()
     return project_ref
 
@@ -1600,8 +1580,7 @@ def user_update(context, user_id, values):
     session = get_session()
     with session.begin():
         user_ref = user_get(context, user_id, session=session)
-        for (key, value) in values.iteritems():
-            user_ref[key] = value
+        user_ref.update(values)
         user_ref.save(session=session)
 
 
@@ -1609,8 +1588,7 @@ def project_update(context, project_id, values):
     session = get_session()
     with session.begin():
         project_ref = project_get(context, project_id, session=session)
-        for (key, value) in values.iteritems():
-            project_ref[key] = value
+        project_ref.update(values)
         project_ref.save(session=session)
 
 
@@ -81,6 +81,16 @@ class NovaBase(object):
             n = self._i.next().name
             return n, getattr(self, n)
 
+    def update(self, values):
+        """Make the model object behave like a dict"""
+        for k, v in values.iteritems():
+            setattr(self, k, v)
+
+    def iteritems(self):
+        """Make the model object behave like a dict"""
+        return iter(self)
+
+
 # TODO(vish): Store images in the database instead of file system
 #class Image(BASE, NovaBase):
 #    """Represents an image in the datastore"""
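These two helpers are what let the sqlalchemy API hunks above replace every per-key copy loop with ref.update(values). A minimal sketch of the same idea on a plain object (FakeRow is not the real NovaBase; Python 2 iteritems(), matching the diff):

    class FakeRow(object):
        """Toy model exposing the same update() contract as NovaBase."""
        def update(self, values):
            for k, v in values.iteritems():
                setattr(self, k, v)

    values = {'host': 'node1', 'binary': 'nova-compute'}

    # Old pattern removed throughout sqlalchemy/api.py (item assignment on the model):
    ref = FakeRow()
    for (key, value) in values.iteritems():
        setattr(ref, key, value)

    # New pattern:
    ref2 = FakeRow()
    ref2.update(values)
    assert ref2.host == 'node1' and ref2.binary == 'nova-compute'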
@@ -30,6 +30,7 @@ from nova import exception as exc
 import nova.api.openstack.auth
 from nova.image import service
 from nova.image.services import glance
+from nova.tests import fake_flags
 from nova.wsgi import Router
 
 
@@ -24,22 +24,28 @@ from nova.api.openstack import API
 from nova.api.openstack import faults
 from webob import Request
 
 
 class APITest(unittest.TestCase):
+
     def test_exceptions_are_converted_to_faults(self):
+
         @webob.dec.wsgify
         def succeed(req):
             return 'Succeeded'
 
         @webob.dec.wsgify
         def raise_webob_exc(req):
             raise webob.exc.HTTPNotFound(explanation='Raised a webob.exc')
 
         @webob.dec.wsgify
         def fail(req):
             raise Exception("Threw an exception")
 
         @webob.dec.wsgify
         def raise_api_fault(req):
             exc = webob.exc.HTTPNotFound(explanation='Raised a webob.exc')
             return faults.Fault(exc)
 
         api = API()
 
         api.application = succeed
@@ -91,9 +91,7 @@ class ServersTest(unittest.TestCase):
     pass
 
 def instance_create(context, inst):
-    class Foo(object):
-        internal_id = 1
-    return Foo()
+    return {'id': 1, 'internal_id': 1}
 
 def fake_method(*args, **kwargs):
     pass
@@ -213,10 +213,10 @@ def deferredToThread(f):
 
 def xhtml_escape(value):
     """Escapes a string so it is valid within XML or XHTML.
+
     Code is directly from the utf8 function in
     http://github.com/facebook/tornado/blob/master/tornado/escape.py
+
     """
     return saxutils.escape(value, {'"': '&quot;'})
 
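For reference, a short example of what the extra entity map in xhtml_escape buys on top of saxutils.escape's default handling of &, < and > (the expected output is shown in the trailing comment):

    from xml.sax import saxutils

    # Same call pattern as xhtml_escape above; double quotes are escaped too.
    print(saxutils.escape('<a href="x">&', {'"': '&quot;'}))
    # -> &lt;a href=&quot;x&quot;&gt;&amp;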
@@ -232,4 +232,3 @@ def utf8(value):
         return value.encode("utf-8")
     assert isinstance(value, str)
     return value
-