Validate all resource properties
Change-Id: Ib0346fd22636d3eae31c688426b966f07acda19c
Signed-off-by: Angus Salkeld <asalkeld@redhat.com>

parent a83fcc6aa3
commit 03ae5b2d1c
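
The hunks below all follow the same pattern: each engine resource declares a properties_schema dict, and a new CheckedDict/Properties helper checks template values against it (type, Required, MinValue/MaxValue, AllowedValues, AllowedPattern, Implemented). As a rough standalone sketch of that behaviour (not the Heat code itself; Python 3 used for brevity):

    import re

    def str_to_num(s):
        try:
            return int(s)
        except ValueError:
            return float(s)

    def check_value(key, schema, value):
        # Mirrors the schema keys used in the hunks below.
        kind = schema.get('Type', 'String')
        if kind == 'String':
            if not isinstance(value, str):
                raise ValueError('%s Value must be a string' % key)
            pattern = schema.get('AllowedPattern')
            if pattern and not re.match(pattern, value):
                raise ValueError('Pattern does not match %s' % key)
        elif kind in ('Integer', 'Number', 'Float'):
            num = str_to_num(value)
            if 'MinValue' in schema and num < str_to_num(schema['MinValue']):
                raise ValueError('%s is out of range' % key)
            if 'MaxValue' in schema and num > str_to_num(schema['MaxValue']):
                raise ValueError('%s is out of range' % key)
        if 'AllowedValues' in schema and value not in schema['AllowedValues']:
            raise ValueError('%s is not an allowed value' % key)

    check_value('Timeout', {'Type': 'Number', 'MinValue': '1'}, '300')       # passes
    check_value('Device', {'AllowedPattern': '/dev/vd[b-z]'}, '/dev/vdb')    # passes
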
@@ -24,4 +24,3 @@ include heat/cloudinit/part-handler.py
include heat/db/sqlalchemy/migrate_repo/migrate.cfg
graft etc
graft docs
graft var
@@ -583,7 +583,7 @@ class SourcesHandler(object):
cmd_str = ''
logger.debug("Decompressing")
(r, ext) = os.path.splitext(archive)
if ext == '.tgz':
if ext == '.tgz':
cmd_str = 'tar -C %s -xzf %s' % (dest_dir, archive)
elif ext == '.tbz2':
cmd_str = 'tar -C %s -xjf %s' % (dest_dir, archive)
@@ -126,7 +126,7 @@ class KeystoneStrategy(BaseStrategy):
# 3. In some configurations nova makes redirection to
# v2.0 keystone endpoint. Also, new location does not
# contain real endpoint, only hostname and port.
if 'v2.0' not in auth_url:
if 'v2.0' not in auth_url:
auth_url = urlparse.urljoin(auth_url, 'v2.0/')
else:
# If we sucessfully auth'd, then memorize the correct auth_url
@@ -566,9 +566,9 @@ class BaseClient(object):
return (e.errno != errno.ESPIPE)

def _sendable(self, body):
return (SENDFILE_SUPPORTED and
return (SENDFILE_SUPPORTED and
hasattr(body, 'fileno') and
self._seekable(body) and
self._seekable(body) and
not self.use_ssl)

def _iterable(self, body):
@@ -39,10 +39,15 @@ class CheckedDict(collections.MutableMapping):
except ValueError:
return float(s)

num_converter = {'Integer': int,
'Number': str_to_num,
'Float': float}

if not key in self.data:
raise KeyError('key %s not found' % key)

if 'Type' in self.data[key]:
t = self.data[key]['Type']
if self.data[key]['Type'] == 'String':
if not isinstance(value, (basestring, unicode)):
raise ValueError('%s Value must be a string' % \
@@ -62,15 +67,15 @@ class CheckedDict(collections.MutableMapping):
raise ValueError('Pattern does not match %s' % \
(key))

elif self.data[key]['Type'] == 'Number':
# just try convert to an int/float, it will throw a ValueError
num = str_to_num(value)
elif self.data[key]['Type'] in ['Integer', 'Number', 'Float']:
# just try convert and see if it will throw a ValueError
num = num_converter[t](value)
minn = num
maxn = num
if 'MaxValue' in self.data[key]:
maxn = str_to_num(self.data[key]['MaxValue'])
maxn = num_converter[t](self.data[key]['MaxValue'])
if 'MinValue' in self.data[key]:
minn = str_to_num(self.data[key]['MinValue'])
minn = num_converter[t](self.data[key]['MinValue'])
if num > maxn or num < minn:
raise ValueError('%s is out of range' % key)
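
The change above replaces the single str_to_num() conversion with a per-type converter lookup, so 'Integer' bounds compare as ints and 'Float' bounds as floats. A minimal illustration of that lookup (the values are made up):

    def str_to_num(s):
        try:
            return int(s)
        except ValueError:
            return float(s)

    num_converter = {'Integer': int,
                     'Number': str_to_num,
                     'Float': float}

    t = 'Integer'
    num = num_converter[t]('5')     # 5 as an int, not 5.0
    minn = num_converter[t]('1')    # MinValue taken from the schema
    maxn = num_converter[t]('10')   # MaxValue taken from the schema
    if num > maxn or num < minn:
        raise ValueError('Size is out of range')
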
@@ -98,18 +103,44 @@ class CheckedDict(collections.MutableMapping):
if not self.data[key]['Required']:
return None
else:
raise ValueError('key %s has no value' % key)
raise ValueError('%s must be provided' % key)
else:
raise ValueError('key %s has no value' % key)
raise ValueError('%s must be provided' % key)

def __len__(self):
return len(self.data)

def __contains__(self, key):
return self.data.__contains__(key)
return key in self.data

def __iter__(self):
return self.data.__iter__()
return iter(self.data)

def __delitem__(self, k):
del self.data[k]


class Properties(CheckedDict):
def __init__(self, schema):
CheckedDict.__init__(self)
self.data = schema

# set some defaults
for s in self.data:
if not 'Implemented' in self.data[s]:
self.data[s]['Implemented'] = True
if not 'Required' in self.data[s]:
self.data[s]['Required'] = False

def validate(self):
for key in self.data:
# are there missing required Properties
if 'Required' in self.data[key] and not 'Value' in self.data[key]:
return {'Error': \
'%s Property must be provided' % key}

# are there unimplemented Properties
if not self.data[key]['Implemented'] and 'Value' in self.data[key]:
return {'Error': \
'%s Property not implemented yet' % key}
return None
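
Properties fills in the 'Implemented' and 'Required' defaults at construction time, and validate() reports problems as an {'Error': ...} dict instead of raising. A usage sketch, assuming the class is importable as shown later in this commit (from heat.engine import checkeddict); the schema values are illustrative only:

    from heat.engine import checkeddict

    schema = {'InstanceId': {'Type': 'String', 'Required': True},
              'AllocationId': {'Type': 'String', 'Implemented': False}}

    props = checkeddict.Properties(schema)   # defaults Implemented/Required
    err = props.validate()                   # no value set for InstanceId yet
    print(err)   # {'Error': 'InstanceId Property must be provided'}
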
@@ -24,14 +24,14 @@ logger = logging.getLogger(__file__)


class ElasticIp(Resource):
properties_schema = {'Domain': {'Type': 'String',
'Implemented': False},
'InstanceId': {'Type': 'String'}}

def __init__(self, name, json_snippet, stack):
super(ElasticIp, self).__init__(name, json_snippet, stack)
self.ipaddress = ''

if 'Domain' in self.t['Properties']:
logger.warn('*** can\'t support Domain %s yet' % \
(self.t['Properties']['Domain']))

def create(self):
"""Allocate a floating IP for the current tenant."""
if self.state != None:
@@ -49,7 +49,7 @@ class ElasticIp(Resource):
'''
Validate the ip address here
'''
return None
return Resource.validate(self)

def reload(self):
'''
@@ -90,14 +90,26 @@ class ElasticIp(Resource):


class ElasticIpAssociation(Resource):
properties_schema = {'InstanceId': {'Type': 'String',
'Required': True},
'EIP': {'Type': 'String'},
'AllocationId': {'Type': 'String',
'Implemented': False}}

def __init__(self, name, json_snippet, stack):
super(ElasticIpAssociation, self).__init__(name, json_snippet, stack)

def FnGetRefId(self):
if not 'EIP' in self.t['Properties']:
if not 'EIP' in self.properties:
return unicode('0.0.0.0')
else:
return unicode(self.t['Properties']['EIP'])
return unicode(self.properties['EIP'])

def validate(self):
'''
Validate the ip address here
'''
return Resource.validate(self)

def create(self):
"""Add a floating IP address to a server."""
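
ElasticIpAssociation now reads through self.properties, which behaves like a dict (CheckedDict is a MutableMapping), so membership tests and lookups keep the same shape as the old self.t['Properties'] access but return validated values. For illustration only (the template fragment below is hypothetical):

    # Hypothetical template fragment for an ElasticIpAssociation resource.
    snippet = {'Type': 'AWS::EC2::EIPAssociation',
               'Properties': {'InstanceId': 'i-00000001',
                              'EIP': '10.0.0.5'}}

    # Old style: raw template access.
    ref = snippet['Properties'].get('EIP', '0.0.0.0')

    # New style (inside the resource): the same lookup, but via the
    # schema-checked properties object:
    #   if not 'EIP' in self.properties: return unicode('0.0.0.0')
    #   return unicode(self.properties['EIP'])
    print(ref)
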
@@ -106,13 +118,14 @@ class ElasticIpAssociation(Resource):
return
self.state_set(self.CREATE_IN_PROGRESS)
super(ElasticIpAssociation, self).create()
logger.debug('ElasticIpAssociation %s.add_floating_ip(%s)' % \
(self.t['Properties']['InstanceId'],
self.t['Properties']['EIP']))

server = self.nova().servers.get(self.t['Properties']['InstanceId'])
server.add_floating_ip(self.t['Properties']['EIP'])
self.instance_id_set(self.t['Properties']['EIP'])
logger.debug('ElasticIpAssociation %s.add_floating_ip(%s)' % \
(self.properties['InstanceId'],
self.properties['EIP']))

server = self.nova().servers.get(self.properties['InstanceId'])
server.add_floating_ip(self.properties['EIP'])
self.instance_id_set(self.properties['EIP'])
self.state_set(self.CREATE_COMPLETE)

def delete(self):
@@ -124,7 +137,7 @@ class ElasticIpAssociation(Resource):
self.state_set(self.DELETE_IN_PROGRESS)
Resource.delete(self)

server = self.nova().servers.get(self.t['Properties']['InstanceId'])
server.remove_floating_ip(self.t['Properties']['EIP'])
server = self.nova().servers.get(self.properties['InstanceId'])
server.remove_floating_ip(self.properties['EIP'])

self.state_set(self.DELETE_COMPLETE)
@@ -47,14 +47,49 @@ else:


class Instance(Resource):
# AWS does not require KeyName and InstanceType but we seem to
properties_schema = {'ImageId': {'Type': 'String',
'Required': True},
'InstanceType': {'Type': 'String',
'Required': True},
'KeyName': {'Type': 'String',
'Required': True},
'AvailabilityZone': {'Type': 'String',
'Default': 'nova'},
'DisableApiTermination': {'Type': 'String',
'Implemented': False},
'KernelId': {'Type': 'String',
'Implemented': False},
'Monitoring': {'Type': 'Boolean',
'Implemented': False},
'PlacementGroupName': {'Type': 'String',
'Implemented': False},
'PrivateIpAddress': {'Type': 'String',
'Implemented': False},
'RamDiskId': {'Type': 'String',
'Implemented': False},
'SecurityGroups': {'Type': 'CommaDelimitedList',
'Implemented': False},
'SecurityGroupIds': {'Type': 'CommaDelimitedList',
'Implemented': False},
'SourceDestCheck': {'Type': 'Boolean',
'Implemented': False},
'SubnetId': {'Type': 'String',
'Implemented': False},
'Tags': {'Type': 'CommaDelimitedList',
'Implemented': False},
'Tenancy': {'Type': 'String',
'AllowedValues': ['dedicated', 'default'],
'Implemented': False},
'UserData': {'Type': 'String'},
'Volumes': {'Type': 'CommaDelimitedList',
'Implemented': False}}

def __init__(self, name, json_snippet, stack):
super(Instance, self).__init__(name, json_snippet, stack)
self.ipaddress = '0.0.0.0'
self.mime_string = None

if not 'AvailabilityZone' in self.t['Properties']:
self.t['Properties']['AvailabilityZone'] = 'nova'
self.itype_oflavor = {'t1.micro': 'm1.tiny',
'm1.small': 'm1.small',
'm1.medium': 'm1.medium',
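
With this schema the required keys (ImageId, InstanceType, KeyName) and the 'nova' default for AvailabilityZone are enforced by the generic property validation rather than by ad-hoc checks in create(). A hypothetical template fragment, for illustration:

    # Hypothetical instance properties as they might appear in a template.
    instance_properties = {
        'ImageId': 'F16-x86_64-cfntools',
        'InstanceType': 'm1.small',
        'KeyName': 'heat_key',
        # 'Tenancy': 'shared'   -> rejected: not in AllowedValues
        #                          ['dedicated', 'default']
        # omitting 'KeyName'    -> reported as
        #                          'KeyName Property must be provided'
    }
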
@@ -72,7 +107,7 @@ class Instance(Resource):

res = None
if key == 'AvailabilityZone':
res = self.t['Properties']['AvailabilityZone']
res = self.properties['AvailabilityZone']
elif key == 'PublicIp':
res = self.ipaddress
elif key == 'PrivateDnsName':
@@ -137,24 +172,17 @@ class Instance(Resource):
return
self.state_set(self.CREATE_IN_PROGRESS)
Resource.create(self)
props = self.t['Properties']
required_props = ('KeyName', 'InstanceType', 'ImageId')
for key in required_props:
if key not in props:
raise exception.UserParameterMissing(key=key)

security_groups = props.get('SecurityGroups')

userdata = self.t['Properties']['UserData']

flavor = self.itype_oflavor[self.t['Properties']['InstanceType']]
key_name = self.t['Properties']['KeyName']
security_groups = self.properties.get('SecurityGroups')
userdata = self.properties['UserData']
flavor = self.itype_oflavor[self.properties['InstanceType']]
key_name = self.properties['KeyName']

keypairs = [k.name for k in self.nova().keypairs.list()]
if key_name not in keypairs:
raise exception.UserKeyPairMissing(key_name=key_name)

image_name = self.t['Properties']['ImageId']
image_name = self.properties['ImageId']
image_id = None
image_list = self.nova().images.list()
for o in image_list:
@@ -193,6 +221,9 @@
'''
Validate any of the provided params
'''
res = Resource.validate(self)
if res:
return res
#check validity of key
if self.stack.parms['KeyName']:
keypairs = self.nova().keypairs.list()
@@ -123,15 +123,16 @@ class Stack(object):
for r in order:
try:
res = self.resources[r].validate()
except Exception as ex:
logger.exception('validate')
res = str(ex)
finally:
if res:
err_str = 'Malformed Query Response [%s]' % (res)
response = {'ValidateTemplateResult': {
'Description': err_str,
'Parameters': []}}
return response
except Exception as ex:
logger.exception('validate')
failed = True

if response == None:
response = {'ValidateTemplateResult': {
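
A property failure from any resource now surfaces through this loop as the stack's ValidateTemplateResult. A simplified sketch of how the error dict returned by a resource ends up in the response (the values are illustrative):

    res = {'Error': 'KeyName Property must be provided'}   # what resource.validate() returned
    if res:
        err_str = 'Malformed Query Response [%s]' % (res)
        response = {'ValidateTemplateResult': {
            'Description': err_str,
            'Parameters': []}}
    print(response)
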
@@ -26,8 +26,9 @@ from novaclient.exceptions import BadRequest
from novaclient.exceptions import NotFound

from heat.common import exception
from heat.db import api as db_api
from heat.common.config import HeatEngineConfigOpts
from heat.db import api as db_api
from heat.engine import checkeddict

logger = logging.getLogger(__file__)
@@ -49,6 +50,12 @@ class Resource(object):
self.references = []
self.stack = stack
self.name = name
self.properties = checkeddict.Properties(self.properties_schema)
if not 'Properties' in self.t:
# make a dummy entry to prevent having to check all over the
# place for it.
self.t['Properties'] = {}

resource = db_api.resource_get_by_name_and_stack(None, name, stack.id)
if resource:
self.instance_id = resource.nova_instance
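
Every resource class now only has to declare properties_schema; the base constructor wraps it in a checkeddict.Properties instance. A minimal sketch of that wiring (the class and schema names below are illustrative, not from the commit):

    from heat.engine import checkeddict

    class MyResource(object):
        properties_schema = {'Name': {'Type': 'String', 'Required': True}}

        def __init__(self, template_snippet):
            self.t = template_snippet
            if not 'Properties' in self.t:
                self.t['Properties'] = {}
            self.properties = checkeddict.Properties(self.properties_schema)
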
@@ -59,10 +66,6 @@
self.state = None
self.id = None
self._nova = {}
if not 'Properties' in self.t:
# make a dummy entry to prevent having to check all over the
# place for it.
self.t['Properties'] = {}

stack.resolve_static_refs(self.t)
stack.resolve_find_in_map(self.t)
@@ -92,6 +95,9 @@
self.stack.resolve_attributes(self.t)
self.stack.resolve_joins(self.t)
self.stack.resolve_base64(self.t)
for p in self.t['Properties']:
self.properties[p] = self.t['Properties'][p]
self.properties.validate()

def validate(self):
logger.info('validating %s name:%s' % (self.t['Type'], self.name))
@@ -100,6 +106,13 @@
self.stack.resolve_joins(self.t)
self.stack.resolve_base64(self.t)

try:
for p in self.t['Properties']:
self.properties[p] = self.t['Properties'][p]
except ValueError as ex:
return {'Error': '%s' % str(ex)}
self.properties.validate()

def instance_id_set(self, inst):
self.instance_id = inst
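
Assigning each template value into the CheckedDict-backed properties object is what triggers the type/range/pattern checks, so validate() can report a bad value as an {'Error': ...} dict instead of letting a ValueError escape. Restated as a standalone sketch (the helper name is illustrative; the real code runs inside Resource.validate()):

    def assign_and_check(template_properties, properties):
        try:
            for p in template_properties:
                properties[p] = template_properties[p]   # may raise ValueError
        except ValueError as ex:
            return {'Error': '%s' % str(ex)}
        return properties.validate()   # None on success, {'Error': ...} otherwise
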
@@ -185,6 +198,8 @@


class GenericResource(Resource):
properties_schema = {}

def __init__(self, name, json_snippet, stack):
super(GenericResource, self).__init__(name, json_snippet, stack)
@@ -25,15 +25,17 @@ logger = logging.getLogger(__file__)


class SecurityGroup(Resource):
properties_schema = {'GroupDescription': {'Type': 'String',
'Required': True},
'VpcId': {'Type': 'String',
'Implemented': False},
'SecurityGroupIngress': {'Type': 'CommaDelimitedList',
'Implemented': False},
'SecurityGroupEgress': {'Type': 'CommaDelimitedList'}}

def __init__(self, name, json_snippet, stack):
super(SecurityGroup, self).__init__(name, json_snippet, stack)

if 'GroupDescription' in self.t['Properties']:
self.description = self.t['Properties']['GroupDescription']
else:
self.description = ''

def create(self):
if self.state != None:
return
@@ -49,12 +51,12 @@ class SecurityGroup(Resource):

if not sec:
sec = self.nova().security_groups.create(self.name,
self.description)
self.properties['GroupDescription'])

self.instance_id_set(sec.id)
if 'SecurityGroupIngress' in self.t['Properties']:
if 'SecurityGroupIngress' in self.properties:
rules_client = self.nova().security_group_rules
for i in self.t['Properties']['SecurityGroupIngress']:
for i in self.properties['SecurityGroupIngress']:
try:
rule = rules_client.create(sec.id,
i['IpProtocol'],
@@ -75,7 +77,7 @@ class SecurityGroup(Resource):
'''
Validate the security group
'''
return None
return Resource.validate(self)

def delete(self):
if self.state == self.DELETE_IN_PROGRESS or \
@@ -24,6 +24,14 @@ logger = logging.getLogger(__file__)


class User(Resource):
properties_schema = {'Path': {'Type': 'String',
'Implemented': False},
'Groups': {'Type': 'CommaDelimitedList',
'Implemented': False},
'LoginProfile': {'Type': 'String',
'Implemented': False},
'Policies': {'Type': 'CommaDelimitedList'}}

def __init__(self, name, json_snippet, stack):
super(User, self).__init__(name, json_snippet, stack)
self.instance_id = ''
@@ -44,6 +52,14 @@ class User(Resource):


class AccessKey(Resource):
properties_schema = {'Serial': {'Type': 'Integer',
'Implemented': False},
'UserName': {'Type': 'String',
'Required': True},
'Status': {'Type': 'String',
'Implemented': False,
'AllowedValues': ['Active', 'Inactive']}}

def __init__(self, name, json_snippet, stack):
super(AccessKey, self).__init__(name, json_snippet, stack)
@@ -25,6 +25,11 @@ logger = logging.getLogger(__file__)


class Volume(Resource):
properties_schema = {'AvailabilityZone': {'Type': 'String',
'Required': True},
'Size': {'Type': 'Number'},
'SnapshotId': {'Type': 'String'}}

def __init__(self, name, json_snippet, stack):
super(Volume, self).__init__(name, json_snippet, stack)
@@ -34,7 +39,7 @@ class Volume(Resource):
self.state_set(self.CREATE_IN_PROGRESS)
super(Volume, self).create()

vol = self.nova('volume').volumes.create(self.t['Properties']['Size'],
vol = self.nova('volume').volumes.create(self.properties['Size'],
display_name=self.name,
display_description=self.name)
@@ -51,7 +56,7 @@ class Volume(Resource):
'''
Validate the volume
'''
return None
return Resource.validate(self)

def delete(self):
if self.state == self.DELETE_IN_PROGRESS or \
@@ -73,6 +78,14 @@ class Volume(Resource):


class VolumeAttachment(Resource):
properties_schema = {'InstanceId': {'Type': 'String',
'Required': True},
'VolumeId': {'Type': 'String',
'Required': True},
'Device': {'Type': "String",
'Required': True,
'AllowedPattern': '/dev/vd[b-z]'}}

def __init__(self, name, json_snippet, stack):
super(VolumeAttachment, self).__init__(name, json_snippet, stack)
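
Declaring 'AllowedPattern': '/dev/vd[b-z]' on Device moves the mount-point check into the shared schema validation, replacing the hand-rolled re.match() removed further down. The same check, in isolation:

    import re

    allowed = '/dev/vd[b-z]'
    for device in ('/dev/vdb', '/dev/sda1'):
        if re.match(allowed, device):
            print(device, 'ok')
        else:
            print(device, 'Pattern does not match Device')   # /dev/sda1 is rejected
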
@@ -83,14 +96,14 @@ class VolumeAttachment(Resource):
self.state_set(self.CREATE_IN_PROGRESS)
super(VolumeAttachment, self).create()

server_id = self.t['Properties']['InstanceId']
volume_id = self.t['Properties']['VolumeId']
server_id = self.properties['InstanceId']
volume_id = self.properties['VolumeId']
logger.warn('Attaching InstanceId %s VolumeId %s Device %s' %
(server_id, volume_id, self.t['Properties']['Device']))
(server_id, volume_id, self.properties['Device']))
volapi = self.nova().volumes
va = volapi.create_server_volume(server_id=server_id,
volume_id=volume_id,
device=self.t['Properties']['Device'])
device=self.properties['Device'])

vol = self.nova('volume').volumes.get(va.id)
while vol.status == 'available' or vol.status == 'attaching':
@@ -106,11 +119,7 @@ class VolumeAttachment(Resource):
'''
Validate the mountpoint device
'''
device_re = re.compile('/dev/vd[b-z]')
if re.match(device_re, self.t['Properties']['Device']):
return None
else:
return 'MountPoint.Device must be in format /dev/vd[b-z]'
return Resource.validate(self)

def delete(self):
if self.state == self.DELETE_IN_PROGRESS or \
@@ -119,8 +128,8 @@ class VolumeAttachment(Resource):
self.state_set(self.DELETE_IN_PROGRESS)
Resource.delete(self)

server_id = self.t['Properties']['InstanceId']
volume_id = self.t['Properties']['VolumeId']
server_id = self.properties['InstanceId']
volume_id = self.properties['VolumeId']
logger.info('VolumeAttachment un-attaching %s %s' % \
(server_id, volume_id))
@@ -51,12 +51,6 @@ class WaitConditionHandle(Resource):

self.state_set(self.CREATE_COMPLETE)

def validate(self):
'''
Validate the wait condition
'''
return None

def delete(self):
if self.state == self.DELETE_IN_PROGRESS or \
self.state == self.DELETE_COMPLETE:
@@ -74,6 +68,13 @@


class WaitCondition(Resource):
properties_schema = {'Handle': {'Type': 'String',
'Required': True},
'Timeout': {'Type': 'Number',
'Required': True,
'MinValue': '1'},
'Count': {'Type': 'Number',
'MinValue': '1'}}

def __init__(self, name, json_snippet, stack):
super(WaitCondition, self).__init__(name, json_snippet, stack)
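
Marking Handle as Required and giving Timeout and Count a MinValue of '1' lets the generic range check stand in for the explicit comparisons removed in the next hunk. In isolation the numeric part looks like:

    def str_to_num(s):
        try:
            return int(s)
        except ValueError:
            return float(s)

    schema = {'Type': 'Number', 'Required': True, 'MinValue': '1'}
    value = '0'
    if str_to_num(value) < str_to_num(schema['MinValue']):
        print('Timeout is out of range')   # replaces `if self.timeout < 1` below
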
@@ -83,20 +84,6 @@ class WaitCondition(Resource):
self.timeout = int(self.t['Properties']['Timeout'])
self.count = int(self.t['Properties'].get('Count', '1'))

def validate(self):
'''
Validate the wait condition
'''
if not 'Handle' in self.t['Properties']:
return {'Error': \
'Handle Property must be provided'}
if self.count < 1:
return {'Error': \
'Count must be greater than 0'}
if self.timeout < 1:
return {'Error': \
'Timeout must be greater than 0'}

def _get_handle_resource_id(self):
if self.resource_id == None:
self.handle_url = self.t['Properties'].get('Handle', None)