Enable H904 style checking rule

Use parentheses instead of a backslash for line continuation. In addition,
escaped quotes in long strings are avoided where switching the quote style
makes sense.

Change-Id: If2e78012b85a4430c6f03f65784cac2d032cf116
parent 4279bd2923
commit cb8712281d
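A minimal before/after sketch of the two changes described above (illustrative only; the variable names are made up and do not come from this patch):

# Before: H904 flags the backslash continuation, and the outer double
# quotes force escaped quotes inside the string.
name = 'stack1'
message = "Expected \"%s\" but " \
          "got nothing." % name

# After: parentheses carry the continuation, and single outer quotes
# remove the need for escaped quotes.
message = ('Expected "%s" but '
           'got nothing.' % name)
print(message)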
@@ -76,8 +76,8 @@ class NeutronExtraRouteTest(common.HeatTestCase):
 def test_extraroute(self):
 # add first route
 neutronclient.Client.show_router(
-'3e46229d-8fce-4733-819a-b5fe630550f8')\
-.AndReturn({'router': {'routes': []}})
+'3e46229d-8fce-4733-819a-b5fe630550f8'
+).AndReturn({'router': {'routes': []}})
 neutronclient.Client.update_router(
 '3e46229d-8fce-4733-819a-b5fe630550f8',
 {"router": {
@@ -87,8 +87,8 @@ class NeutronExtraRouteTest(common.HeatTestCase):
 }}).AndReturn(None)
 # add second route
 neutronclient.Client.show_router(
-'3e46229d-8fce-4733-819a-b5fe630550f8')\
-.AndReturn({'router': {'routes': [{"destination": "192.168.0.0/24",
+'3e46229d-8fce-4733-819a-b5fe630550f8'
+).AndReturn({'router': {'routes': [{"destination": "192.168.0.0/24",
 "nexthop": "1.1.1.1"}]}})
 neutronclient.Client.update_router(
 '3e46229d-8fce-4733-819a-b5fe630550f8',
@@ -100,8 +100,8 @@ class NeutronExtraRouteTest(common.HeatTestCase):
 }}).AndReturn(None)
 # first delete
 neutronclient.Client.show_router(
-'3e46229d-8fce-4733-819a-b5fe630550f8')\
-.AndReturn({'router':
+'3e46229d-8fce-4733-819a-b5fe630550f8'
+).AndReturn({'router':
 {'routes': [{"destination": "192.168.0.0/24",
 "nexthop": "1.1.1.1"},
 {"destination": "192.168.255.0/24",
@@ -115,8 +115,8 @@ class NeutronExtraRouteTest(common.HeatTestCase):
 }}).AndReturn(None)
 # second delete
 neutronclient.Client.show_router(
-'3e46229d-8fce-4733-819a-b5fe630550f8')\
-.AndReturn({'router':
+'3e46229d-8fce-4733-819a-b5fe630550f8'
+).AndReturn({'router':
 {'routes': [{"destination": "192.168.255.0/24",
 "nexthop": "1.1.1.1"}]}})
 self.m.ReplayAll()
@@ -541,8 +541,8 @@ class CloudLoadBalancer(resource.Resource):

 virtual_ips = self._setup_properties(vips, self.clb.VirtualIP)

-(session_persistence, connection_logging, metadata) = \
-self._alter_properties_for_api()
+(session_persistence, connection_logging, metadata
+) = self._alter_properties_for_api()

 lb_body = {
 'port': self.properties[self.PORT],
@@ -208,12 +208,12 @@ class CloudServer(server.Server):

 self.client_plugin().refresh_server(server)

-if 'rack_connect' in self.context.roles and not \
-self._check_rack_connect_complete(server):
+if ('rack_connect' in self.context.roles and not
+self._check_rack_connect_complete(server)):
 return False

-if 'rax_managed' in self.context.roles and not \
-self._check_managed_cloud_complete(server):
+if ('rax_managed' in self.context.roles and not
+self._check_managed_cloud_complete(server)):
 return False

 return True
@@ -32,8 +32,7 @@ from ..resources import cloud_loadbalancer as lb # noqa
 # The following fakes are for pyrax


-cert = """\
------BEGIN CERTIFICATE-----
+cert = """-----BEGIN CERTIFICATE-----
 MIIFBjCCAu4CCQDWdcR5LY/+/jANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJB
 VTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0
 cyBQdHkgTHRkMB4XDTE0MTAxNjE3MDYxNVoXDTE1MTAxNjE3MDYxNVowRTELMAkG
@@ -63,8 +62,7 @@ eF5whPl36/GK8HUixCibkCyqEOBBuNqhOz7nVLM0eg5L+TE5coizEBagxVCovYSj
 fQ9zkIgaC5oeH6L0C1FFG1vRNSWokheBk14ztVoJCJyFr6p0/6pD7SeR
 -----END CERTIFICATE-----"""

-private_key = """\
------BEGIN PRIVATE KEY-----
+private_key = """-----BEGIN PRIVATE KEY-----
 MIIJRAIBADANBgkqhkiG9w0BAQEFAASCCS4wggkqAgEAAoICAQDJuTXD9LTCh25U
 +lHdZPE8Wff/Ljh8FDT27xbL0sgrqY9CdLxgk427gtiOU/wl0bZyxCLfxGq5TQKn
 I2wwlrUshCrN8w5ppK3qCAxGvKcgENsnLAlxjMQzfexd/8JS2WoFDTNBcBhy2VgY
@ -1032,8 +1030,8 @@ class LoadBalancerTest(common.HeatTestCase):
|
|||||||
def test_update_session_persistence_delete(self):
|
def test_update_session_persistence_delete(self):
|
||||||
template = copy.deepcopy(self.lb_template)
|
template = copy.deepcopy(self.lb_template)
|
||||||
lb_name = template['Resources'].keys()[0]
|
lb_name = template['Resources'].keys()[0]
|
||||||
template['Resources'][lb_name]['Properties']['sessionPersistence'] = \
|
template['Resources'][lb_name]['Properties'][
|
||||||
"SOURCE_IP"
|
'sessionPersistence'] = "SOURCE_IP"
|
||||||
expected_body = copy.deepcopy(self.expected_body)
|
expected_body = copy.deepcopy(self.expected_body)
|
||||||
expected_body['sessionPersistence'] = {'persistenceType': "SOURCE_IP"}
|
expected_body['sessionPersistence'] = {'persistenceType': "SOURCE_IP"}
|
||||||
rsrc, fake_loadbalancer = self._mock_loadbalancer(template,
|
rsrc, fake_loadbalancer = self._mock_loadbalancer(template,
|
||||||
@ -1240,8 +1238,8 @@ class LoadBalancerTest(common.HeatTestCase):
|
|||||||
def test_update_connection_logging_delete(self):
|
def test_update_connection_logging_delete(self):
|
||||||
template = copy.deepcopy(self.lb_template)
|
template = copy.deepcopy(self.lb_template)
|
||||||
lb_name = template['Resources'].keys()[0]
|
lb_name = template['Resources'].keys()[0]
|
||||||
template['Resources'][lb_name]['Properties']['connectionLogging'] = \
|
template['Resources'][lb_name]['Properties'][
|
||||||
True
|
'connectionLogging'] = True
|
||||||
expected_body = copy.deepcopy(self.expected_body)
|
expected_body = copy.deepcopy(self.expected_body)
|
||||||
expected_body['connectionLogging'] = {'enabled': True}
|
expected_body['connectionLogging'] = {'enabled': True}
|
||||||
rsrc, fake_loadbalancer = self._mock_loadbalancer(template,
|
rsrc, fake_loadbalancer = self._mock_loadbalancer(template,
|
||||||
@ -1267,8 +1265,8 @@ class LoadBalancerTest(common.HeatTestCase):
|
|||||||
def test_update_connection_logging_disable(self):
|
def test_update_connection_logging_disable(self):
|
||||||
template = copy.deepcopy(self.lb_template)
|
template = copy.deepcopy(self.lb_template)
|
||||||
lb_name = template['Resources'].keys()[0]
|
lb_name = template['Resources'].keys()[0]
|
||||||
template['Resources'][lb_name]['Properties']['connectionLogging'] = \
|
template['Resources'][lb_name]['Properties'][
|
||||||
True
|
'connectionLogging'] = True
|
||||||
expected_body = copy.deepcopy(self.expected_body)
|
expected_body = copy.deepcopy(self.expected_body)
|
||||||
expected_body['connectionLogging'] = {'enabled': True}
|
expected_body['connectionLogging'] = {'enabled': True}
|
||||||
rsrc, fake_loadbalancer = self._mock_loadbalancer(template,
|
rsrc, fake_loadbalancer = self._mock_loadbalancer(template,
|
||||||
@ -1317,8 +1315,8 @@ class LoadBalancerTest(common.HeatTestCase):
|
|||||||
def test_update_connection_throttle_delete(self):
|
def test_update_connection_throttle_delete(self):
|
||||||
template = copy.deepcopy(self.lb_template)
|
template = copy.deepcopy(self.lb_template)
|
||||||
lb_name = template['Resources'].keys()[0]
|
lb_name = template['Resources'].keys()[0]
|
||||||
template['Resources'][lb_name]['Properties']['connectionThrottle'] = \
|
template['Resources'][lb_name]['Properties'][
|
||||||
{'maxConnections': 1000}
|
'connectionThrottle'] = {'maxConnections': 1000}
|
||||||
expected_body = copy.deepcopy(self.expected_body)
|
expected_body = copy.deepcopy(self.expected_body)
|
||||||
expected_body['connectionThrottle'] = {
|
expected_body['connectionThrottle'] = {
|
||||||
'maxConnections': 1000, 'maxConnectionRate': None,
|
'maxConnections': 1000, 'maxConnectionRate': None,
|
||||||
@ -1368,8 +1366,8 @@ class LoadBalancerTest(common.HeatTestCase):
|
|||||||
def test_update_content_caching_deleted(self):
|
def test_update_content_caching_deleted(self):
|
||||||
template = copy.deepcopy(self.lb_template)
|
template = copy.deepcopy(self.lb_template)
|
||||||
lb_name = template['Resources'].keys()[0]
|
lb_name = template['Resources'].keys()[0]
|
||||||
template['Resources'][lb_name]['Properties']['contentCaching'] = \
|
template['Resources'][lb_name]['Properties'][
|
||||||
'ENABLED'
|
'contentCaching'] = 'ENABLED'
|
||||||
# Enabling the content cache is done post-creation, so no need
|
# Enabling the content cache is done post-creation, so no need
|
||||||
# to modify self.expected_body
|
# to modify self.expected_body
|
||||||
rsrc, fake_loadbalancer = self._mock_loadbalancer(template,
|
rsrc, fake_loadbalancer = self._mock_loadbalancer(template,
|
||||||
@ -1396,8 +1394,8 @@ class LoadBalancerTest(common.HeatTestCase):
|
|||||||
def test_update_content_caching_disable(self):
|
def test_update_content_caching_disable(self):
|
||||||
template = copy.deepcopy(self.lb_template)
|
template = copy.deepcopy(self.lb_template)
|
||||||
lb_name = template['Resources'].keys()[0]
|
lb_name = template['Resources'].keys()[0]
|
||||||
template['Resources'][lb_name]['Properties']['contentCaching'] = \
|
template['Resources'][lb_name]['Properties'][
|
||||||
'ENABLED'
|
'contentCaching'] = 'ENABLED'
|
||||||
# Enabling the content cache is done post-creation, so no need
|
# Enabling the content cache is done post-creation, so no need
|
||||||
# to modify self.expected_body
|
# to modify self.expected_body
|
||||||
rsrc, fake_loadbalancer = self._mock_loadbalancer(template,
|
rsrc, fake_loadbalancer = self._mock_loadbalancer(template,
|
||||||
|
@ -101,15 +101,15 @@ class CloudServersTest(common.HeatTestCase):
|
|||||||
stack_name = '%s_s' % name
|
stack_name = '%s_s' % name
|
||||||
(tmpl, stack) = self._setup_test_stack(stack_name)
|
(tmpl, stack) = self._setup_test_stack(stack_name)
|
||||||
|
|
||||||
tmpl.t['Resources']['WebServer']['Properties']['image'] = \
|
tmpl.t['Resources']['WebServer']['Properties'][
|
||||||
image_id or 'CentOS 5.2'
|
'image'] = image_id or 'CentOS 5.2'
|
||||||
tmpl.t['Resources']['WebServer']['Properties']['flavor'] = \
|
tmpl.t['Resources']['WebServer']['Properties'][
|
||||||
'256 MB Server'
|
'flavor'] = '256 MB Server'
|
||||||
|
|
||||||
server_name = '%s' % name
|
server_name = '%s' % name
|
||||||
if override_name:
|
if override_name:
|
||||||
tmpl.t['Resources']['WebServer']['Properties']['name'] = \
|
tmpl.t['Resources']['WebServer']['Properties'][
|
||||||
server_name
|
'name'] = server_name
|
||||||
|
|
||||||
resource_defns = tmpl.resource_definitions(stack)
|
resource_defns = tmpl.resource_definitions(stack)
|
||||||
server = cloud_server.CloudServer(server_name,
|
server = cloud_server.CloudServer(server_name,
|
||||||
|
@@ -65,7 +65,7 @@ class KeystonePasswordAuthProtocol(object):

 def _reject_request(self, env, start_response, auth_url):
 """Redirect client to auth server."""
-headers = [('WWW-Authenticate', 'Keystone uri=\'%s\'' % auth_url)]
+headers = [('WWW-Authenticate', "Keystone uri='%s'" % auth_url)]
 resp = exc.HTTPUnauthorized('Authentication required', headers)
 return resp(env, start_response)

@@ -124,10 +124,10 @@ engine_opts = [
 'retries.')),
 cfg.IntOpt('event_purge_batch_size',
 default=10,
-help=_('Controls how many events will be pruned whenever a '
-' stack\'s events exceed max_events_per_stack. Set this'
-' lower to keep more events at the expense of more'
-' frequent purges.')),
+help=_("Controls how many events will be pruned whenever a "
+"stack's events exceed max_events_per_stack. Set this "
+"lower to keep more events at the expense of more "
+"frequent purges.")),
 cfg.IntOpt('max_events_per_stack',
 default=1000,
 help=_('Maximum events that will be available per stack. Older'
@@ -15,8 +15,11 @@ from heat.common.i18n import _
 from heat.common import template_format


-SECTIONS = (PARAMETERS, RESOURCE_REGISTRY, PARAMETER_DEFAULTS) = \
-('parameters', 'resource_registry', 'parameter_defaults')
+SECTIONS = (
+PARAMETERS, RESOURCE_REGISTRY, PARAMETER_DEFAULTS
+) = (
+'parameters', 'resource_registry', 'parameter_defaults'
+)


 def parse(env_str):
@ -135,8 +135,8 @@ class MissingCredentialError(HeatException):
|
|||||||
|
|
||||||
|
|
||||||
class BadAuthStrategy(HeatException):
|
class BadAuthStrategy(HeatException):
|
||||||
msg_fmt = _("Incorrect auth strategy, expected \"%(expected)s\" but "
|
msg_fmt = _('Incorrect auth strategy, expected "%(expected)s" but '
|
||||||
"received \"%(received)s\"")
|
'received "%(received)s"')
|
||||||
|
|
||||||
|
|
||||||
class AuthBadRequest(HeatException):
|
class AuthBadRequest(HeatException):
|
||||||
@ -216,8 +216,8 @@ class InvalidTemplateAttribute(HeatException):
|
|||||||
|
|
||||||
|
|
||||||
class InvalidTemplateReference(HeatException):
|
class InvalidTemplateReference(HeatException):
|
||||||
msg_fmt = _("The specified reference \"%(resource)s\" (in %(key)s)"
|
msg_fmt = _('The specified reference "%(resource)s" (in %(key)s)'
|
||||||
" is incorrect.")
|
' is incorrect.')
|
||||||
|
|
||||||
|
|
||||||
class UserKeyPairMissing(HeatException):
|
class UserKeyPairMissing(HeatException):
|
||||||
|
@ -326,7 +326,7 @@ class KeystoneClientV3(object):
|
|||||||
if len(domains) == 1:
|
if len(domains) == 1:
|
||||||
return domains[0].id
|
return domains[0].id
|
||||||
elif len(domains) == 0:
|
elif len(domains) == 0:
|
||||||
msg = _('Can\'t find domain id for %s!')
|
msg = _("Can't find domain id for %s!")
|
||||||
LOG.error(msg, domain_name)
|
LOG.error(msg, domain_name)
|
||||||
raise exception.Error(msg % domain_name)
|
raise exception.Error(msg % domain_name)
|
||||||
else:
|
else:
|
||||||
|
@ -543,8 +543,8 @@ def is_json_content_type(request):
|
|||||||
# for back compatible for null or plain content type
|
# for back compatible for null or plain content type
|
||||||
if not content_type or content_type.startswith('text/plain'):
|
if not content_type or content_type.startswith('text/plain'):
|
||||||
content_type = 'application/json'
|
content_type = 'application/json'
|
||||||
if content_type in ('JSON', 'application/json')\
|
if (content_type in ('JSON', 'application/json')
|
||||||
and request.body.startswith('{'):
|
and request.body.startswith('{')):
|
||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
@ -565,8 +565,8 @@ class JSONRequestDeserializer(object):
|
|||||||
try:
|
try:
|
||||||
if len(datastring) > cfg.CONF.max_json_body_size:
|
if len(datastring) > cfg.CONF.max_json_body_size:
|
||||||
msg = _('JSON body size (%(len)s bytes) exceeds maximum '
|
msg = _('JSON body size (%(len)s bytes) exceeds maximum '
|
||||||
'allowed size (%(limit)s bytes).') % \
|
'allowed size (%(limit)s bytes).'
|
||||||
{'len': len(datastring),
|
) % {'len': len(datastring),
|
||||||
'limit': cfg.CONF.max_json_body_size}
|
'limit': cfg.CONF.max_json_body_size}
|
||||||
raise exception.RequestLimitExceeded(message=msg)
|
raise exception.RequestLimitExceeded(message=msg)
|
||||||
return json.loads(datastring)
|
return json.loads(datastring)
|
||||||
|
@@ -123,10 +123,13 @@ def resource_get(context, resource_id):


 def resource_get_by_name_and_stack(context, resource_name, stack_id):
-result = model_query(context, models.Resource).\
-filter_by(name=resource_name).\
-filter_by(stack_id=stack_id).\
-options(orm.joinedload("data")).first()
+result = model_query(
+context, models.Resource
+).filter_by(
+name=resource_name
+).filter_by(
+stack_id=stack_id
+).options(orm.joinedload("data")).first()
 return result


@ -258,9 +261,11 @@ def resource_create(context, values):
|
|||||||
|
|
||||||
|
|
||||||
def resource_get_all_by_stack(context, stack_id):
|
def resource_get_all_by_stack(context, stack_id):
|
||||||
results = model_query(context, models.Resource).\
|
results = model_query(
|
||||||
filter_by(stack_id=stack_id).\
|
context, models.Resource
|
||||||
options(orm.joinedload("data")).all()
|
).filter_by(
|
||||||
|
stack_id=stack_id
|
||||||
|
).options(orm.joinedload("data")).all()
|
||||||
|
|
||||||
if not results:
|
if not results:
|
||||||
raise exception.NotFound(_("no resources for stack_id %s were found")
|
raise exception.NotFound(_("no resources for stack_id %s were found")
|
||||||
@ -269,23 +274,22 @@ def resource_get_all_by_stack(context, stack_id):
|
|||||||
|
|
||||||
|
|
||||||
def stack_get_by_name_and_owner_id(context, stack_name, owner_id):
|
def stack_get_by_name_and_owner_id(context, stack_name, owner_id):
|
||||||
query = soft_delete_aware_query(context, models.Stack).\
|
query = soft_delete_aware_query(
|
||||||
filter(sqlalchemy.or_(
|
context, models.Stack
|
||||||
|
).filter(sqlalchemy.or_(
|
||||||
models.Stack.tenant == context.tenant_id,
|
models.Stack.tenant == context.tenant_id,
|
||||||
models.Stack.stack_user_project_id == context.tenant_id
|
models.Stack.stack_user_project_id == context.tenant_id)
|
||||||
)).\
|
).filter_by(name=stack_name).filter_by(owner_id=owner_id)
|
||||||
filter_by(name=stack_name).\
|
|
||||||
filter_by(owner_id=owner_id)
|
|
||||||
return query.first()
|
return query.first()
|
||||||
|
|
||||||
|
|
||||||
def stack_get_by_name(context, stack_name):
|
def stack_get_by_name(context, stack_name):
|
||||||
query = soft_delete_aware_query(context, models.Stack).\
|
query = soft_delete_aware_query(
|
||||||
filter(sqlalchemy.or_(
|
context, models.Stack
|
||||||
|
).filter(sqlalchemy.or_(
|
||||||
models.Stack.tenant == context.tenant_id,
|
models.Stack.tenant == context.tenant_id,
|
||||||
models.Stack.stack_user_project_id == context.tenant_id
|
models.Stack.stack_user_project_id == context.tenant_id)
|
||||||
)).\
|
).filter_by(name=stack_name)
|
||||||
filter_by(name=stack_name)
|
|
||||||
return query.first()
|
return query.first()
|
||||||
|
|
||||||
|
|
||||||
@ -310,8 +314,8 @@ def stack_get(context, stack_id, show_deleted=False, tenant_safe=True,
|
|||||||
|
|
||||||
|
|
||||||
def stack_get_all_by_owner_id(context, owner_id):
|
def stack_get_all_by_owner_id(context, owner_id):
|
||||||
results = soft_delete_aware_query(context, models.Stack).\
|
results = soft_delete_aware_query(
|
||||||
filter_by(owner_id=owner_id).all()
|
context, models.Stack).filter_by(owner_id=owner_id).all()
|
||||||
return results
|
return results
|
||||||
|
|
||||||
|
|
||||||
@ -353,13 +357,13 @@ def _paginate_query(context, query, model, limit=None, sort_keys=None,
|
|||||||
def _query_stack_get_all(context, tenant_safe=True, show_deleted=False,
|
def _query_stack_get_all(context, tenant_safe=True, show_deleted=False,
|
||||||
show_nested=False):
|
show_nested=False):
|
||||||
if show_nested:
|
if show_nested:
|
||||||
query = soft_delete_aware_query(context, models.Stack,
|
query = soft_delete_aware_query(
|
||||||
show_deleted=show_deleted).\
|
context, models.Stack, show_deleted=show_deleted
|
||||||
filter_by(backup=False)
|
).filter_by(backup=False)
|
||||||
else:
|
else:
|
||||||
query = soft_delete_aware_query(context, models.Stack,
|
query = soft_delete_aware_query(
|
||||||
show_deleted=show_deleted).\
|
context, models.Stack, show_deleted=show_deleted
|
||||||
filter_by(owner_id=None)
|
).filter_by(owner_id=None)
|
||||||
|
|
||||||
if tenant_safe:
|
if tenant_safe:
|
||||||
query = query.filter_by(tenant=context.tenant_id)
|
query = query.filter_by(tenant=context.tenant_id)
|
||||||
@ -450,9 +454,10 @@ def stack_lock_steal(stack_id, old_engine_id, new_engine_id):
|
|||||||
session = get_session()
|
session = get_session()
|
||||||
with session.begin():
|
with session.begin():
|
||||||
lock = session.query(models.StackLock).get(stack_id)
|
lock = session.query(models.StackLock).get(stack_id)
|
||||||
rows_affected = session.query(models.StackLock).\
|
rows_affected = session.query(
|
||||||
filter_by(stack_id=stack_id, engine_id=old_engine_id).\
|
models.StackLock
|
||||||
update({"engine_id": new_engine_id})
|
).filter_by(stack_id=stack_id, engine_id=old_engine_id
|
||||||
|
).update({"engine_id": new_engine_id})
|
||||||
if not rows_affected:
|
if not rows_affected:
|
||||||
return lock.engine_id if lock is not None else True
|
return lock.engine_id if lock is not None else True
|
||||||
|
|
||||||
@ -460,9 +465,9 @@ def stack_lock_steal(stack_id, old_engine_id, new_engine_id):
|
|||||||
def stack_lock_release(stack_id, engine_id):
|
def stack_lock_release(stack_id, engine_id):
|
||||||
session = get_session()
|
session = get_session()
|
||||||
with session.begin():
|
with session.begin():
|
||||||
rows_affected = session.query(models.StackLock).\
|
rows_affected = session.query(
|
||||||
filter_by(stack_id=stack_id, engine_id=engine_id).\
|
models.StackLock
|
||||||
delete()
|
).filter_by(stack_id=stack_id, engine_id=engine_id).delete()
|
||||||
if not rows_affected:
|
if not rows_affected:
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@ -523,8 +528,9 @@ def event_get(context, event_id):
|
|||||||
def event_get_all(context):
|
def event_get_all(context):
|
||||||
stacks = soft_delete_aware_query(context, models.Stack)
|
stacks = soft_delete_aware_query(context, models.Stack)
|
||||||
stack_ids = [stack.id for stack in stacks]
|
stack_ids = [stack.id for stack in stacks]
|
||||||
results = model_query(context, models.Event).\
|
results = model_query(
|
||||||
filter(models.Event.stack_id.in_(stack_ids)).all()
|
context, models.Event
|
||||||
|
).filter(models.Event.stack_id.in_(stack_ids)).all()
|
||||||
return results
|
return results
|
||||||
|
|
||||||
|
|
||||||
@ -532,16 +538,16 @@ def event_get_all_by_tenant(context, limit=None, marker=None,
|
|||||||
sort_keys=None, sort_dir=None, filters=None):
|
sort_keys=None, sort_dir=None, filters=None):
|
||||||
query = model_query(context, models.Event)
|
query = model_query(context, models.Event)
|
||||||
query = db_filters.exact_filter(query, models.Event, filters)
|
query = db_filters.exact_filter(query, models.Event, filters)
|
||||||
query = query.join(models.Event.stack).\
|
query = query.join(
|
||||||
filter_by(tenant=context.tenant_id).filter_by(deleted_at=None)
|
models.Event.stack
|
||||||
|
).filter_by(tenant=context.tenant_id).filter_by(deleted_at=None)
|
||||||
filters = None
|
filters = None
|
||||||
return _events_filter_and_page_query(context, query, limit, marker,
|
return _events_filter_and_page_query(context, query, limit, marker,
|
||||||
sort_keys, sort_dir, filters).all()
|
sort_keys, sort_dir, filters).all()
|
||||||
|
|
||||||
|
|
||||||
def _query_all_by_stack(context, stack_id):
|
def _query_all_by_stack(context, stack_id):
|
||||||
query = model_query(context, models.Event).\
|
query = model_query(context, models.Event).filter_by(stack_id=stack_id)
|
||||||
filter_by(stack_id=stack_id)
|
|
||||||
return query
|
return query
|
||||||
|
|
||||||
|
|
||||||
@ -568,8 +574,8 @@ def _events_paginate_query(context, query, model, limit=None, sort_keys=None,
|
|||||||
if marker:
|
if marker:
|
||||||
# not to use model_query(context, model).get(marker), because
|
# not to use model_query(context, model).get(marker), because
|
||||||
# user can only see the ID(column 'uuid') and the ID as the marker
|
# user can only see the ID(column 'uuid') and the ID as the marker
|
||||||
model_marker = model_query(context, model).filter_by(uuid=marker).\
|
model_marker = model_query(
|
||||||
first()
|
context, model).filter_by(uuid=marker).first()
|
||||||
try:
|
try:
|
||||||
query = utils.paginate_query(query, model, limit, sort_keys,
|
query = utils.paginate_query(query, model, limit, sort_keys,
|
||||||
model_marker, sort_dir)
|
model_marker, sort_dir)
|
||||||
@ -634,8 +640,8 @@ def watch_rule_get(context, watch_rule_id):
|
|||||||
|
|
||||||
|
|
||||||
def watch_rule_get_by_name(context, watch_rule_name):
|
def watch_rule_get_by_name(context, watch_rule_name):
|
||||||
result = model_query(context, models.WatchRule).\
|
result = model_query(
|
||||||
filter_by(name=watch_rule_name).first()
|
context, models.WatchRule).filter_by(name=watch_rule_name).first()
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
@ -645,8 +651,8 @@ def watch_rule_get_all(context):
|
|||||||
|
|
||||||
|
|
||||||
def watch_rule_get_all_by_stack(context, stack_id):
|
def watch_rule_get_all_by_stack(context, stack_id):
|
||||||
results = model_query(context, models.WatchRule).\
|
results = model_query(
|
||||||
filter_by(stack_id=stack_id).all()
|
context, models.WatchRule).filter_by(stack_id=stack_id).all()
|
||||||
return results
|
return results
|
||||||
|
|
||||||
|
|
||||||
@ -745,12 +751,12 @@ def software_deployment_get(context, deployment_id):
|
|||||||
|
|
||||||
def software_deployment_get_all(context, server_id=None):
|
def software_deployment_get_all(context, server_id=None):
|
||||||
sd = models.SoftwareDeployment
|
sd = models.SoftwareDeployment
|
||||||
query = model_query(context, sd).\
|
query = model_query(
|
||||||
filter(sqlalchemy.or_(
|
context, sd
|
||||||
|
).filter(sqlalchemy.or_(
|
||||||
sd.tenant == context.tenant_id,
|
sd.tenant == context.tenant_id,
|
||||||
sd.stack_user_project_id == context.tenant_id
|
sd.stack_user_project_id == context.tenant_id)
|
||||||
)).\
|
).order_by(sd.created_at)
|
||||||
order_by(sd.created_at)
|
|
||||||
if server_id:
|
if server_id:
|
||||||
query = query.filter_by(server_id=server_id)
|
query = query.filter_by(server_id=server_id)
|
||||||
return query.all()
|
return query.all()
|
||||||
@ -837,10 +843,11 @@ def purge_deleted(age, granularity='days'):
|
|||||||
raw_template = sqlalchemy.Table('raw_template', meta, autoload=True)
|
raw_template = sqlalchemy.Table('raw_template', meta, autoload=True)
|
||||||
user_creds = sqlalchemy.Table('user_creds', meta, autoload=True)
|
user_creds = sqlalchemy.Table('user_creds', meta, autoload=True)
|
||||||
|
|
||||||
stmt = sqlalchemy.select([stack.c.id,
|
stmt = sqlalchemy.select(
|
||||||
|
[stack.c.id,
|
||||||
stack.c.raw_template_id,
|
stack.c.raw_template_id,
|
||||||
stack.c.user_creds_id]).\
|
stack.c.user_creds_id]
|
||||||
where(stack.c.deleted_at < time_line)
|
).where(stack.c.deleted_at < time_line)
|
||||||
deleted_stacks = engine.execute(stmt)
|
deleted_stacks = engine.execute(stmt)
|
||||||
|
|
||||||
for s in deleted_stacks:
|
for s in deleted_stacks:
|
||||||
@ -848,8 +855,8 @@ def purge_deleted(age, granularity='days'):
|
|||||||
engine.execute(event_del)
|
engine.execute(event_del)
|
||||||
stack_del = stack.delete().where(stack.c.id == s[0])
|
stack_del = stack.delete().where(stack.c.id == s[0])
|
||||||
engine.execute(stack_del)
|
engine.execute(stack_del)
|
||||||
raw_template_del = raw_template.delete().\
|
raw_template_del = raw_template.delete().where(
|
||||||
where(raw_template.c.id == s[1])
|
raw_template.c.id == s[1])
|
||||||
engine.execute(raw_template_del)
|
engine.execute(raw_template_del)
|
||||||
user_creds_del = user_creds.delete().where(user_creds.c.id == s[2])
|
user_creds_del = user_creds.delete().where(user_creds.c.id == s[2])
|
||||||
engine.execute(user_creds_del)
|
engine.execute(user_creds_del)
|
||||||
|
@ -32,18 +32,19 @@ def downgrade(migrate_engine):
|
|||||||
|
|
||||||
# Remove soft deleted data
|
# Remove soft deleted data
|
||||||
not_deleted = None
|
not_deleted = None
|
||||||
stmt = sqlalchemy.select([stack.c.id,
|
stmt = sqlalchemy.select(
|
||||||
|
[stack.c.id,
|
||||||
stack.c.raw_template_id,
|
stack.c.raw_template_id,
|
||||||
stack.c.user_creds_id]).\
|
stack.c.user_creds_id]
|
||||||
where(stack.c.deleted_at != not_deleted)
|
).where(stack.c.deleted_at != not_deleted)
|
||||||
deleted_stacks = migrate_engine.execute(stmt)
|
deleted_stacks = migrate_engine.execute(stmt)
|
||||||
for s in deleted_stacks:
|
for s in deleted_stacks:
|
||||||
event_del = event.delete().where(event.c.stack_id == s[0])
|
event_del = event.delete().where(event.c.stack_id == s[0])
|
||||||
migrate_engine.execute(event_del)
|
migrate_engine.execute(event_del)
|
||||||
stack_del = stack.delete().where(stack.c.id == s[0])
|
stack_del = stack.delete().where(stack.c.id == s[0])
|
||||||
migrate_engine.execute(stack_del)
|
migrate_engine.execute(stack_del)
|
||||||
raw_template_del = raw_template.delete().\
|
raw_template_del = raw_template.delete(
|
||||||
where(raw_template.c.id == s[1])
|
).where(raw_template.c.id == s[1])
|
||||||
migrate_engine.execute(raw_template_del)
|
migrate_engine.execute(raw_template_del)
|
||||||
user_creds_del = user_creds.delete().where(user_creds.c.id == s[2])
|
user_creds_del = user_creds.delete().where(user_creds.c.id == s[2])
|
||||||
migrate_engine.execute(user_creds_del)
|
migrate_engine.execute(user_creds_del)
|
||||||
|
@ -88,8 +88,8 @@ def upgrade(migrate_engine):
|
|||||||
# NOTE(chenxiao): For DB2, setting "ID" column "autoincrement=True"
|
# NOTE(chenxiao): For DB2, setting "ID" column "autoincrement=True"
|
||||||
# can't make sense after above "tmp_id=>id" transformation,
|
# can't make sense after above "tmp_id=>id" transformation,
|
||||||
# so should work around it.
|
# so should work around it.
|
||||||
sql = "ALTER TABLE EVENT ALTER COLUMN ID SET GENERATED BY " \
|
sql = ("ALTER TABLE EVENT ALTER COLUMN ID SET GENERATED BY "
|
||||||
"DEFAULT AS IDENTITY (START WITH 1, INCREMENT BY 1)"
|
"DEFAULT AS IDENTITY (START WITH 1, INCREMENT BY 1)")
|
||||||
migrate_engine.execute(sql)
|
migrate_engine.execute(sql)
|
||||||
else:
|
else:
|
||||||
event_table.c.tmp_id.alter(sqlalchemy.Integer, autoincrement=True)
|
event_table.c.tmp_id.alter(sqlalchemy.Integer, autoincrement=True)
|
||||||
|
@ -24,8 +24,8 @@ def upgrade(migrate_engine):
|
|||||||
# Set backup flag for backup stacks, which are the only ones named "foo*"
|
# Set backup flag for backup stacks, which are the only ones named "foo*"
|
||||||
not_deleted = None
|
not_deleted = None
|
||||||
stmt = sqlalchemy.select([stack.c.id,
|
stmt = sqlalchemy.select([stack.c.id,
|
||||||
stack.c.name]).\
|
stack.c.name]
|
||||||
where(stack.c.deleted_at == not_deleted)
|
).where(stack.c.deleted_at == not_deleted)
|
||||||
stacks = migrate_engine.execute(stmt)
|
stacks = migrate_engine.execute(stmt)
|
||||||
for s in stacks:
|
for s in stacks:
|
||||||
if s.name.endswith('*'):
|
if s.name.endswith('*'):
|
||||||
|
@@ -35,9 +35,11 @@ _RESOURCE_KEYS = (
 class CfnTemplate(template.Template):
 '''A stack template.'''

-SECTIONS = (VERSION, ALTERNATE_VERSION, DESCRIPTION, MAPPINGS,
-PARAMETERS, RESOURCES, OUTPUTS) = \
-('AWSTemplateFormatVersion', 'HeatTemplateFormatVersion',
+SECTIONS = (
+VERSION, ALTERNATE_VERSION,
+DESCRIPTION, MAPPINGS, PARAMETERS, RESOURCES, OUTPUTS
+) = (
+'AWSTemplateFormatVersion', 'HeatTemplateFormatVersion',
 'Description', 'Mappings', 'Parameters', 'Resources', 'Outputs'
 )

@ -197,8 +197,8 @@ class ResourceRegistry(object):
|
|||||||
if name.endswith('*'):
|
if name.endswith('*'):
|
||||||
# delete all matching entries.
|
# delete all matching entries.
|
||||||
for res_name in registry.keys():
|
for res_name in registry.keys():
|
||||||
if isinstance(registry[res_name], ResourceInfo) and \
|
if (isinstance(registry[res_name], ResourceInfo) and
|
||||||
res_name.startswith(name[:-1]):
|
res_name.startswith(name[:-1])):
|
||||||
LOG.warn(_LW('Removing %(item)s from %(path)s'), {
|
LOG.warn(_LW('Removing %(item)s from %(path)s'), {
|
||||||
'item': res_name,
|
'item': res_name,
|
||||||
'path': descriptive_path})
|
'path': descriptive_path})
|
||||||
|
@ -51,14 +51,14 @@ class Event(object):
|
|||||||
'''Retrieve an Event from the database.'''
|
'''Retrieve an Event from the database.'''
|
||||||
from heat.engine import stack as parser
|
from heat.engine import stack as parser
|
||||||
|
|
||||||
ev = event if event is not None else\
|
ev = (event if event is not None else
|
||||||
db_api.event_get(context, event_id)
|
db_api.event_get(context, event_id))
|
||||||
if ev is None:
|
if ev is None:
|
||||||
message = _('No event exists with id "%s"') % str(event_id)
|
message = _('No event exists with id "%s"') % str(event_id)
|
||||||
raise exception.NotFound(message)
|
raise exception.NotFound(message)
|
||||||
|
|
||||||
st = stack if stack is not None else\
|
st = (stack if stack is not None else
|
||||||
parser.Stack.load(context, ev.stack_id)
|
parser.Stack.load(context, ev.stack_id))
|
||||||
|
|
||||||
return cls(context, st, ev.resource_action, ev.resource_status,
|
return cls(context, st, ev.resource_action, ev.resource_status,
|
||||||
ev.resource_status_reason, ev.physical_resource_id,
|
ev.resource_status_reason, ev.physical_resource_id,
|
||||||
|
@ -39,10 +39,13 @@ class HOTemplate20130523(template.Template):
|
|||||||
A Heat Orchestration Template format stack template.
|
A Heat Orchestration Template format stack template.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
SECTIONS = (VERSION, DESCRIPTION, PARAMETER_GROUPS, PARAMETERS,
|
SECTIONS = (
|
||||||
RESOURCES, OUTPUTS, MAPPINGS) = \
|
VERSION, DESCRIPTION, PARAMETER_GROUPS,
|
||||||
('heat_template_version', 'description', 'parameter_groups',
|
PARAMETERS, RESOURCES, OUTPUTS, MAPPINGS
|
||||||
'parameters', 'resources', 'outputs', '__undefined__')
|
) = (
|
||||||
|
'heat_template_version', 'description', 'parameter_groups',
|
||||||
|
'parameters', 'resources', 'outputs', '__undefined__'
|
||||||
|
)
|
||||||
|
|
||||||
SECTIONS_NO_DIRECT_ACCESS = set([PARAMETERS, VERSION])
|
SECTIONS_NO_DIRECT_ACCESS = set([PARAMETERS, VERSION])
|
||||||
|
|
||||||
|
@ -522,8 +522,8 @@ class Parameters(collections.Mapping):
|
|||||||
raise exception.InvalidTemplateParameter(key=name)
|
raise exception.InvalidTemplateParameter(key=name)
|
||||||
|
|
||||||
def _pseudo_parameters(self, stack_identifier):
|
def _pseudo_parameters(self, stack_identifier):
|
||||||
stack_id = stack_identifier.arn() \
|
stack_id = (stack_identifier.arn()
|
||||||
if stack_identifier is not None else 'None'
|
if stack_identifier is not None else 'None')
|
||||||
stack_name = stack_identifier and stack_identifier.stack_name
|
stack_name = stack_identifier and stack_identifier.stack_name
|
||||||
|
|
||||||
yield Parameter(self.PARAM_STACK_ID,
|
yield Parameter(self.PARAM_STACK_ID,
|
||||||
|
@ -339,8 +339,8 @@ class AutoScalingGroup(instgrp.InstanceGroup, cooldown.CooldownMixin):
|
|||||||
# availability zones, it will be possible to specify multiple subnets.
|
# availability zones, it will be possible to specify multiple subnets.
|
||||||
# For now, only one subnet can be specified. The bug #1096017 tracks
|
# For now, only one subnet can be specified. The bug #1096017 tracks
|
||||||
# this issue.
|
# this issue.
|
||||||
if self.properties.get(self.VPCZONE_IDENTIFIER) and \
|
if (self.properties.get(self.VPCZONE_IDENTIFIER) and
|
||||||
len(self.properties[self.VPCZONE_IDENTIFIER]) != 1:
|
len(self.properties[self.VPCZONE_IDENTIFIER]) != 1):
|
||||||
raise exception.NotSupported(feature=_("Anything other than one "
|
raise exception.NotSupported(feature=_("Anything other than one "
|
||||||
"VPCZoneIdentifier"))
|
"VPCZoneIdentifier"))
|
||||||
|
|
||||||
|
@ -63,9 +63,9 @@ common_properties_schema = {
|
|||||||
),
|
),
|
||||||
REPEAT_ACTIONS: properties.Schema(
|
REPEAT_ACTIONS: properties.Schema(
|
||||||
properties.Schema.BOOLEAN,
|
properties.Schema.BOOLEAN,
|
||||||
_('False to trigger actions when the threshold is reached AND '
|
_("False to trigger actions when the threshold is reached AND "
|
||||||
'the alarm\'s state has changed. By default, actions are called '
|
"the alarm's state has changed. By default, actions are called "
|
||||||
'each time the threshold is reached.'),
|
"each time the threshold is reached."),
|
||||||
default='true',
|
default='true',
|
||||||
update_allowed=True
|
update_allowed=True
|
||||||
)
|
)
|
||||||
|
@ -140,8 +140,8 @@ class ElasticIp(resource.Resource):
|
|||||||
server.remove_floating_ip(self._ipaddress())
|
server.remove_floating_ip(self._ipaddress())
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
is_not_found = self.client_plugin('nova').is_not_found(e)
|
is_not_found = self.client_plugin('nova').is_not_found(e)
|
||||||
is_unprocessable_entity = self.client_plugin('nova').\
|
is_unprocessable_entity = self.client_plugin(
|
||||||
is_unprocessable_entity(e)
|
'nova').is_unprocessable_entity(e)
|
||||||
|
|
||||||
if (not is_not_found and not is_unprocessable_entity):
|
if (not is_not_found and not is_unprocessable_entity):
|
||||||
raise
|
raise
|
||||||
@ -259,8 +259,7 @@ class ElasticIpAssociation(resource.Resource):
|
|||||||
router = vpc.VPC.router_for_vpc(self.neutron(), network_id)
|
router = vpc.VPC.router_for_vpc(self.neutron(), network_id)
|
||||||
if router is not None:
|
if router is not None:
|
||||||
floatingip = self.neutron().show_floatingip(float_id)
|
floatingip = self.neutron().show_floatingip(float_id)
|
||||||
floating_net_id = \
|
floating_net_id = floatingip['floatingip']['floating_network_id']
|
||||||
floatingip['floatingip']['floating_network_id']
|
|
||||||
self.neutron().add_gateway_router(
|
self.neutron().add_gateway_router(
|
||||||
router['id'], {'network_id': floating_net_id})
|
router['id'], {'network_id': floating_net_id})
|
||||||
|
|
||||||
|
@ -510,9 +510,8 @@ class Instance(resource.Resource):
|
|||||||
# if SubnetId property in Instance, ensure subnet exists
|
# if SubnetId property in Instance, ensure subnet exists
|
||||||
if subnet_id:
|
if subnet_id:
|
||||||
neutronclient = self.neutron()
|
neutronclient = self.neutron()
|
||||||
network_id = \
|
network_id = self.client_plugin(
|
||||||
self.client_plugin('neutron').network_id_from_subnet_id(
|
'neutron').network_id_from_subnet_id(subnet_id)
|
||||||
subnet_id)
|
|
||||||
# if subnet verified, create a port to use this subnet
|
# if subnet verified, create a port to use this subnet
|
||||||
# if port is not created explicitly, nova will choose
|
# if port is not created explicitly, nova will choose
|
||||||
# the first subnet in the given network.
|
# the first subnet in the given network.
|
||||||
@ -525,9 +524,8 @@ class Instance(resource.Resource):
|
|||||||
}
|
}
|
||||||
|
|
||||||
if security_groups:
|
if security_groups:
|
||||||
props['security_groups'] = \
|
props['security_groups'] = self.client_plugin(
|
||||||
self.client_plugin('neutron').get_secgroup_uuids(
|
'neutron').get_secgroup_uuids(security_groups)
|
||||||
security_groups)
|
|
||||||
|
|
||||||
port = neutronclient.create_port({'port': props})['port']
|
port = neutronclient.create_port({'port': props})['port']
|
||||||
|
|
||||||
|
@ -526,8 +526,8 @@ backend servers
|
|||||||
if res:
|
if res:
|
||||||
return res
|
return res
|
||||||
|
|
||||||
if cfg.CONF.loadbalancer_template and \
|
if (cfg.CONF.loadbalancer_template and
|
||||||
not os.access(cfg.CONF.loadbalancer_template, os.R_OK):
|
not os.access(cfg.CONF.loadbalancer_template, os.R_OK)):
|
||||||
msg = _('Custom LoadBalancer template can not be found')
|
msg = _('Custom LoadBalancer template can not be found')
|
||||||
raise exception.StackValidationFailed(message=msg)
|
raise exception.StackValidationFailed(message=msg)
|
||||||
|
|
||||||
|
@ -282,13 +282,13 @@ class Port(neutron.NeutronResource):
|
|||||||
# 'default' securityGroup. If has the 'security_groups' and the
|
# 'default' securityGroup. If has the 'security_groups' and the
|
||||||
# value is [], which means to create the port without securityGroup.
|
# value is [], which means to create the port without securityGroup.
|
||||||
if props.get(self.SECURITY_GROUPS) is not None:
|
if props.get(self.SECURITY_GROUPS) is not None:
|
||||||
props[self.SECURITY_GROUPS] = self.client_plugin().\
|
props[self.SECURITY_GROUPS] = self.client_plugin(
|
||||||
get_secgroup_uuids(props.get(self.SECURITY_GROUPS))
|
).get_secgroup_uuids(props.get(self.SECURITY_GROUPS))
|
||||||
else:
|
else:
|
||||||
# And the update should has the same behavior.
|
# And the update should has the same behavior.
|
||||||
if prepare_for_update:
|
if prepare_for_update:
|
||||||
props[self.SECURITY_GROUPS] = self.client_plugin().\
|
props[self.SECURITY_GROUPS] = self.client_plugin(
|
||||||
get_secgroup_uuids(['default'])
|
).get_secgroup_uuids(['default'])
|
||||||
|
|
||||||
if not props[self.FIXED_IPS]:
|
if not props[self.FIXED_IPS]:
|
||||||
del(props[self.FIXED_IPS])
|
del(props[self.FIXED_IPS])
|
||||||
|
@ -143,8 +143,8 @@ class NovaFloatingIpAssociation(resource.Resource):
|
|||||||
try:
|
try:
|
||||||
server = self.nova().servers.get(self.properties[self.SERVER])
|
server = self.nova().servers.get(self.properties[self.SERVER])
|
||||||
if server:
|
if server:
|
||||||
fl_ip = self.nova().floating_ips.\
|
fl_ip = self.nova().floating_ips.get(
|
||||||
get(self.properties[self.FLOATING_IP])
|
self.properties[self.FLOATING_IP])
|
||||||
self.nova().servers.remove_floating_ip(server, fl_ip.ip)
|
self.nova().servers.remove_floating_ip(server, fl_ip.ip)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.client_plugin().ignore_not_found(e)
|
self.client_plugin().ignore_not_found(e)
|
||||||
|
@ -340,8 +340,8 @@ class CinderVolume(aws_vol.Volume):
|
|||||||
return res
|
return res
|
||||||
|
|
||||||
# Scheduler hints are only supported from Cinder API v2
|
# Scheduler hints are only supported from Cinder API v2
|
||||||
if self.properties.get(self.CINDER_SCHEDULER_HINTS) \
|
if (self.properties.get(self.CINDER_SCHEDULER_HINTS)
|
||||||
and self.cinder().volume_api_version == 1:
|
and self.cinder().volume_api_version == 1):
|
||||||
raise exception.StackValidationFailed(
|
raise exception.StackValidationFailed(
|
||||||
message=_('Scheduler hints are not supported by the current '
|
message=_('Scheduler hints are not supported by the current '
|
||||||
'volume API.'))
|
'volume API.'))
|
||||||
|
@ -101,11 +101,11 @@ class HeatWaitConditionHandle(wc_base.BaseWaitConditionHandle):
|
|||||||
return self.data().get('endpoint')
|
return self.data().get('endpoint')
|
||||||
elif key == self.CURL_CLI:
|
elif key == self.CURL_CLI:
|
||||||
# Construct curl command for template-author convenience
|
# Construct curl command for template-author convenience
|
||||||
return ('curl -i -X POST '
|
return ("curl -i -X POST "
|
||||||
'-H \'X-Auth-Token: %(token)s\' '
|
"-H 'X-Auth-Token: %(token)s' "
|
||||||
'-H \'Content-Type: application/json\' '
|
"-H 'Content-Type: application/json' "
|
||||||
'-H \'Accept: application/json\' '
|
"-H 'Accept: application/json' "
|
||||||
'%(endpoint)s' %
|
"%(endpoint)s" %
|
||||||
dict(token=self.data().get('token'),
|
dict(token=self.data().get('token'),
|
||||||
endpoint=self.data().get('endpoint')))
|
endpoint=self.data().get('endpoint')))
|
||||||
|
|
||||||
|
@ -146,18 +146,18 @@ class ResourceGroup(stack_resource.StackResource):
|
|||||||
schema={
|
schema={
|
||||||
REMOVAL_RSRC_LIST: properties.Schema(
|
REMOVAL_RSRC_LIST: properties.Schema(
|
||||||
properties.Schema.LIST,
|
properties.Schema.LIST,
|
||||||
_('List of resources to be removed '
|
_("List of resources to be removed "
|
||||||
'when doing an update which requires removal of '
|
"when doing an update which requires removal of "
|
||||||
'specific resources. '
|
"specific resources. "
|
||||||
'The resource may be specified several ways: '
|
"The resource may be specified several ways: "
|
||||||
'(1) The resource name, as in the nested stack, '
|
"(1) The resource name, as in the nested stack, "
|
||||||
'(2) The resource reference returned from '
|
"(2) The resource reference returned from "
|
||||||
'get_resource in a template, as available via '
|
"get_resource in a template, as available via "
|
||||||
'the \'refs\' attribute '
|
"the 'refs' attribute "
|
||||||
'Note this is destructive on update when specified; '
|
"Note this is destructive on update when specified; "
|
||||||
'even if the count is not being reduced, and once '
|
"even if the count is not being reduced, and once "
|
||||||
'a resource name is removed, it\'s name is never '
|
"a resource name is removed, it's name is never "
|
||||||
'reused in subsequent updates'
|
"reused in subsequent updates"
|
||||||
),
|
),
|
||||||
default=[]
|
default=[]
|
||||||
),
|
),
|
||||||
|
@ -841,8 +841,8 @@ class Server(stack_user.StackUser):
|
|||||||
else:
|
else:
|
||||||
# remove not updated networks from old and new networks lists,
|
# remove not updated networks from old and new networks lists,
|
||||||
# also get list these networks
|
# also get list these networks
|
||||||
not_updated_networks = \
|
not_updated_networks = self._get_network_matches(
|
||||||
self._get_network_matches(old_networks, new_networks)
|
old_networks, new_networks)
|
||||||
|
|
||||||
self.update_networks_matching_iface_port(
|
self.update_networks_matching_iface_port(
|
||||||
old_networks + not_updated_networks, interfaces)
|
old_networks + not_updated_networks, interfaces)
|
||||||
@ -986,8 +986,8 @@ class Server(stack_user.StackUser):
|
|||||||
# record if any networks include explicit ports
|
# record if any networks include explicit ports
|
||||||
networks_with_port = False
|
networks_with_port = False
|
||||||
for network in networks:
|
for network in networks:
|
||||||
networks_with_port = networks_with_port or \
|
networks_with_port = (networks_with_port or
|
||||||
network.get(self.NETWORK_PORT)
|
network.get(self.NETWORK_PORT))
|
||||||
if network.get(self.NETWORK_UUID) and network.get(self.NETWORK_ID):
|
if network.get(self.NETWORK_UUID) and network.get(self.NETWORK_ID):
|
||||||
msg = _('Properties "%(uuid)s" and "%(id)s" are both set '
|
msg = _('Properties "%(uuid)s" and "%(id)s" are both set '
|
||||||
'to the network "%(network)s" for the server '
|
'to the network "%(network)s" for the server '
|
||||||
|
@ -150,8 +150,8 @@ class SoftwareComponent(sc.SoftwareConfig):
|
|||||||
actions = config.get(self.CONFIG_ACTIONS)
|
actions = config.get(self.CONFIG_ACTIONS)
|
||||||
if any(action in config_actions for action in actions):
|
if any(action in config_actions for action in actions):
|
||||||
msg = _('Defining more than one configuration for the same '
|
msg = _('Defining more than one configuration for the same '
|
||||||
'action in SoftwareComponent "%s" is not allowed.')\
|
'action in SoftwareComponent "%s" is not allowed.'
|
||||||
% self.name
|
) % self.name
|
||||||
raise exception.StackValidationFailed(message=msg)
|
raise exception.StackValidationFailed(message=msg)
|
||||||
config_actions.update(actions)
|
config_actions.update(actions)
|
||||||
|
|
||||||
|
@ -215,8 +215,8 @@ class SoftwareDeployment(signal_responder.SignalResponder):
|
|||||||
config = self.rpc_client().show_software_config(
|
config = self.rpc_client().show_software_config(
|
||||||
self.context, config_id)
|
self.context, config_id)
|
||||||
|
|
||||||
if action not in self.properties[self.DEPLOY_ACTIONS]\
|
if (action not in self.properties[self.DEPLOY_ACTIONS]
|
||||||
and not config[rpc_api.SOFTWARE_CONFIG_GROUP] == 'component':
|
and not config[rpc_api.SOFTWARE_CONFIG_GROUP] == 'component'):
|
||||||
return
|
return
|
||||||
|
|
||||||
props = self._build_properties(
|
props = self._build_properties(
|
||||||
|
@ -106,7 +106,7 @@ class SwiftSignalHandle(resource.Resource):
|
|||||||
elif key == self.ENDPOINT:
|
elif key == self.ENDPOINT:
|
||||||
return self.data().get(self.ENDPOINT)
|
return self.data().get(self.ENDPOINT)
|
||||||
elif key == self.CURL_CLI:
|
elif key == self.CURL_CLI:
|
||||||
return ('curl -i -X PUT \'%s\'' %
|
return ("curl -i -X PUT '%s'" %
|
||||||
self.data().get(self.ENDPOINT))
|
self.data().get(self.ENDPOINT))
|
||||||
|
|
||||||
def handle_delete(self):
|
def handle_delete(self):
|
||||||
|
@ -127,8 +127,8 @@ class ResourceDefinitionCore(object):
|
|||||||
This returns a new resource definition, with all of the functions
|
This returns a new resource definition, with all of the functions
|
||||||
parsed in the context of the specified stack and template.
|
parsed in the context of the specified stack and template.
|
||||||
"""
|
"""
|
||||||
assert not getattr(self, '_frozen', False), \
|
assert not getattr(self, '_frozen', False
|
||||||
"Cannot re-parse a frozen definition"
|
), "Cannot re-parse a frozen definition"
|
||||||
|
|
||||||
def reparse_snippet(snippet):
|
def reparse_snippet(snippet):
|
||||||
return template.parse(stack, copy.deepcopy(snippet))
|
return template.parse(stack, copy.deepcopy(snippet))
|
||||||
|
@ -901,8 +901,8 @@ class Stack(collections.Mapping):
|
|||||||
# rights to delete the trust unless an admin
|
# rights to delete the trust unless an admin
|
||||||
trustor_id = user_creds.get('trustor_user_id')
|
trustor_id = user_creds.get('trustor_user_id')
|
||||||
if self.context.user_id != trustor_id:
|
if self.context.user_id != trustor_id:
|
||||||
LOG.debug('Context user_id doesn\'t match '
|
LOG.debug("Context user_id doesn't match "
|
||||||
'trustor, using stored context')
|
"trustor, using stored context")
|
||||||
sc = self.stored_context()
|
sc = self.stored_context()
|
||||||
sc.clients.client('keystone').delete_trust(
|
sc.clients.client('keystone').delete_trust(
|
||||||
trust_id)
|
trust_id)
|
||||||
|
@@ -73,8 +73,8 @@ class StackLock(object):
 'stack': self.stack.id})
 return

-if lock_engine_id == self.engine_id or \
-self.engine_alive(self.context, lock_engine_id):
+if (lock_engine_id == self.engine_id or
+self.engine_alive(self.context, lock_engine_id)):
 LOG.debug("Lock on stack %(stack)s is owned by engine "
 "%(engine)s" % {'stack': self.stack.id,
 'engine': lock_engine_id})
@ -154,8 +154,8 @@ class StackResource(resource.Resource):
|
|||||||
def _parse_nested_stack(self, stack_name, child_template, child_params,
|
def _parse_nested_stack(self, stack_name, child_template, child_params,
|
||||||
timeout_mins=None, adopt_data=None):
|
timeout_mins=None, adopt_data=None):
|
||||||
if self.stack.nested_depth >= cfg.CONF.max_nested_stack_depth:
|
if self.stack.nested_depth >= cfg.CONF.max_nested_stack_depth:
|
||||||
msg = _("Recursion depth exceeds %d.") % \
|
msg = _("Recursion depth exceeds %d."
|
||||||
cfg.CONF.max_nested_stack_depth
|
) % cfg.CONF.max_nested_stack_depth
|
||||||
raise exception.RequestLimitExceeded(message=msg)
|
raise exception.RequestLimitExceeded(message=msg)
|
||||||
|
|
||||||
parsed_template = self._parse_child_template(child_template)
|
parsed_template = self._parse_child_template(child_template)
|
||||||
|
@ -106,8 +106,8 @@ class WaitConditionTest(common.HeatTestCase):
|
|||||||
id = identifier.ResourceIdentifier('test_tenant', stack.name,
|
id = identifier.ResourceIdentifier('test_tenant', stack.name,
|
||||||
stack.id, '', 'WaitHandle')
|
stack.id, '', 'WaitHandle')
|
||||||
self.m.StubOutWithMock(aws_wch.WaitConditionHandle, 'identifier')
|
self.m.StubOutWithMock(aws_wch.WaitConditionHandle, 'identifier')
|
||||||
aws_wch.WaitConditionHandle.identifier().\
|
aws_wch.WaitConditionHandle.identifier(
|
||||||
MultipleTimes().AndReturn(id)
|
).MultipleTimes().AndReturn(id)
|
||||||
|
|
||||||
if stub_status:
|
if stub_status:
|
||||||
self.m.StubOutWithMock(aws_wch.WaitConditionHandle,
|
self.m.StubOutWithMock(aws_wch.WaitConditionHandle,
|
||||||
|
@ -152,8 +152,8 @@ class HeatTestCase(testscenarios.WithScenarios,
|
|||||||
self.m.StubOutWithMock(glance.ImageConstraint, 'validate')
|
self.m.StubOutWithMock(glance.ImageConstraint, 'validate')
|
||||||
if num is None:
|
if num is None:
|
||||||
glance.ImageConstraint.validate(
|
glance.ImageConstraint.validate(
|
||||||
mox.IgnoreArg(), mox.IgnoreArg()).MultipleTimes().\
|
mox.IgnoreArg(), mox.IgnoreArg()
|
||||||
AndReturn(True)
|
).MultipleTimes().AndReturn(True)
|
||||||
else:
|
else:
|
||||||
for x in range(num):
|
for x in range(num):
|
||||||
glance.ImageConstraint.validate(
|
glance.ImageConstraint.validate(
|
||||||
|
@ -33,11 +33,11 @@ class FakeClient(object):
|
|||||||
expected = (method, url)
|
expected = (method, url)
|
||||||
called = self.client.callstack[pos][0:2]
|
called = self.client.callstack[pos][0:2]
|
||||||
|
|
||||||
assert self.client.callstack, \
|
assert self.client.callstack, ("Expected %s %s "
|
||||||
"Expected %s %s but no calls were made." % expected
|
"but no calls were made." % expected)
|
||||||
|
|
||||||
assert expected == called, 'Expected %s %s; got %s %s' % \
|
assert expected == called, 'Expected %s %s; got %s %s' % (
|
||||||
(expected + called)
|
expected + called)
|
||||||
|
|
||||||
if body is not None:
|
if body is not None:
|
||||||
assert self.client.callstack[pos][2] == body
|
assert self.client.callstack[pos][2] == body
|
||||||
@ -48,8 +48,8 @@ class FakeClient(object):
|
|||||||
"""
|
"""
|
||||||
expected = (method, url)
|
expected = (method, url)
|
||||||
|
|
||||||
assert self.client.callstack, \
|
assert self.client.callstack, ("Expected %s %s but no calls "
|
||||||
"Expected %s %s but no calls were made." % expected
|
"were made." % expected)
|
||||||
|
|
||||||
found = False
|
found = False
|
||||||
for entry in self.client.callstack:
|
for entry in self.client.callstack:
|
||||||
@ -57,8 +57,8 @@ class FakeClient(object):
|
|||||||
found = True
|
found = True
|
||||||
break
|
break
|
||||||
|
|
||||||
assert found, 'Expected %s %s; got %s' % \
|
assert found, 'Expected %s %s; got %s' % (expected,
|
||||||
(expected, self.client.callstack)
|
self.client.callstack)
|
||||||
if body is not None:
|
if body is not None:
|
||||||
try:
|
try:
|
||||||
assert entry[2] == body
|
assert entry[2] == body
|
||||||
@@ -133,8 +133,8 @@ class FakeKeystoneClient(object):

  def url_for(self, **kwargs):
  if self.only_services is not None:
- if 'service_type' in kwargs and \
- kwargs['service_type'] not in self.only_services:
+ if ('service_type' in kwargs and
+ kwargs['service_type'] not in self.only_services):
  # keystone client throws keystone exceptions, not cinder
  # exceptions.
  raise exceptions.EndpointNotFound()
@@ -160,7 +160,7 @@ class StackUserResource(stack_user.StackUser):


  class ResourceWithCustomConstraint(GenericResource):
- properties_schema = \
- {'Foo': properties.Schema(
+ properties_schema = {
+ 'Foo': properties.Schema(
  properties.Schema.STRING,
  constraints=[constraints.CustomConstraint('neutron.network')])}
@@ -99,8 +99,8 @@ class HeatWaitConditionTest(common.HeatTestCase):
  stack.id, '', 'wait_handle')
  self.m.StubOutWithMock(heat_wch.HeatWaitConditionHandle,
  'identifier')
- heat_wch.HeatWaitConditionHandle.\
- identifier().MultipleTimes().AndReturn(id)
+ heat_wch.HeatWaitConditionHandle.identifier(
+ ).MultipleTimes().AndReturn(id)

  if stub_status:
  self.m.StubOutWithMock(heat_wch.HeatWaitConditionHandle,
@@ -65,9 +65,9 @@ class InstantiationDataTest(common.HeatTestCase):
  data = {"AWSTemplateFormatVersion": "2010-09-09",
  "key1": ["val1[0]", "val1[1]"],
  "key2": "val2"}
- json_repr = '{"AWSTemplateFormatVersion" : "2010-09-09",' \
- '"key1": [ "val1[0]", "val1[1]" ], ' \
- '"key2": "val2" }'
+ json_repr = ('{"AWSTemplateFormatVersion" : "2010-09-09",'
+ '"key1": [ "val1[0]", "val1[1]" ], '
+ '"key2": "val2" }')
  parsed = stacks.InstantiationData.format_parse(json_repr, 'foo')
  self.assertEqual(data, parsed)

@@ -108,8 +108,8 @@ parameters:
  self.assertEqual(template, data.template())

  def test_template_string_json(self):
- template = '{"heat_template_version": "2013-05-23",' \
- '"foo": "bar", "blarg": "wibble"}'
+ template = ('{"heat_template_version": "2013-05-23",'
+ '"foo": "bar", "blarg": "wibble"}')
  body = {'template': template}
  data = stacks.InstantiationData(body)
  self.assertEqual(json.loads(template), data.template())
@@ -299,14 +299,14 @@ class AutoScalingGroupTest(common.HeatTestCase):
  self.m.StubOutWithMock(self.fc.servers, 'get')
  self.m.StubOutWithMock(self.fc.client, 'post_servers_1234_action')

- self.fc.servers.get(mox.IgnoreArg()).\
- MultipleTimes().AndReturn(return_server)
+ self.fc.servers.get(
+ mox.IgnoreArg()).MultipleTimes().AndReturn(return_server)
  self.fc.client.post_servers_1234_action(
- body={'resize': {'flavorRef': 3}}).\
- MultipleTimes().AndReturn((202, None))
+ body={'resize': {'flavorRef': 3}}
+ ).MultipleTimes().AndReturn((202, None))
  self.fc.client.post_servers_1234_action(
- body={'confirmResize': None}).\
- MultipleTimes().AndReturn((202, None))
+ body={'confirmResize': None}
+ ).MultipleTimes().AndReturn((202, None))

  self._stub_grp_replace(num_creates_expected_on_updt,
  num_deletes_expected_on_updt,
@@ -326,8 +326,8 @@ class CeilometerAlarmTest(common.HeatTestCase):
  properties = t['Resources']['MEMAlarmHigh']['Properties']
  # Test for bug/1383521, where meter_name is in NOVA_METERS
  properties[alarm.CeilometerAlarm.METER_NAME] = 'memory.usage'
- properties['matching_metadata'] =\
- {'metadata.user_metadata.groupname': 'foo'}
+ properties['matching_metadata'] = {'metadata.user_metadata.groupname':
+ 'foo'}

  self.stack = self.create_stack(template=json.dumps(t))

@@ -40,8 +40,8 @@ class CinderClientPluginTests(common.HeatTestCase):
  volume_id = str(uuid.uuid4())
  my_volume = self.m.CreateMockAnything()
  self.cinder_client.volumes = self.m.CreateMockAnything()
- self.cinder_client.volumes.get(volume_id).MultipleTimes().\
- AndReturn(my_volume)
+ self.cinder_client.volumes.get(
+ volume_id).MultipleTimes().AndReturn(my_volume)
  self.m.ReplayAll()

  self.assertEqual(my_volume, self.cinder_plugin.get_volume(volume_id))
@@ -53,8 +53,8 @@ class CinderClientPluginTests(common.HeatTestCase):
  snapshot_id = str(uuid.uuid4())
  my_snapshot = self.m.CreateMockAnything()
  self.cinder_client.volume_snapshots = self.m.CreateMockAnything()
- self.cinder_client.volume_snapshots.get(snapshot_id).MultipleTimes().\
- AndReturn(my_snapshot)
+ self.cinder_client.volume_snapshots.get(
+ snapshot_id).MultipleTimes().AndReturn(my_snapshot)
  self.m.ReplayAll()

  self.assertEqual(my_snapshot,
@@ -332,14 +332,16 @@ class EIPTest(common.HeatTestCase):
  self._mock_server_get(mock_server=server, multiple=True)

  self.m.StubOutWithMock(self.fc.servers, 'add_floating_ip')
- self.fc.servers.add_floating_ip(server, floating_ip.ip, None).\
- AndRaise(nova_exceptions.BadRequest(400))
+ self.fc.servers.add_floating_ip(
+ server, floating_ip.ip, None
+ ).AndRaise(nova_exceptions.BadRequest(400))

  self.m.StubOutWithMock(self.fc.servers, 'remove_floating_ip')
  msg = ("ClientException: Floating ip 172.24.4.13 is not associated "
  "with instance 1234.")
- self.fc.servers.remove_floating_ip(server, floating_ip.ip).\
- AndRaise(nova_exceptions.ClientException(422, msg))
+ self.fc.servers.remove_floating_ip(
+ server, floating_ip.ip
+ ).AndRaise(nova_exceptions.ClientException(422, msg))
  self.m.StubOutWithMock(self.fc.floating_ips, 'delete')
  self.fc.floating_ips.delete(mox.IsA(object))

@@ -200,8 +200,8 @@ def setup_keystone_mocks(mocks, stack):
  def setup_mock_for_image_constraint(mocks, imageId_input,
  imageId_output=744):
  mocks.StubOutWithMock(glance.GlanceClientPlugin, 'get_image_id')
- glance.GlanceClientPlugin.get_image_id(imageId_input).\
- MultipleTimes().AndReturn(imageId_output)
+ glance.GlanceClientPlugin.get_image_id(
+ imageId_input).MultipleTimes().AndReturn(imageId_output)


  def setup_mocks(mocks, stack, mock_image_constraint=True):
@@ -856,8 +856,8 @@ class StackServiceCreateUpdateDeleteTest(common.HeatTestCase):
  stack_lock.StackLock.try_acquire().AndReturn("other-engine-fake-uuid")

  self.m.StubOutWithMock(stack_lock.StackLock, 'engine_alive')
- stack_lock.StackLock.engine_alive(self.ctx, "other-engine-fake-uuid")\
- .AndReturn(True)
+ stack_lock.StackLock.engine_alive(
+ self.ctx, "other-engine-fake-uuid").AndReturn(True)

  self.m.StubOutWithMock(rpc_client._CallContext, 'call')
  rpc_client._CallContext.call(
@@ -889,8 +889,8 @@ class StackServiceCreateUpdateDeleteTest(common.HeatTestCase):
  stack_lock.StackLock.try_acquire().AndReturn("other-engine-fake-uuid")

  self.m.StubOutWithMock(stack_lock.StackLock, 'engine_alive')
- stack_lock.StackLock.engine_alive(self.ctx, "other-engine-fake-uuid")\
- .AndReturn(True)
+ stack_lock.StackLock.engine_alive(
+ self.ctx, "other-engine-fake-uuid").AndReturn(True)

  self.m.StubOutWithMock(rpc_client._CallContext, 'call')
  rpc_client._CallContext.call(
@@ -921,8 +921,8 @@ class StackServiceCreateUpdateDeleteTest(common.HeatTestCase):
  stack_lock.StackLock.try_acquire().AndReturn("other-engine-fake-uuid")

  self.m.StubOutWithMock(stack_lock.StackLock, 'engine_alive')
- stack_lock.StackLock.engine_alive(self.ctx, "other-engine-fake-uuid")\
- .AndReturn(False)
+ stack_lock.StackLock.engine_alive(
+ self.ctx, "other-engine-fake-uuid").AndReturn(False)

  self.m.StubOutWithMock(stack_lock.StackLock, 'acquire')
  stack_lock.StackLock.acquire().AndReturn(None)
@@ -1878,8 +1878,10 @@ class StackServiceTest(common.HeatTestCase):
  @stack_context('service_list_all_test_stack')
  def test_stack_list_all(self):
  self.m.StubOutWithMock(parser.Stack, '_from_db')
- parser.Stack._from_db(self.ctx, mox.IgnoreArg(), resolve_data=False)\
- .AndReturn(self.stack)
+ parser.Stack._from_db(
+ self.ctx, mox.IgnoreArg(),
+ resolve_data=False
+ ).AndReturn(self.stack)

  self.m.ReplayAll()
  sl = self.eng.list_stacks(self.ctx)
@@ -2458,9 +2460,9 @@ class StackServiceTest(common.HeatTestCase):

  stack_not_found_exc = exception.StackNotFound(stack_name='test')
  self.m.StubOutWithMock(service.EngineService, '_get_stack')
- service.EngineService \
- ._get_stack(self.ctx, non_exist_identifier, show_deleted=True) \
- .AndRaise(stack_not_found_exc)
+ service.EngineService._get_stack(
+ self.ctx, non_exist_identifier, show_deleted=True
+ ).AndRaise(stack_not_found_exc)
  self.m.ReplayAll()

  ex = self.assertRaises(dispatcher.ExpectedException,
@@ -2860,8 +2862,8 @@ class StackServiceTest(common.HeatTestCase):

  self.m.StubOutWithMock(watchrule.WatchRule, 'set_watch_state')
  for state in ["HGJHGJHG", "1234", "!\*(&%"]:
- watchrule.WatchRule.set_watch_state(state)\
- .InAnyOrder().AndRaise(ValueError)
+ watchrule.WatchRule.set_watch_state(
+ state).InAnyOrder().AndRaise(ValueError)
  self.m.ReplayAll()

  for state in ["HGJHGJHG", "1234", "!\*(&%"]:
@@ -2876,8 +2878,9 @@ class StackServiceTest(common.HeatTestCase):
  state = watchrule.WatchRule.ALARM # State valid

  self.m.StubOutWithMock(watchrule.WatchRule, 'load')
- watchrule.WatchRule.load(self.ctx, "nonexistent")\
- .AndRaise(exception.WatchRuleNotFound(watch_name='test'))
+ watchrule.WatchRule.load(
+ self.ctx, "nonexistent"
+ ).AndRaise(exception.WatchRuleNotFound(watch_name='test'))
  self.m.ReplayAll()

  ex = self.assertRaises(dispatcher.ExpectedException,
@@ -3018,8 +3021,8 @@ class StackServiceTest(common.HeatTestCase):
  self.eng._validate_new_stack,
  self.ctx, 'test_existing_stack',
  parsed_template)
- msg = \
- u'u\'"Type" is not a valid keyword inside a resource definition\''
+ msg = (u'u\'"Type" is not a valid keyword '
+ 'inside a resource definition\'')
  self.assertEqual(msg, six.text_type(ex))

  def test_validate_new_stack_checks_incorrect_sections(self):
@@ -104,8 +104,8 @@ class FaultMiddlewareTest(common.HeatTestCase):
  serialized, ["heat.common.exception"])
  wrapper = fault.FaultWrapper(None)
  msg = wrapper._error(remote_error)
- expected_message, expected_traceback = six.text_type(remote_error).\
- split('\n', 1)
+ expected_message, expected_traceback = six.text_type(
+ remote_error).split('\n', 1)
  expected = {'code': 404,
  'error': {'message': expected_message,
  'traceback': expected_traceback,
@@ -211,8 +211,8 @@ class FaultMiddlewareTest(common.HeatTestCase):

  wrapper = fault.FaultWrapper(None)
  msg = wrapper._error(remote_error)
- expected_message, expected_traceback = six.text_type(remote_error).\
- split('\n', 1)
+ expected_message, expected_traceback = six.text_type(
+ remote_error).split('\n', 1)
  expected = {'code': 404,
  'error': {'message': expected_message,
  'traceback': expected_traceback,
@@ -68,8 +68,8 @@ class GlanceUtilsTests(common.HeatTestCase):
  self.glance_client.images.get(img_name).AndRaise(
  glance_exceptions.HTTPNotFound())
  filters = {'name': img_name}
- self.glance_client.images.list(filters=filters).MultipleTimes().\
- AndReturn([my_image])
+ self.glance_client.images.list(
+ filters=filters).MultipleTimes().AndReturn([my_image])
  self.m.ReplayAll()

  self.assertEqual(img_id, self.glance_plugin.get_image_id(img_name))
@@ -101,8 +101,8 @@ class GlanceUtilsTests(common.HeatTestCase):
  self.glance_client.images.get(img_name).AndRaise(
  glance_exceptions.HTTPNotFound())
  filters = {'name': img_name}
- self.glance_client.images.list(filters=filters).MultipleTimes().\
- AndReturn([])
+ self.glance_client.images.list(
+ filters=filters).MultipleTimes().AndReturn([])
  self.m.ReplayAll()

  self.assertRaises(exception.ImageNotFound,
@@ -118,8 +118,8 @@ class GlanceUtilsTests(common.HeatTestCase):

  self.glance_client.images = self.m.CreateMockAnything()
  filters = {'name': img_name}
- self.glance_client.images.list(filters=filters).MultipleTimes().\
- AndReturn(image_list)
+ self.glance_client.images.list(
+ filters=filters).MultipleTimes().AndReturn(image_list)
  self.m.ReplayAll()
  self.assertRaises(exception.PhysicalResourceNameAmbiguity,
  self.glance_plugin.get_image_id, img_name)
@@ -203,7 +203,7 @@ class KeystoneClientTest(common.HeatTestCase):
  err = self.assertRaises(exception.Error,
  heat_ks_client.create_stack_user,
  'auser', password='password')
- self.assertIn('Can\'t find role heat_stack_user', six.text_type(err))
+ self.assertIn("Can't find role heat_stack_user", six.text_type(err))

  def _mock_roles_list(self, heat_stack_user='heat_stack_user'):
  mock_roles_list = []
@@ -287,7 +287,7 @@ class KeystoneClientTest(common.HeatTestCase):
  err = self.assertRaises(exception.Error,
  heat_ks_client.create_stack_domain_user,
  username='duser', project_id='aproject')
- self.assertIn('Can\'t find role heat_stack_user', six.text_type(err))
+ self.assertIn("Can't find role heat_stack_user", six.text_type(err))

  def test_delete_stack_domain_user(self):
  """Test deleting a stack domain user."""
@@ -570,7 +570,7 @@ class KeystoneClientTest(common.HeatTestCase):
  heat_ks_client = heat_keystoneclient.KeystoneClient(ctx)
  exc = self.assertRaises(exception.MissingCredentialError,
  heat_ks_client.create_trust_context)
- expected = 'Missing required credential: roles [\'heat_stack_owner\']'
+ expected = "Missing required credential: roles ['heat_stack_owner']"
  self.assertIn(expected, six.text_type(exc))

  def test_init_domain_cfg_not_set_fallback(self):
@@ -1423,8 +1423,8 @@ class KeystoneClientTest(common.HeatTestCase):
  """
  self._stubs_v3()
  self.mock_ks_v3_client.service_catalog = self.m.CreateMockAnything()
- self.mock_ks_v3_client.service_catalog.url_for(**expected_kwargs)\
- .AndReturn(service_url)
+ self.mock_ks_v3_client.service_catalog.url_for(
+ **expected_kwargs).AndReturn(service_url)

  self.m.ReplayAll()
  ctx = ctx or utils.dummy_context()
@@ -928,8 +928,8 @@ class StackTest(test_parser.StackTest):
  {'Type': 'ResourceWithPropsType',
  'Properties': {'Foo': 'xyz'}},
  {'Type': 'ResourceWithPropsType',
- 'Properties': {'Foo': 'abc'}}).WithSideEffects(check_props) \
- .AndRaise(resource.UpdateReplace)
+ 'Properties': {'Foo': 'abc'}}
+ ).WithSideEffects(check_props).AndRaise(resource.UpdateReplace)
  self.m.ReplayAll()

  self.stack.update(updated_stack)
@@ -113,8 +113,8 @@ class IdentifierTest(testtools.TestCase):
  self.assertEqual('/p', hi.path)

  def test_arn_url_parse_qs(self):
- url = self.url_prefix +\
- 'arn%3Aopenstack%3Aheat%3A%3At%3Astacks/s/i/p?foo=bar'
+ url = (self.url_prefix +
+ 'arn%3Aopenstack%3Aheat%3A%3At%3Astacks/s/i/p?foo=bar')
  hi = identifier.HeatIdentifier.from_arn_url(url)
  self.assertEqual('t', hi.tenant)
  self.assertEqual('s', hi.stack_name)
@@ -97,10 +97,10 @@ class InstancesTest(common.HeatTestCase):
  def _get_test_template(self, stack_name, image_id=None):
  (tmpl, stack) = self._setup_test_stack(stack_name)

- tmpl.t['Resources']['WebServer']['Properties']['ImageId'] = \
- image_id or 'CentOS 5.2'
+ tmpl.t['Resources']['WebServer']['Properties'][
+ 'ImageId'] = image_id or 'CentOS 5.2'
- tmpl.t['Resources']['WebServer']['Properties']['InstanceType'] = \
- '256 MB Server'
+ tmpl.t['Resources']['WebServer']['Properties'][
+ 'InstanceType'] = '256 MB Server'

  return tmpl, stack

@@ -381,9 +381,9 @@ class InstancesTest(common.HeatTestCase):
  create = scheduler.TaskRunner(instance.create)
  error = self.assertRaises(exception.ResourceFailure, create)
  self.assertEqual(
- 'StackValidationFailed: Property error : WebServer: '
- 'ImageId Error validating value \'Slackware\': '
- 'The Image (Slackware) could not be found.',
+ "StackValidationFailed: Property error : WebServer: "
+ "ImageId Error validating value 'Slackware': "
+ "The Image (Slackware) could not be found.",
  six.text_type(error))

  self.m.VerifyAll()
@@ -221,14 +221,14 @@ class InstanceGroupTest(common.HeatTestCase):
  self.m.StubOutWithMock(self.fc.servers, 'get')
  self.m.StubOutWithMock(self.fc.client, 'post_servers_1234_action')

- self.fc.servers.get(mox.IgnoreArg()).\
- MultipleTimes().AndReturn(return_server)
+ self.fc.servers.get(
+ mox.IgnoreArg()).MultipleTimes().AndReturn(return_server)
  self.fc.client.post_servers_1234_action(
- body={'resize': {'flavorRef': 3}}).\
- MultipleTimes().AndReturn((202, None))
+ body={'resize': {'flavorRef': 3}}
+ ).MultipleTimes().AndReturn((202, None))
  self.fc.client.post_servers_1234_action(
- body={'confirmResize': None}).\
- MultipleTimes().AndReturn((202, None))
+ body={'confirmResize': None}
+ ).MultipleTimes().AndReturn((202, None))

  self._stub_grp_replace(num_creates_expected_on_updt,
  num_deletes_expected_on_updt)
@@ -43,8 +43,8 @@ class LifecyclePluginUtilsTests(common.HeatTestCase):
  self.m.UnsetStubs()
  self.m.StubOutWithMock(resources.global_env(),
  'get_stack_lifecycle_plugins')
- resources.global_env().get_stack_lifecycle_plugins().\
- MultipleTimes().AndReturn(lcp_mappings)
+ resources.global_env().get_stack_lifecycle_plugins(
+ ).MultipleTimes().AndReturn(lcp_mappings)
  self.m.ReplayAll()
  # reset cache
  lifecycle_plugin_utils.pp_class_instances = None
@@ -123,8 +123,8 @@ class LoadBalancerTest(common.HeatTestCase):

  def _mock_get_image_id_success(self, imageId_input, imageId):
  self.m.StubOutWithMock(glance.GlanceClientPlugin, 'get_image_id')
- glance.GlanceClientPlugin.get_image_id(imageId_input).\
- MultipleTimes().AndReturn(imageId)
+ glance.GlanceClientPlugin.get_image_id(
+ imageId_input).MultipleTimes().AndReturn(imageId)

  def _create_stubs(self, key_name='test', stub_meta=True):
  server_name = utils.PhysName(
@@ -96,8 +96,8 @@ Outputs:
  return stack

  def test_nested_stack_create(self):
- urlfetch.get('https://server.test/the.template').MultipleTimes().\
- AndReturn(self.nested_template)
+ urlfetch.get('https://server.test/the.template'
+ ).MultipleTimes().AndReturn(self.nested_template)
  self.m.ReplayAll()

  stack = self.create_stack(self.test_template)
@@ -124,8 +124,9 @@ Outputs:
  def test_nested_stack_adopt(self):
  resource._register_class('GenericResource',
  generic_rsrc.GenericResource)
- urlfetch.get('https://server.test/the.template').MultipleTimes().\
- AndReturn('''
+ urlfetch.get('https://server.test/the.template'
+ ).MultipleTimes().AndReturn(
+ '''
  HeatTemplateFormatVersion: '2012-12-12'
  Parameters:
  KeyName:
@@ -166,8 +167,9 @@ Outputs:
  def test_nested_stack_adopt_fail(self):
  resource._register_class('GenericResource',
  generic_rsrc.GenericResource)
- urlfetch.get('https://server.test/the.template').MultipleTimes().\
- AndReturn('''
+ urlfetch.get('https://server.test/the.template'
+ ).MultipleTimes().AndReturn(
+ '''
  HeatTemplateFormatVersion: '2012-12-12'
  Parameters:
  KeyName:
@@ -200,8 +202,8 @@ Outputs:
  self.m.VerifyAll()

  def test_nested_stack_create_with_timeout(self):
- urlfetch.get('https://server.test/the.template').MultipleTimes().\
- AndReturn(self.nested_template)
+ urlfetch.get('https://server.test/the.template'
+ ).MultipleTimes().AndReturn(self.nested_template)
  self.m.ReplayAll()

  timeout_template = template_format.parse(
@@ -217,8 +219,9 @@ Outputs:
  cfg.CONF.set_override('max_resources_per_stack', 1)
  resource._register_class('GenericResource',
  generic_rsrc.GenericResource)
- urlfetch.get('https://server.test/the.template').MultipleTimes().\
- AndReturn('''
+ urlfetch.get('https://server.test/the.template'
+ ).MultipleTimes().AndReturn(
+ '''
  HeatTemplateFormatVersion: '2012-12-12'
  Parameters:
  KeyName:
@@ -245,8 +248,9 @@ Outputs:
  cfg.CONF.set_override('max_resources_per_stack', 2)
  resource._register_class('GenericResource',
  generic_rsrc.GenericResource)
- urlfetch.get('https://server.test/the.template').MultipleTimes().\
- AndReturn('''
+ urlfetch.get('https://server.test/the.template'
+ ).MultipleTimes().AndReturn(
+ '''
  HeatTemplateFormatVersion: '2012-12-12'
  Parameters:
  KeyName:
@@ -270,10 +274,10 @@ Outputs:
  self.m.VerifyAll()

  def test_nested_stack_update(self):
- urlfetch.get('https://server.test/the.template').MultipleTimes().\
- AndReturn(self.nested_template)
+ urlfetch.get('https://server.test/the.template'
+ ).MultipleTimes().AndReturn(self.nested_template)
- urlfetch.get('https://server.test/new.template').MultipleTimes().\
- AndReturn(self.update_template)
+ urlfetch.get('https://server.test/new.template'
+ ).MultipleTimes().AndReturn(self.update_template)

  self.m.ReplayAll()

@@ -312,10 +316,11 @@ Outputs:
  def test_nested_stack_update_equals_resource_limit(self):
  resource._register_class('GenericResource',
  generic_rsrc.GenericResource)
- urlfetch.get('https://server.test/the.template').MultipleTimes().\
- AndReturn(self.nested_template)
+ urlfetch.get('https://server.test/the.template'
+ ).MultipleTimes().AndReturn(self.nested_template)
- urlfetch.get('https://server.test/new.template').MultipleTimes().\
- AndReturn('''
+ urlfetch.get('https://server.test/new.template'
+ ).MultipleTimes().AndReturn(
+ '''
  HeatTemplateFormatVersion: '2012-12-12'
  Parameters:
  KeyName:
@@ -352,10 +357,11 @@ Outputs:
  def test_nested_stack_update_exceeds_limit(self):
  resource._register_class('GenericResource',
  generic_rsrc.GenericResource)
- urlfetch.get('https://server.test/the.template').MultipleTimes().\
- AndReturn(self.nested_template)
+ urlfetch.get('https://server.test/the.template'
+ ).MultipleTimes().AndReturn(self.nested_template)
- urlfetch.get('https://server.test/new.template').MultipleTimes().\
- AndReturn('''
+ urlfetch.get('https://server.test/new.template'
+ ).MultipleTimes().AndReturn(
+ '''
  HeatTemplateFormatVersion: '2012-12-12'
  Parameters:
  KeyName:
@@ -567,8 +573,8 @@ Resources:
  self.m.VerifyAll()

  def test_nested_stack_delete(self):
- urlfetch.get('https://server.test/the.template').MultipleTimes().\
- AndReturn(self.nested_template)
+ urlfetch.get('https://server.test/the.template'
+ ).MultipleTimes().AndReturn(self.nested_template)
  self.m.ReplayAll()

  stack = self.create_stack(self.test_template)
@@ -583,8 +589,8 @@ Resources:
  self.m.VerifyAll()

  def test_nested_stack_delete_then_delete_parent_stack(self):
- urlfetch.get('https://server.test/the.template').MultipleTimes().\
- AndReturn(self.nested_template)
+ urlfetch.get('https://server.test/the.template'
+ ).MultipleTimes().AndReturn(self.nested_template)
  self.m.ReplayAll()

  stack = self.create_stack(self.test_template)
@@ -225,11 +225,11 @@ class AutoScalingTest(common.HeatTestCase):
  memberc_ret_block = copy.deepcopy(memberc_block)
  memberc_ret_block['member']['id'] = str(uuid.uuid4())

- neutronclient.Client.create_health_monitor(mon_block).\
- AndReturn(mon_ret_block)
+ neutronclient.Client.create_health_monitor(
+ mon_block).AndReturn(mon_ret_block)

- neutronclient.Client.create_pool(pool_block).\
- AndReturn(pool_ret_block)
+ neutronclient.Client.create_pool(
+ pool_block).AndReturn(pool_ret_block)

  neutronclient.Client.associate_health_monitor(
  pool_ret_block['pool']['id'],
@@ -237,57 +237,57 @@ class AutoScalingTest(common.HeatTestCase):
  'id': mon_ret_block['health_monitor']['id']
  }}).AndReturn(None)

- neutronclient.Client.create_vip(vip_block).\
- AndReturn(vip_ret_block)
+ neutronclient.Client.create_vip(
+ vip_block).AndReturn(vip_ret_block)

- neutronclient.Client.show_pool(pool_ret_block['pool']['id']).\
- AndReturn(pool_ret_block)
+ neutronclient.Client.show_pool(
+ pool_ret_block['pool']['id']).AndReturn(pool_ret_block)

- neutronclient.Client.show_vip(vip_ret_block['vip']['id']).\
- AndReturn(vip_ret_block)
+ neutronclient.Client.show_vip(
+ vip_ret_block['vip']['id']).AndReturn(vip_ret_block)

  parser.Stack.validate()
  instid = str(uuid.uuid4())
  instance.Instance.handle_create().AndReturn(instid)
- instance.Instance.check_create_complete(mox.IgnoreArg())\
- .AndReturn(False)
+ instance.Instance.check_create_complete(
+ mox.IgnoreArg()).AndReturn(False)
- instance.Instance.check_create_complete(mox.IgnoreArg())\
- .AndReturn(True)
+ instance.Instance.check_create_complete(
+ mox.IgnoreArg()).AndReturn(True)
  self.stub_ImageConstraint_validate()
  self.stub_FlavorConstraint_validate()
  nova.NovaClientPlugin.server_to_ipaddress(
  mox.IgnoreArg()).AndReturn('1.2.3.4')

- neutronclient.Client.create_member(membera_block).\
- AndReturn(membera_ret_block)
+ neutronclient.Client.create_member(
+ membera_block).AndReturn(membera_ret_block)

  # Start of update
  parser.Stack.validate()
  instid = str(uuid.uuid4())
  instance.Instance.handle_create().AndReturn(instid)
- instance.Instance.check_create_complete(mox.IgnoreArg())\
- .AndReturn(False)
+ instance.Instance.check_create_complete(
+ mox.IgnoreArg()).AndReturn(False)
- instance.Instance.check_create_complete(mox.IgnoreArg())\
- .AndReturn(True)
+ instance.Instance.check_create_complete(
+ mox.IgnoreArg()).AndReturn(True)

  instid = str(uuid.uuid4())
  instance.Instance.handle_create().AndReturn(instid)
- instance.Instance.check_create_complete(mox.IgnoreArg())\
- .AndReturn(False)
+ instance.Instance.check_create_complete(
+ mox.IgnoreArg()).AndReturn(False)
- instance.Instance.check_create_complete(mox.IgnoreArg())\
- .AndReturn(True)
+ instance.Instance.check_create_complete(
+ mox.IgnoreArg()).AndReturn(True)

  nova.NovaClientPlugin.server_to_ipaddress(
  mox.IgnoreArg()).AndReturn('1.2.3.5')

- neutronclient.Client.create_member(memberb_block).\
- AndReturn(memberb_ret_block)
+ neutronclient.Client.create_member(
+ memberb_block).AndReturn(memberb_ret_block)

  nova.NovaClientPlugin.server_to_ipaddress(
  mox.IgnoreArg()).AndReturn('1.2.3.6')

- neutronclient.Client.create_member(memberc_block).\
- AndReturn(memberc_ret_block)
+ neutronclient.Client.create_member(
+ memberc_block).AndReturn(memberc_ret_block)

  self.m.ReplayAll()

@@ -52,8 +52,8 @@ class nokeyTest(common.HeatTestCase):
  stack = utils.parse_stack(t, stack_name=stack_name)

  t['Resources']['WebServer']['Properties']['ImageId'] = 'CentOS 5.2'
- t['Resources']['WebServer']['Properties']['InstanceType'] = \
- '256 MB Server'
+ t['Resources']['WebServer']['Properties'][
+ 'InstanceType'] = '256 MB Server'
  resource_defns = stack.t.resource_definitions(stack)
  instance = instances.Instance('create_instance_name',
  resource_defns['WebServer'], stack)
@@ -173,10 +173,10 @@ class ScaleNotificationTest(common.HeatTestCase):
  self.patchobject(nova.KeypairConstraint, 'validate')
  self.patchobject(glance.ImageConstraint, 'validate')
  self.patchobject(nova.FlavorConstraint, 'validate')
- self.patchobject(instance.Instance, 'handle_create')\
- .return_value = True
+ self.patchobject(instance.Instance, 'handle_create'
+ ).return_value = True
- self.patchobject(instance.Instance, 'check_create_complete')\
- .return_value = True
+ self.patchobject(instance.Instance, 'check_create_complete'
+ ).return_value = True
  self.patchobject(stack_resource.StackResource,
  'check_update_complete').return_value = True

@@ -374,8 +374,8 @@ class ParameterTest(testtools.TestCase):
  'AllowedPattern': '[a-z]*'}
  err = self.assertRaises(exception.StackValidationFailed,
  self.new_parameter, 'testparam', schema, '234')
- expected = 'Parameter \'testparam\' is invalid: '\
- '"234" does not match pattern "[a-z]*"'
+ expected = ("Parameter 'testparam' is invalid: "
+ '"234" does not match pattern "[a-z]*"')
  self.assertEqual(expected, six.text_type(err))

@@ -2310,8 +2310,8 @@ class StackTest(common.HeatTestCase):
  {'Type': 'ResourceWithPropsType',
  'Properties': {'Foo': 'xyz'}},
  {'Type': 'ResourceWithPropsType',
- 'Properties': {'Foo': 'abc'}}).WithSideEffects(check_props) \
- .AndRaise(resource.UpdateReplace)
+ 'Properties': {'Foo': 'abc'}}
+ ).WithSideEffects(check_props).AndRaise(resource.UpdateReplace)
  self.m.ReplayAll()

  self.stack.update(updated_stack)
@@ -3082,8 +3082,8 @@ class StackTest(common.HeatTestCase):

  self.m.StubOutWithMock(generic_rsrc.ResourceWithProps, 'handle_create')

- generic_rsrc.ResourceWithProps.handle_create().MultipleTimes().\
- AndReturn(None)
+ generic_rsrc.ResourceWithProps.handle_create().MultipleTimes(
+ ).AndReturn(None)

  self.m.ReplayAll()

@@ -3696,7 +3696,7 @@ class StackTest(common.HeatTestCase):

  def test_stack_name_invalid(self):
  stack_names = ['_foo', '1bad', '.kcats', 'test stack', ' teststack',
- '^-^', '\"stack\"', '1234', 'cat|dog', '$(foo)',
+ '^-^', '"stack"', '1234', 'cat|dog', '$(foo)',
  'test/stack', 'test\stack', 'test::stack', 'test;stack',
  'test~stack', '#test']
  for stack_name in stack_names:
@@ -917,8 +917,8 @@ class PropertyTest(testtools.TestCase):
  p = properties.Property({'Type': 'List', 'Schema': list_schema})
  ex = self.assertRaises(exception.StackValidationFailed,
  p.get_value, [42, 'fish'], True)
- self.assertEqual('Property error : 1 Value \'fish\' is not '
- 'an integer', six.text_type(ex))
+ self.assertEqual("Property error : 1 Value 'fish' is not "
+ "an integer", six.text_type(ex))


  class PropertiesTest(testtools.TestCase):
@@ -1613,8 +1613,9 @@ class PropertiesValidationTest(testtools.TestCase):

  prop_expected = {'foo': {'Ref': 'foo'}}
  param_expected = {'foo': {'Type': 'Json'}}
- (parameters, props) = \
- properties.Properties.schema_to_parameters_and_properties(schema)
+ (parameters,
+ props) = properties.Properties.schema_to_parameters_and_properties(
+ schema)
  self.assertEqual(param_expected, parameters)
  self.assertEqual(prop_expected, props)

@@ -1628,8 +1629,9 @@ class PropertiesValidationTest(testtools.TestCase):

  prop_expected = {'foo': {'Fn::Split': [",", {'Ref': 'foo'}]}}
  param_expected = {'foo': {'Type': 'CommaDelimitedList'}}
- (parameters, props) = \
- properties.Properties.schema_to_parameters_and_properties(schema)
+ (parameters,
+ props) = properties.Properties.schema_to_parameters_and_properties(
+ schema)
  self.assertEqual(param_expected, parameters)
  self.assertEqual(prop_expected, props)

@@ -1648,8 +1650,9 @@ class PropertiesValidationTest(testtools.TestCase):

  prop_expected = {'foo': {'Fn::Split': [",", {'Ref': 'foo'}]}}
  param_expected = {'foo': {'Type': 'CommaDelimitedList'}}
- (parameters, props) = \
- properties.Properties.schema_to_parameters_and_properties(schema)
+ (parameters,
+ props) = properties.Properties.schema_to_parameters_and_properties(
+ schema)
  self.assertEqual(param_expected, parameters)
  self.assertEqual(prop_expected, props)

@@ -1661,8 +1664,9 @@ class PropertiesValidationTest(testtools.TestCase):
  prop_expected = {'foo': {'Ref': 'foo'}}
  param_expected = {'foo': {'Type': 'String'}}

- (parameters, props) = \
- properties.Properties.schema_to_parameters_and_properties(schema)
+ (parameters,
+ props) = properties.Properties.schema_to_parameters_and_properties(
+ schema)
  self.assertEqual(param_expected, parameters)
  self.assertEqual(prop_expected, props)

@@ -1769,8 +1773,9 @@ class PropertiesValidationTest(testtools.TestCase):
  def test_schema_to_template_empty_schema(self):
  schema = {}

- (parameters, props) = \
- properties.Properties.schema_to_parameters_and_properties(schema)
+ (parameters,
+ props) = properties.Properties.schema_to_parameters_and_properties(
+ schema)
  self.assertEqual({}, parameters)
  self.assertEqual({}, props)

@@ -163,8 +163,10 @@ class ProviderTemplateTest(common.HeatTestCase):
  # verify List conversion
  self.assertEqual("one,two,three", converted_params.get("AList"))
  # verify Member List conversion
- mem_exp = '.member.0.key=name,.member.0.value=three,' \
- '.member.1.key=name,.member.1.value=four'
+ mem_exp = ('.member.0.key=name,'
+ '.member.0.value=three,'
+ '.member.1.key=name,'
+ '.member.1.value=four')
  self.assertEqual(mem_exp, converted_params.get("MemList"))
  # verify Number conversion
  self.assertEqual(5, converted_params.get("ANum"))
@@ -492,8 +494,9 @@ class ProviderTemplateTest(common.HeatTestCase):
  self.assertTrue(test_templ, "Empty test template")
  self.m.StubOutWithMock(urlfetch, "get")
  urlfetch.get(test_templ_name,
- allowed_schemes=('file',))\
- .AndRaise(urlfetch.URLFetchError(_('Failed to retrieve template')))
+ allowed_schemes=('file',)
+ ).AndRaise(urlfetch.URLFetchError(
+ _('Failed to retrieve template')))
  urlfetch.get(test_templ_name,
  allowed_schemes=('http', 'https')).AndReturn(test_templ)
  parsed_test_templ = template_format.parse(test_templ)
@@ -607,8 +610,9 @@ class ProviderTemplateTest(common.HeatTestCase):
  self.m.StubOutWithMock(urlfetch, "get")
  urlfetch.get(test_templ_name,
  allowed_schemes=('http', 'https',
- 'file'))\
- .AndRaise(urlfetch.URLFetchError(_('Failed to retrieve template')))
+ 'file')
+ ).AndRaise(urlfetch.URLFetchError(
+ _('Failed to retrieve template')))
  self.m.ReplayAll()

  definition = rsrc_defn.ResourceDefinition('test_t_res',
@@ -635,8 +639,9 @@ class ProviderTemplateTest(common.HeatTestCase):

  self.m.StubOutWithMock(urlfetch, "get")
  urlfetch.get(test_templ_name,
- allowed_schemes=('http', 'https'))\
- .AndRaise(urlfetch.URLFetchError(_('Failed to retrieve template')))
+ allowed_schemes=('http', 'https')
+ ).AndRaise(urlfetch.URLFetchError(
+ _('Failed to retrieve template')))
  self.m.ReplayAll()

  definition = rsrc_defn.ResourceDefinition('test_t_res',
@@ -686,8 +691,8 @@ class ProviderTemplateTest(common.HeatTestCase):

  self.m.StubOutWithMock(urlfetch, "get")
  urlfetch.get(test_templ_name,
- allowed_schemes=('http', 'https'))\
- .AndReturn(wrong_template)
+ allowed_schemes=('http', 'https')
+ ).AndReturn(wrong_template)
  self.m.ReplayAll()

  definition = rsrc_defn.ResourceDefinition('test_t_res',
@@ -254,8 +254,8 @@ class RemoteStackTest(tests_common.HeatTestCase):

  ex = self.assertRaises(exception.StackValidationFailed,
  rsrc.validate)
- msg = 'Cannot establish connection to Heat endpoint at region "%s"'\
- % self.bad_region
+ msg = ('Cannot establish connection to Heat endpoint '
+ 'at region "%s"' % self.bad_region)
  self.assertIn(msg, six.text_type(ex))

  def test_remote_validation_failed(self):
@@ -373,8 +373,8 @@ class RemoteStackTest(tests_common.HeatTestCase):
  remote_stack_id = rsrc.resource_id
  error = self.assertRaises(exception.ResourceFailure,
  scheduler.TaskRunner(rsrc.delete))
- error_msg = 'ResourceInError: Went to status DELETE_FAILED due to '\
- '"Remote stack deletion failed"'
+ error_msg = ('ResourceInError: Went to status DELETE_FAILED due to '
+ '"Remote stack deletion failed"')
  self.assertIn(error_msg, six.text_type(error))
  self.assertEqual((rsrc.DELETE, rsrc.FAILED), rsrc.state)
  self.heat.stacks.delete.assert_called_with(stack_id=remote_stack_id)
@@ -631,7 +631,7 @@ class ResourceGroupTest(common.HeatTestCase):
  resg = resource_group.ResourceGroup('test', snip, stack)
  exc = self.assertRaises(exception.StackValidationFailed,
  resg.validate)
- errstr = 'removal_policies "\'notallowed\'" is not a list'
+ errstr = "removal_policies \"'notallowed'\" is not a list"
  self.assertIn(errstr, six.text_type(exc))

  def test_invalid_removal_policies_nomap(self):
@@ -167,14 +167,14 @@ class ServersTest(common.HeatTestCase):
  image_id=None):
  (tmpl, stack) = self._setup_test_stack(stack_name)

- tmpl.t['Resources']['WebServer']['Properties']['image'] = \
- image_id or 'CentOS 5.2'
+ tmpl.t['Resources']['WebServer']['Properties'][
+ 'image'] = image_id or 'CentOS 5.2'
- tmpl.t['Resources']['WebServer']['Properties']['flavor'] = \
- '256 MB Server'
+ tmpl.t['Resources']['WebServer']['Properties'][
+ 'flavor'] = '256 MB Server'

  if server_name is not None:
- tmpl.t['Resources']['WebServer']['Properties']['name'] = \
- server_name
+ tmpl.t['Resources']['WebServer']['Properties'][
+ 'name'] = server_name

  return tmpl, stack

@@ -235,8 +235,8 @@ class ServersTest(common.HeatTestCase):
  imageId_input).MultipleTimes().AndReturn(imageId)

  if server_rebuild:
- glance.GlanceClientPlugin.get_image_id('F17-x86_64-gold').\
- MultipleTimes().AndReturn(744)
+ glance.GlanceClientPlugin.get_image_id(
+ 'F17-x86_64-gold').MultipleTimes().AndReturn(744)

  def _mock_get_image_id_fail(self, image_id, exp):
  self.m.StubOutWithMock(glance.GlanceClientPlugin, 'get_image_id')
@@ -244,8 +244,8 @@ class ServersTest(common.HeatTestCase):

  def _mock_get_keypair_success(self, keypair_input, keypair):
  self.m.StubOutWithMock(nova.NovaClientPlugin, 'get_keypair')
- nova.NovaClientPlugin.get_keypair(keypair_input).MultipleTimes().\
- AndReturn(keypair)
+ nova.NovaClientPlugin.get_keypair(
+ keypair_input).MultipleTimes().AndReturn(keypair)

  def _server_validate_mock(self, server):
  self.m.StubOutWithMock(nova.NovaClientPlugin, '_create')
@@ -331,8 +331,8 @@ class ServersTest(common.HeatTestCase):
  stack_name = 'create_metadata_test_stack'
  (tmpl, stack) = self._setup_test_stack(stack_name)

- tmpl['Resources']['WebServer']['Properties']['metadata'] = \
- {'a': 1}
+ tmpl['Resources']['WebServer']['Properties'][
+ 'metadata'] = {'a': 1}
  resource_defns = tmpl.resource_definitions(stack)
  server = servers.Server('create_metadata_test_server',
  resource_defns['WebServer'], stack)
@@ -426,9 +426,9 @@ class ServersTest(common.HeatTestCase):
  create = scheduler.TaskRunner(server.create)
  error = self.assertRaises(exception.ResourceFailure, create)
  self.assertEqual(
- 'StackValidationFailed: Property error : WebServer: '
- 'image Error validating value \'Slackware\': '
- 'The Image (Slackware) could not be found.',
+ "StackValidationFailed: Property error : WebServer: "
+ "image Error validating value 'Slackware': "
+ "The Image (Slackware) could not be found.",
  six.text_type(error))

  self.m.VerifyAll()
@@ -477,9 +477,9 @@ class ServersTest(common.HeatTestCase):
  create = scheduler.TaskRunner(server.create)
  error = self.assertRaises(exception.ResourceFailure, create)
  self.assertEqual(
- 'StackValidationFailed: Property error : WebServer: '
- 'image Error validating value \'1\': '
- 'The Image (1) could not be found.',
+ "StackValidationFailed: Property error : WebServer: "
+ "image Error validating value '1': "
+ "The Image (1) could not be found.",
  six.text_type(error))

  self.m.VerifyAll()
@@ -524,8 +524,8 @@ class ServersTest(common.HeatTestCase):
  stack_name = 'raw_userdata_s'
  (tmpl, stack) = self._setup_test_stack(stack_name)

- tmpl['Resources']['WebServer']['Properties']['user_data_format'] = \
- 'RAW'
+ tmpl['Resources']['WebServer']['Properties'][
+ 'user_data_format'] = 'RAW'

  resource_defns = tmpl.resource_definitions(stack)
  server = servers.Server('WebServer',
@@ -556,10 +556,10 @@ class ServersTest(common.HeatTestCase):
  stack_name = 'raw_userdata_s'
  (tmpl, stack) = self._setup_test_stack(stack_name)

- tmpl['Resources']['WebServer']['Properties']['user_data_format'] = \
- 'RAW'
+ tmpl['Resources']['WebServer']['Properties'][
+ 'user_data_format'] = 'RAW'
- tmpl['Resources']['WebServer']['Properties']['user_data'] = \
- '8c813873-f6ee-4809-8eec-959ef39acb55'
+ tmpl['Resources']['WebServer']['Properties'][
+ 'user_data'] = '8c813873-f6ee-4809-8eec-959ef39acb55'

  resource_defns = tmpl.resource_definitions(stack)
  server = servers.Server('WebServer',
@@ -597,8 +597,8 @@ class ServersTest(common.HeatTestCase):
  (tmpl, stack) = self._setup_test_stack(stack_name)

  sc_id = '8c813873-f6ee-4809-8eec-959ef39acb55'
- tmpl['Resources']['WebServer']['Properties']['user_data_format'] = \
- 'RAW'
+ tmpl['Resources']['WebServer']['Properties'][
+ 'user_data_format'] = 'RAW'
  tmpl['Resources']['WebServer']['Properties']['user_data'] = sc_id

||||||
resource_defns = tmpl.resource_definitions(stack)
|
resource_defns = tmpl.resource_definitions(stack)
|
||||||
@ -635,8 +635,8 @@ class ServersTest(common.HeatTestCase):
|
|||||||
stack_name = 'software_config_s'
|
stack_name = 'software_config_s'
|
||||||
(tmpl, stack) = self._setup_test_stack(stack_name)
|
(tmpl, stack) = self._setup_test_stack(stack_name)
|
||||||
|
|
||||||
tmpl['Resources']['WebServer']['Properties']['user_data_format'] = \
|
tmpl['Resources']['WebServer']['Properties'][
|
||||||
'SOFTWARE_CONFIG'
|
'user_data_format'] = 'SOFTWARE_CONFIG'
|
||||||
|
|
||||||
stack.stack_user_project_id = '8888'
|
stack.stack_user_project_id = '8888'
|
||||||
resource_defns = tmpl.resource_definitions(stack)
|
resource_defns = tmpl.resource_definitions(stack)
|
||||||
@ -1006,8 +1006,8 @@ class ServersTest(common.HeatTestCase):
|
|||||||
error = self.assertRaises(exception.StackValidationFailed,
|
error = self.assertRaises(exception.StackValidationFailed,
|
||||||
server.validate)
|
server.validate)
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
'Property error : WebServer: key_name Error validating '
|
"Property error : WebServer: key_name Error validating "
|
||||||
'value \'test2\': The Key (test2) could not be found.',
|
"value 'test2': The Key (test2) could not be found.",
|
||||||
six.text_type(error))
|
six.text_type(error))
|
||||||
self.m.VerifyAll()
|
self.m.VerifyAll()
|
||||||
|
|
||||||
@ -1049,8 +1049,8 @@ class ServersTest(common.HeatTestCase):
|
|||||||
|
|
||||||
tmpl['Resources']['WebServer']['Properties']['networks'] = [
|
tmpl['Resources']['WebServer']['Properties']['networks'] = [
|
||||||
{'port': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'}]
|
{'port': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'}]
|
||||||
tmpl['Resources']['WebServer']['Properties']['security_groups'] = \
|
tmpl['Resources']['WebServer']['Properties'][
|
||||||
['my_security_group']
|
'security_groups'] = ['my_security_group']
|
||||||
|
|
||||||
resource_defns = tmpl.resource_definitions(stack)
|
resource_defns = tmpl.resource_definitions(stack)
|
||||||
server = servers.Server('server_validate_net_security_groups',
|
server = servers.Server('server_validate_net_security_groups',
|
||||||
@ -1988,8 +1988,8 @@ class ServersTest(common.HeatTestCase):
|
|||||||
|
|
||||||
ex = self.assertRaises(exception.StackValidationFailed,
|
ex = self.assertRaises(exception.StackValidationFailed,
|
||||||
server.validate)
|
server.validate)
|
||||||
msg = 'Either volume_id or snapshot_id must be specified for device' +\
|
msg = ("Either volume_id or snapshot_id must be specified "
|
||||||
' mapping vdb'
|
"for device mapping vdb")
|
||||||
self.assertEqual(msg, six.text_type(ex))
|
self.assertEqual(msg, six.text_type(ex))
|
||||||
|
|
||||||
self.m.VerifyAll()
|
self.m.VerifyAll()
|
||||||
@ -2012,8 +2012,8 @@ class ServersTest(common.HeatTestCase):
|
|||||||
|
|
||||||
ex = self.assertRaises(exception.StackValidationFailed,
|
ex = self.assertRaises(exception.StackValidationFailed,
|
||||||
server.validate)
|
server.validate)
|
||||||
msg = 'Neither image nor bootable volume is specified for instance %s'\
|
msg = ('Neither image nor bootable volume is specified '
|
||||||
% server.name
|
'for instance %s' % server.name)
|
||||||
self.assertEqual(msg, six.text_type(ex))
|
self.assertEqual(msg, six.text_type(ex))
|
||||||
|
|
||||||
self.m.VerifyAll()
|
self.m.VerifyAll()
|
||||||
@ -2069,8 +2069,8 @@ class ServersTest(common.HeatTestCase):
|
|||||||
stack_name = 'srv_val'
|
stack_name = 'srv_val'
|
||||||
(tmpl, stack) = self._setup_test_stack(stack_name)
|
(tmpl, stack) = self._setup_test_stack(stack_name)
|
||||||
|
|
||||||
tmpl.t['Resources']['WebServer']['Properties']['personality'] = \
|
tmpl.t['Resources']['WebServer']['Properties'][
|
||||||
{"/fake/path1": "fake contents1",
|
'personality'] = {"/fake/path1": "fake contents1",
|
||||||
"/fake/path2": "fake_contents2",
|
"/fake/path2": "fake_contents2",
|
||||||
"/fake/path3": "fake_contents3",
|
"/fake/path3": "fake_contents3",
|
||||||
"/fake/path4": "fake_contents4",
|
"/fake/path4": "fake_contents4",
|
||||||
@ -2098,8 +2098,8 @@ class ServersTest(common.HeatTestCase):
|
|||||||
stack_name = 'srv_val'
|
stack_name = 'srv_val'
|
||||||
(tmpl, stack) = self._setup_test_stack(stack_name)
|
(tmpl, stack) = self._setup_test_stack(stack_name)
|
||||||
|
|
||||||
tmpl.t['Resources']['WebServer']['Properties']['personality'] = \
|
tmpl.t['Resources']['WebServer']['Properties'][
|
||||||
{"/fake/path1": "fake contents1",
|
'personality'] = {"/fake/path1": "fake contents1",
|
||||||
"/fake/path2": "fake_contents2",
|
"/fake/path2": "fake_contents2",
|
||||||
"/fake/path3": "fake_contents3",
|
"/fake/path3": "fake_contents3",
|
||||||
"/fake/path4": "fake_contents4",
|
"/fake/path4": "fake_contents4",
|
||||||
@ -2123,8 +2123,8 @@ class ServersTest(common.HeatTestCase):
|
|||||||
stack_name = 'srv_val'
|
stack_name = 'srv_val'
|
||||||
(tmpl, stack) = self._setup_test_stack(stack_name)
|
(tmpl, stack) = self._setup_test_stack(stack_name)
|
||||||
|
|
||||||
tmpl.t['Resources']['WebServer']['Properties']['personality'] = \
|
tmpl.t['Resources']['WebServer']['Properties'][
|
||||||
{"/fake/path1": "a" * 10240}
|
'personality'] = {"/fake/path1": "a" * 10240}
|
||||||
resource_defns = tmpl.resource_definitions(stack)
|
resource_defns = tmpl.resource_definitions(stack)
|
||||||
server = servers.Server('server_create_image_err',
|
server = servers.Server('server_create_image_err',
|
||||||
resource_defns['WebServer'], stack)
|
resource_defns['WebServer'], stack)
|
||||||
@ -2144,8 +2144,8 @@ class ServersTest(common.HeatTestCase):
|
|||||||
stack_name = 'srv_val'
|
stack_name = 'srv_val'
|
||||||
(tmpl, stack) = self._setup_test_stack(stack_name)
|
(tmpl, stack) = self._setup_test_stack(stack_name)
|
||||||
|
|
||||||
tmpl.t['Resources']['WebServer']['Properties']['personality'] = \
|
tmpl.t['Resources']['WebServer']['Properties'][
|
||||||
{"/fake/path1": "a" * 10241}
|
'personality'] = {"/fake/path1": "a" * 10241}
|
||||||
resource_defns = tmpl.resource_definitions(stack)
|
resource_defns = tmpl.resource_definitions(stack)
|
||||||
server = servers.Server('server_create_image_err',
|
server = servers.Server('server_create_image_err',
|
||||||
resource_defns['WebServer'], stack)
|
resource_defns['WebServer'], stack)
|
||||||
@ -2160,9 +2160,9 @@ class ServersTest(common.HeatTestCase):
|
|||||||
|
|
||||||
exc = self.assertRaises(exception.StackValidationFailed,
|
exc = self.assertRaises(exception.StackValidationFailed,
|
||||||
server.validate)
|
server.validate)
|
||||||
self.assertEqual("The contents of personality file \"/fake/path1\" "
|
self.assertEqual('The contents of personality file "/fake/path1" '
|
||||||
"is larger than the maximum allowed personality "
|
'is larger than the maximum allowed personality '
|
||||||
"file size (10240 bytes).", six.text_type(exc))
|
'file size (10240 bytes).', six.text_type(exc))
|
||||||
self.m.VerifyAll()
|
self.m.VerifyAll()
|
||||||
|
|
||||||
def test_resolve_attribute_server_not_found(self):
|
def test_resolve_attribute_server_not_found(self):
|
||||||
|
@ -89,8 +89,8 @@ class SoftwareComponentTest(common.HeatTestCase):
|
|||||||
self.component.resource_id = None
|
self.component.resource_id = None
|
||||||
self.assertIsNone(self.component._resolve_attribute('configs'))
|
self.assertIsNone(self.component._resolve_attribute('configs'))
|
||||||
self.component.resource_id = 'c8a19429-7fde-47ea-a42f-40045488226c'
|
self.component.resource_id = 'c8a19429-7fde-47ea-a42f-40045488226c'
|
||||||
configs = self.\
|
configs = self.template['resources']['mysql_component'
|
||||||
template['resources']['mysql_component']['properties']['configs']
|
]['properties']['configs']
|
||||||
# configs list is stored in 'config' property of SoftwareConfig
|
# configs list is stored in 'config' property of SoftwareConfig
|
||||||
value = {'config': {'configs': configs}}
|
value = {'config': {'configs': configs}}
|
||||||
self.rpc_client.show_software_config.return_value = value
|
self.rpc_client.show_software_config.return_value = value
|
||||||
|
@ -125,8 +125,8 @@ class SqlAlchemyTest(common.HeatTestCase):
|
|||||||
userdata=mox.IgnoreArg(), scheduler_hints=None,
|
userdata=mox.IgnoreArg(), scheduler_hints=None,
|
||||||
meta=None, nics=None,
|
meta=None, nics=None,
|
||||||
availability_zone=None,
|
availability_zone=None,
|
||||||
block_device_mapping=None).MultipleTimes().\
|
block_device_mapping=None
|
||||||
AndReturn(fc.servers.list()[4])
|
).MultipleTimes().AndReturn(fc.servers.list()[4])
|
||||||
return fc
|
return fc
|
||||||
|
|
||||||
def _mock_delete(self, mocks):
|
def _mock_delete(self, mocks):
|
||||||
|
@ -303,8 +303,8 @@ class StackResourceTest(common.HeatTestCase):
|
|||||||
'test',
|
'test',
|
||||||
resource_defns[self.ws_resname],
|
resource_defns[self.ws_resname],
|
||||||
self.parent_stack)
|
self.parent_stack)
|
||||||
stk_resource.child_template = \
|
stk_resource.child_template = mock.Mock(
|
||||||
mock.Mock(return_value=templatem.Template(self.simple_template))
|
return_value=templatem.Template(self.simple_template))
|
||||||
stk_resource.child_params = mock.Mock()
|
stk_resource.child_params = mock.Mock()
|
||||||
exc = exception.RequestLimitExceeded(message='Validation Failed')
|
exc = exception.RequestLimitExceeded(message='Validation Failed')
|
||||||
validation_mock = mock.Mock(side_effect=exc)
|
validation_mock = mock.Mock(side_effect=exc)
|
||||||
@ -320,8 +320,8 @@ class StackResourceTest(common.HeatTestCase):
|
|||||||
'test',
|
'test',
|
||||||
resource_defns[self.ws_resname],
|
resource_defns[self.ws_resname],
|
||||||
self.parent_stack)
|
self.parent_stack)
|
||||||
stk_resource.child_template = \
|
stk_resource.child_template = mock.Mock(
|
||||||
mock.Mock(return_value=self.simple_template)
|
return_value=self.simple_template)
|
||||||
stk_resource.child_params = mock.Mock()
|
stk_resource.child_params = mock.Mock()
|
||||||
exc = exception.RequestLimitExceeded(message='Validation Failed')
|
exc = exception.RequestLimitExceeded(message='Validation Failed')
|
||||||
validation_mock = mock.Mock(side_effect=exc)
|
validation_mock = mock.Mock(side_effect=exc)
|
||||||
|
@ -735,10 +735,10 @@ class SwiftSignalTest(common.HeatTestCase):
|
|||||||
|
|
||||||
st.create()
|
st.create()
|
||||||
self.assertEqual(('CREATE', 'COMPLETE'), st.state)
|
self.assertEqual(('CREATE', 'COMPLETE'), st.state)
|
||||||
expected = ('curl -i -X PUT \'http://fake-host.com:8080/v1/'
|
expected = ("curl -i -X PUT 'http://fake-host.com:8080/v1/"
|
||||||
'AUTH_test_tenant/%s/test_st-test_wait_condition_'
|
"AUTH_test_tenant/%s/test_st-test_wait_condition_"
|
||||||
'handle-abcdefghijkl\?temp_url_sig=[0-9a-f]{40}&'
|
"handle-abcdefghijkl\?temp_url_sig=[0-9a-f]{40}&"
|
||||||
'temp_url_expires=[0-9]{10}\'') % st.id
|
"temp_url_expires=[0-9]{10}'") % st.id
|
||||||
self.assertThat(handle.FnGetAtt('curl_cli'),
|
self.assertThat(handle.FnGetAtt('curl_cli'),
|
||||||
matchers.MatchesRegex(expected))
|
matchers.MatchesRegex(expected))
|
||||||
|
|
||||||
|
@ -68,10 +68,7 @@ test_template_ref = '''
|
|||||||
"Parameters" : {
|
"Parameters" : {
|
||||||
|
|
||||||
"KeyName" : {
|
"KeyName" : {
|
||||||
''' + \
|
"Description" : "Name of an existing EC2KeyPair",
|
||||||
'"Description" : "Name of an existing EC2' + \
|
|
||||||
'KeyPair to enable SSH access to the instances",' + \
|
|
||||||
'''
|
|
||||||
"Type" : "String"
|
"Type" : "String"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@ -109,10 +106,7 @@ test_template_findinmap_valid = '''
|
|||||||
"Description" : "test.",
|
"Description" : "test.",
|
||||||
"Parameters" : {
|
"Parameters" : {
|
||||||
"KeyName" : {
|
"KeyName" : {
|
||||||
''' + \
|
"Description" : "Name of an existing EC2KeyPair",
|
||||||
'"Description" : "Name of an existing EC2 KeyPair to' + \
|
|
||||||
'enable SSH access to the instances",' + \
|
|
||||||
'''
|
|
||||||
"Type" : "String"
|
"Type" : "String"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@ -152,10 +146,8 @@ test_template_findinmap_invalid = '''
|
|||||||
"Parameters" : {
|
"Parameters" : {
|
||||||
|
|
||||||
"KeyName" : {
|
"KeyName" : {
|
||||||
''' + \
|
"Description" : "Name of an existing EC2KeyPair",
|
||||||
'"Description" : "Name of an existing EC2 KeyPair to enable SSH ' + \
|
"Type" : "String"
|
||||||
'access to the instances",' + \
|
|
||||||
''' "Type" : "String"
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
||||||
@ -180,12 +172,13 @@ test_template_findinmap_invalid = '''
|
|||||||
"WikiDatabase": {
|
"WikiDatabase": {
|
||||||
"Type": "AWS::EC2::Instance",
|
"Type": "AWS::EC2::Instance",
|
||||||
"Properties": {
|
"Properties": {
|
||||||
''' + \
|
"ImageId" : {
|
||||||
'"ImageId" : { "Fn::FindInMap" : [ "DistroArch2AMI", { "Ref" : ' + \
|
"Fn::FindInMap" : [
|
||||||
'"LinuxDistribution" },' + \
|
"DistroArch2AMI", { "Ref" : "LinuxDistribution" },
|
||||||
'{ "Fn::FindInMap" : [ "AWSInstanceType2Arch", { "Ref" : ' + \
|
{ "Fn::FindInMap" : [
|
||||||
'"InstanceType" }, "Arch" ] } ] },' + \
|
"AWSInstanceType2Arch",
|
||||||
'''
|
{ "Ref" : "InstanceType" }, "Arch" ] } ]
|
||||||
|
},
|
||||||
"InstanceType": "m1.large",
|
"InstanceType": "m1.large",
|
||||||
"KeyName": { "Ref" : "KeyName"}
|
"KeyName": { "Ref" : "KeyName"}
|
||||||
}
|
}
|
||||||
@ -242,10 +235,7 @@ test_template_invalid_property = '''
|
|||||||
"Parameters" : {
|
"Parameters" : {
|
||||||
|
|
||||||
"KeyName" : {
|
"KeyName" : {
|
||||||
''' + \
|
"Description" : "Name of an existing EC2 KeyPai",
|
||||||
'"Description" : "Name of an existing EC2' + \
|
|
||||||
'KeyPair to enable SSH access to the instances",' + \
|
|
||||||
'''
|
|
||||||
"Type" : "String"
|
"Type" : "String"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@ -271,10 +261,7 @@ test_template_unimplemented_property = '''
|
|||||||
"Parameters" : {
|
"Parameters" : {
|
||||||
|
|
||||||
"KeyName" : {
|
"KeyName" : {
|
||||||
''' + \
|
"Description" : "Name of an existing EC2KeyPair",
|
||||||
'"Description" : "Name of an existing EC2' + \
|
|
||||||
'KeyPair to enable SSH access to the instances",' + \
|
|
||||||
'''
|
|
||||||
"Type" : "String"
|
"Type" : "String"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@ -300,10 +287,7 @@ test_template_invalid_deletion_policy = '''
|
|||||||
"Parameters" : {
|
"Parameters" : {
|
||||||
|
|
||||||
"KeyName" : {
|
"KeyName" : {
|
||||||
''' + \
|
"Description" : "Name of an existing EC2KeyPair",
|
||||||
'"Description" : "Name of an existing EC2' + \
|
|
||||||
'KeyPair to enable SSH access to the instances",' + \
|
|
||||||
'''
|
|
||||||
"Type" : "String"
|
"Type" : "String"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@ -329,10 +313,7 @@ test_template_snapshot_deletion_policy = '''
|
|||||||
"Parameters" : {
|
"Parameters" : {
|
||||||
|
|
||||||
"KeyName" : {
|
"KeyName" : {
|
||||||
''' + \
|
"Description" : "Name of an existing EC2KeyPair",
|
||||||
'"Description" : "Name of an existing EC2' + \
|
|
||||||
'KeyPair to enable SSH access to the instances",' + \
|
|
||||||
'''
|
|
||||||
"Type" : "String"
|
"Type" : "String"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@ -375,10 +356,7 @@ test_unregistered_key = '''
|
|||||||
"Parameters" : {
|
"Parameters" : {
|
||||||
|
|
||||||
"KeyName" : {
|
"KeyName" : {
|
||||||
''' + \
|
"Description" : "Name of an existing EC2KeyPair",
|
||||||
'"Description" : "Name of an existing EC2' + \
|
|
||||||
'KeyPair to enable SSH access to the instances",' + \
|
|
||||||
'''
|
|
||||||
"Type" : "String"
|
"Type" : "String"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@ -403,10 +381,7 @@ test_template_image = '''
|
|||||||
"Parameters" : {
|
"Parameters" : {
|
||||||
|
|
||||||
"KeyName" : {
|
"KeyName" : {
|
||||||
''' + \
|
"Description" : "Name of an existing EC2KeyPair",
|
||||||
'"Description" : "Name of an existing EC2' + \
|
|
||||||
'KeyPair to enable SSH access to the instances",' + \
|
|
||||||
'''
|
|
||||||
"Type" : "String"
|
"Type" : "String"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@ -431,10 +406,7 @@ test_template_invalid_secgroups = '''
|
|||||||
"Parameters" : {
|
"Parameters" : {
|
||||||
|
|
||||||
"KeyName" : {
|
"KeyName" : {
|
||||||
''' + \
|
"Description" : "Name of an existing EC2KeyPair",
|
||||||
'"Description" : "Name of an existing EC2' + \
|
|
||||||
'KeyPair to enable SSH access to the instances",' + \
|
|
||||||
'''
|
|
||||||
"Type" : "String"
|
"Type" : "String"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@ -461,10 +433,7 @@ test_template_invalid_secgroupids = '''
|
|||||||
"Parameters" : {
|
"Parameters" : {
|
||||||
|
|
||||||
"KeyName" : {
|
"KeyName" : {
|
||||||
''' + \
|
"Description" : "Name of an existing EC2KeyPair",
|
||||||
'"Description" : "Name of an existing EC2' + \
|
|
||||||
'KeyPair to enable SSH access to the instances",' + \
|
|
||||||
'''
|
|
||||||
"Type" : "String"
|
"Type" : "String"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@ -508,18 +477,12 @@ test_template_unique_logical_name = '''
|
|||||||
"Parameters" : {
|
"Parameters" : {
|
||||||
|
|
||||||
"KeyName" : {
|
"KeyName" : {
|
||||||
''' + \
|
"Description" : "Name of an existing EC2KeyPair",
|
||||||
'"Description" : "Name of an existing EC2' + \
|
|
||||||
'KeyPair to enable SSH access to the instances",' + \
|
|
||||||
'''
|
|
||||||
"Type" : "String"
|
"Type" : "String"
|
||||||
},
|
},
|
||||||
"AName" : {
|
"AName" : {
|
||||||
''' + \
|
"Description" : "Name of an existing EC2KeyPair",
|
||||||
'"Description" : "Name of an existing EC2' + \
|
"Type" : "String",
|
||||||
'KeyPair to enable SSH access to the instances",' + \
|
|
||||||
'''
|
|
||||||
"Type" : "String"
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
||||||
@ -544,13 +507,10 @@ test_template_cfn_parameter_label = '''
|
|||||||
"Parameters" : {
|
"Parameters" : {
|
||||||
|
|
||||||
"KeyName" : {
|
"KeyName" : {
|
||||||
''' + \
|
"Description" : "Name of an existing EC2KeyPair",
|
||||||
'"Description" : "Name of an existing EC2' + \
|
|
||||||
'KeyPair to enable SSH access to the instances",' + \
|
|
||||||
'''
|
|
||||||
"Type" : "String",
|
"Type" : "String",
|
||||||
"Label" : "Nova KeyPair Name"
|
"Label" : "Nova KeyPair Name"
|
||||||
},
|
}
|
||||||
},
|
},
|
||||||
|
|
||||||
"Resources" : {
|
"Resources" : {
|
||||||
@ -930,8 +890,7 @@ class validateTest(common.HeatTestCase):
|
|||||||
# API layer in heat.engine.api.format_validate_parameter.
|
# API layer in heat.engine.api.format_validate_parameter.
|
||||||
expected = {'KeyName': {
|
expected = {'KeyName': {
|
||||||
'Type': 'String',
|
'Type': 'String',
|
||||||
'Description': 'Name of an existing EC2KeyPair to enable SSH '
|
'Description': 'Name of an existing EC2KeyPair',
|
||||||
'access to the instances',
|
|
||||||
'NoEcho': 'false',
|
'NoEcho': 'false',
|
||||||
'Label': 'KeyName'}}
|
'Label': 'KeyName'}}
|
||||||
self.assertEqual(expected, res['Parameters'])
|
self.assertEqual(expected, res['Parameters'])
|
||||||
@ -1002,8 +961,7 @@ class validateTest(common.HeatTestCase):
|
|||||||
|
|
||||||
expected = {'KeyName': {
|
expected = {'KeyName': {
|
||||||
'Type': 'String',
|
'Type': 'String',
|
||||||
'Description': 'Name of an existing EC2KeyPair to enable SSH '
|
'Description': 'Name of an existing EC2KeyPair',
|
||||||
'access to the instances',
|
|
||||||
'NoEcho': 'false',
|
'NoEcho': 'false',
|
||||||
'Label': 'Nova KeyPair Name'}}
|
'Label': 'Nova KeyPair Name'}}
|
||||||
self.assertEqual(expected, parameters)
|
self.assertEqual(expected, parameters)
|
||||||
|
@ -106,8 +106,9 @@ class VersionNegotiationMiddlewareTest(common.HeatTestCase):
|
|||||||
major_version = 1
|
major_version = 1
|
||||||
minor_version = 0
|
minor_version = 0
|
||||||
request = webob.Request({'PATH_INFO': 'resource'})
|
request = webob.Request({'PATH_INFO': 'resource'})
|
||||||
request.headers['Accept'] = 'application/vnd.openstack.' \
|
request.headers['Accept'] = (
|
||||||
'orchestration-v{0}.{1}'.format(major_version, minor_version)
|
'application/vnd.openstack.orchestration-v{0}.{1}'.format(
|
||||||
|
major_version, minor_version))
|
||||||
|
|
||||||
response = version_negotiation.process_request(request)
|
response = version_negotiation.process_request(request)
|
||||||
|
|
||||||
@ -119,8 +120,8 @@ class VersionNegotiationMiddlewareTest(common.HeatTestCase):
|
|||||||
version_negotiation = vn.VersionNegotiationFilter(
|
version_negotiation = vn.VersionNegotiationFilter(
|
||||||
self._version_controller_factory, None, None)
|
self._version_controller_factory, None, None)
|
||||||
request = webob.Request({'PATH_INFO': 'resource'})
|
request = webob.Request({'PATH_INFO': 'resource'})
|
||||||
request.headers['Accept'] = 'application/vnd.openstack.' \
|
request.headers['Accept'] = (
|
||||||
'orchestration-v2.0'
|
'application/vnd.openstack.orchestration-v2.0')
|
||||||
|
|
||||||
response = version_negotiation.process_request(request)
|
response = version_negotiation.process_request(request)
|
||||||
|
|
||||||
|
@ -75,8 +75,8 @@ class WatchRuleTest(common.HeatTestCase):
|
|||||||
if action_expected:
|
if action_expected:
|
||||||
dummy_action = DummyAction()
|
dummy_action = DummyAction()
|
||||||
self.m.StubOutWithMock(parser.Stack, 'resource_by_refid')
|
self.m.StubOutWithMock(parser.Stack, 'resource_by_refid')
|
||||||
parser.Stack.resource_by_refid(mox.IgnoreArg()).\
|
parser.Stack.resource_by_refid(
|
||||||
MultipleTimes().AndReturn(dummy_action)
|
mox.IgnoreArg()).MultipleTimes().AndReturn(dummy_action)
|
||||||
|
|
||||||
self.m.ReplayAll()
|
self.m.ReplayAll()
|
||||||
|
|
||||||
|
@ -394,7 +394,7 @@ class JSONRequestDeserializerTest(common.HeatTestCase):
|
|||||||
error = self.assertRaises(exception.RequestLimitExceeded,
|
error = self.assertRaises(exception.RequestLimitExceeded,
|
||||||
wsgi.JSONRequestDeserializer().from_json,
|
wsgi.JSONRequestDeserializer().from_json,
|
||||||
body)
|
body)
|
||||||
msg = 'Request limit exceeded: JSON body size ' + \
|
msg = ('Request limit exceeded: JSON body size '
|
||||||
'(%s bytes) exceeds maximum allowed size (%s bytes).' % \
|
'(%s bytes) exceeds maximum allowed size (%s bytes).' % (
|
||||||
(len(body), cfg.CONF.max_json_body_size)
|
len(body), cfg.CONF.max_json_body_size))
|
||||||
self.assertEqual(msg, six.text_type(error))
|
self.assertEqual(msg, six.text_type(error))
|
||||||
|
3
tox.ini
@ -55,8 +55,7 @@ commands = oslo-config-generator --config-file=config-generator.conf
|
|||||||
# H404 multi line docstring should start with a summary
|
# H404 multi line docstring should start with a summary
|
||||||
# H405 multi line docstring summary not separated with an empty line
|
# H405 multi line docstring summary not separated with an empty line
|
||||||
# H803 no full stop at the end of the commit message
|
# H803 no full stop at the end of the commit message
|
||||||
# H904 Wrap long lines in parentheses instead of a backslash
|
ignore = H404,H405,H803
|
||||||
ignore = H404,H405,H803,H904
|
|
||||||
show-source = true
|
show-source = true
|
||||||
exclude=.venv,.git,.tox,dist,*openstack/common*,*lib/python*,*egg,tools,build
|
exclude=.venv,.git,.tox,dist,*openstack/common*,*lib/python*,*egg,tools,build
|
||||||
max-complexity=20
|
max-complexity=20
|
||||||
|
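The tox.ini hunk above drops H904 from the flake8 ignore list, so the check that prefers parentheses over a trailing backslash for line continuation now runs on this tree. A minimal sketch of the two styles the rule distinguishes, using invented names that do not appear anywhere in this change:

    # Hypothetical data, for illustration only.
    settings = {'flavor': '256 MB Server', 'image': 'CentOS 5.2'}

    # Trailing-backslash continuation: the style H904 flags.
    summary = 'flavor=' + settings['flavor'] + \
        ' image=' + settings['image']

    # Parenthesised continuation: the style the diffs above convert to.
    summary = ('flavor=' + settings['flavor'] +
               ' image=' + settings['image'])

    print(summary)

Both assignments produce the same string; only the line-continuation mechanism differs.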