Merge latest downstream changes
Change-Id: Ic5abb5998a32cd04eba38752563d273f309692bb
parent 5b34b259f7
commit 208555554a

bandit.yaml (new file, 99 lines)
@@ -0,0 +1,99 @@
### Bandit config file generated from:
# '/usr/local/bin/bandit-config-generator -o bandit.yaml'

### This config may optionally select a subset of tests to run or skip by
### filling out the 'tests' and 'skips' lists given below. If no tests are
### specified for inclusion then it is assumed all tests are desired. The skips
### set will remove specific tests from the include set. This can be controlled
### using the -t/-s CLI options. Note that the same test ID should not appear
### in both 'tests' and 'skips', this would be nonsensical and is detected by
### Bandit at runtime.

# Available tests:
# B101 : assert_used
# B102 : exec_used
# B103 : set_bad_file_permissions
# B104 : hardcoded_bind_all_interfaces
# B105 : hardcoded_password_string
# B106 : hardcoded_password_funcarg
# B107 : hardcoded_password_default
# B108 : hardcoded_tmp_directory
# B109 : password_config_option_not_marked_secret
# B110 : try_except_pass
# B111 : execute_with_run_as_root_equals_true
# B112 : try_except_continue
# B201 : flask_debug_true
# B301 : pickle
# B302 : marshal
# B303 : md5
# B304 : ciphers
# B305 : cipher_modes
# B306 : mktemp_q
# B307 : eval
# B308 : mark_safe
# B309 : httpsconnection
# B310 : urllib_urlopen
# B311 : random
# B312 : telnetlib
# B313 : xml_bad_cElementTree
# B314 : xml_bad_ElementTree
# B315 : xml_bad_expatreader
# B316 : xml_bad_expatbuilder
# B317 : xml_bad_sax
# B318 : xml_bad_minidom
# B319 : xml_bad_pulldom
# B320 : xml_bad_etree
# B321 : ftplib
# B322 : input
# B401 : import_telnetlib
# B402 : import_ftplib
# B403 : import_pickle
# B404 : import_subprocess
# B405 : import_xml_etree
# B406 : import_xml_sax
# B407 : import_xml_expat
# B408 : import_xml_minidom
# B409 : import_xml_pulldom
# B410 : import_lxml
# B411 : import_xmlrpclib
# B412 : import_httpoxy
# B501 : request_with_no_cert_validation
# B502 : ssl_with_bad_version
# B503 : ssl_with_bad_defaults
# B504 : ssl_with_no_version
# B505 : weak_cryptographic_key
# B506 : yaml_load
# B601 : paramiko_calls
# B602 : subprocess_popen_with_shell_equals_true
# B603 : subprocess_without_shell_equals_true
# B604 : any_other_function_with_shell_equals_true
# B605 : start_process_with_a_shell
# B606 : start_process_with_no_shell
# B607 : start_process_with_partial_path
# B608 : hardcoded_sql_expressions
# B609 : linux_commands_wildcard_injection
# B701 : jinja2_autoescape_false
# B702 : use_of_mako_templates

# (optional) list included test IDs here, eg '[B101, B406]':
tests:

# (optional) list skipped test IDs here, eg '[B101, B406]':
skips:

# globs of files which should be analyzed
include:
- '*.py'
- '*.pyw'

# a list of strings, which if found in the path will cause files to be excluded
# for example /tests/ - will exclude all files in test folder.
exclude_dirs:
- '/tests/'
@@ -1,4 +1,3 @@
# Copyright (c) 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -20,26 +19,23 @@ from keystoneclient.v2_0 import client as keystone_v2
from keystoneclient.v3 import client as keystone_v3
from oslo_config import cfg

from ord.common import exceptions as exc
from ord.openstack.common import log as logging


# FIXME: we definetly must change this group name. It very confusing.
# FIXME(db2242): we definetly must change this group name. It very confusing.
OPT_GROUP = cfg.OptGroup(name='ord_credentials', title='ORD Credentials')
SERVICE_OPTS = [
cfg.StrOpt('project_id', default='',
help="project id used by ranger-agent "
"driver of service vm extension"),
help="project id used by nova driver of service vm extension"),
cfg.StrOpt('auth_url', default='http://0.0.0.0:5000/v2.0',
help="auth URL used by ranger-agent "
"driver of service vm extension"),
help="auth URL used by nova driver of service vm extension"),
cfg.StrOpt('user_name', default='',
help="user name used by ranger-agent "
"driver of service vm extension"),
cfg.StrOpt('password', default='',
help="password used by ranger-agent "
"driver of service vm extension"),
help="user name used by nova driver of service vm extension"),
cfg.StrOpt('password', default='', secret=True,
help="password used by nova driver of service vm extension"),
cfg.StrOpt('tenant_name', default='',
help="tenant name used by ranger-agent driver of service vm "
help="tenant name used by nova driver of service vm "
"extension"),
cfg.FloatOpt("openstack_client_http_timeout", default=180.0,
help="HTTP timeout for any of OpenStack service in seconds"),
@@ -103,9 +99,14 @@ class Clients(object):
params['tenant_id'] = CONF.project_id
else:
params['tenant_name'] = CONF.tenant_name
client = create_keystone_client(params)
if client.auth_ref is None:
client.authenticate()
try:
client = create_keystone_client(params)
if client.auth_ref is None:
client.authenticate()
except Exception as e:
LOG.critical("Failed to initialize Keystone %s ", e)
raise exc.KeystoneInitializationException(e.message)

return client

@cached
@@ -129,9 +130,17 @@ class Clients(object):
insecure=CONF.https_insecure,
cacert=CONF.https_cacert)
return client, kc
except Exception:
kc = self.keystone()
attempt = attempt - 1
except Exception as ex:
try:
kc = self.keystone()
except Exception as e:
LOG.critical("Failed to initialize Keystone %s ", e)
raise exc.KeystoneInitializationException(e.message)
if attempt >= 0:
attempt = attempt - 1
else:
LOG.critical("Failed to initialize Heat Client %s ", ex)
raise exc.HEATIntegrationError(ex.message)

@cached
def glance(self, kc, version='2'):
@@ -154,6 +163,14 @@ class Clients(object):
insecure=CONF.https_insecure,
cacert=CONF.https_cacert)
return client, kc
except Exception:
kc = self.keystone()
attempt = attempt - 1
except Exception as ex:
try:
kc = self.keystone()
except Exception as e:
LOG.critical("Failed to initialize Keystone %s ", e)
raise exc.KeystoneInitializationException(e.message)
if attempt >= 0:
attempt = attempt - 1
else:
LOG.critical("Failed to initialize Client Client %s ", ex)
raise exc.HEATIntegrationError(ex.message)
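Both the Heat and Glance client hunks above introduce the same control flow: on any client failure, rebuild the Keystone session and retry while the attempt counter allows it, otherwise log and raise an integration error. A minimal standalone sketch of that retry-with-reauthentication loop, where get_client_with_retry, create_client, get_keystone and ClientInitError are illustrative placeholders rather than names from this module:

    # Hypothetical helpers for illustration only; the real code uses
    # self.keystone(), the heat/glance client constructors and the
    # ord.common.exceptions classes shown in the diff.
    class ClientInitError(Exception):
        pass

    def get_client_with_retry(create_client, get_keystone, attempts=2):
        """Build a client, refreshing the Keystone session and retrying on failure."""
        kc = get_keystone()
        while True:
            try:
                return create_client(kc), kc
            except Exception as ex:
                try:
                    kc = get_keystone()        # re-authenticate before the next try
                except Exception as keystone_err:
                    raise ClientInitError(keystone_err)
                if attempts > 0:
                    attempts -= 1              # consume one retry and loop again
                else:
                    raise ClientInitError(ex)  # out of retries: surface the error

In the diff the same structure appears twice, once for the Heat client and once for the Glance client.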
@@ -1,5 +1,4 @@
# Copyright (c) 2012 OpenStack Foundation
# All Rights Reserved.
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
@@ -59,29 +58,33 @@ class TemplateRepoClient(object):
repopath = os.path.join(os.environ['HOME'], local_repo)

repo = cfg.CONF.orm.orm_template_repo_url
LOG.debug(
LOG.info(
"%s Setting up repo initiated ...", os.path.basename(repo))

# create the git repo directory if not exists
if not os.path.isdir(repopath):
os.makedirs(repopath)

# initialize repo directory as a git repo
cmd = 'git init {0}'.format(repopath)
self.run_git('GitRepoInit', cmd)

# set remote origin
cmd = 'git -C {0} remote add origin {1}'.format(
repopath, repo)
self.git_repo_status = self.run_git('GitRepoInit', cmd)

# fetch origin
cmd = 'git -C {0} fetch origin'.format(
repopath)
self.git_repo_status = self.run_git('GitRepoInit', cmd)

LOG.debug(
"%s repo setup successfully", os.path.basename(repo))
try:
# initialize repo directory as a git repo
cmd = 'git init {0}'.format(repopath)
self.run_git('GitRepoInit', cmd)
try:
# set remote origin
cmd = 'git -C {0} remote add origin {1}'.format(
repopath, repo)
self.run_git('GitRepoInit', cmd)
except Exception as repoexp:
pass
# fetch origin
cmd = 'git -C {0} fetch origin'.format(
repopath)
self.run_git('GitRepoInit', cmd)
except Exception as repoexp:
self.git_repo_status = False
LOG.critical("Failed to initialize Repo %s " % repoexp)
LOG.info(
"%s Setting up repo status (completed = %s)",
os.path.basename(repo), self.git_repo_status)

def pull_template(self, local_repo, pathtotemplate):
"""Get template from repo.
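The rewritten setup path above only marks the repository as broken when git init or git fetch themselves fail; a failure of git remote add (typically "remote origin already exists" on a re-run) is deliberately swallowed so the bootstrap stays idempotent. A compressed sketch of that control flow, where setup_repo and its run_git argument are illustrative stand-ins for the method and helper in this class:

    import os

    def setup_repo(repopath, repo_url, run_git):
        """Initialise a local clone target and wire it to its origin."""
        status = True
        try:
            if not os.path.isdir(repopath):
                os.makedirs(repopath)
            run_git('GitRepoInit', 'git init {0}'.format(repopath))
            try:
                # Re-adding an existing 'origin' remote fails; that is harmless
                # here, so the error is ignored and the fetch still runs.
                run_git('GitRepoInit',
                        'git -C {0} remote add origin {1}'.format(repopath, repo_url))
            except Exception:
                pass
            run_git('GitRepoInit', 'git -C {0} fetch origin'.format(repopath))
        except Exception:
            status = False
        return status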
@@ -128,7 +131,7 @@ class TemplateRepoClient(object):
try:
process = subprocess.Popen(
shlex.split(cmd), stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
shell=False, stderr=subprocess.PIPE)

[stdout, stderr] = process.communicate()
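Passing shell=False explicitly, together with shlex.split(), keeps the command from ever going through a shell (Bandit's B602 category in the config above): the argument list is executed directly, so repository paths and URLs cannot be re-interpreted as shell syntax. A minimal illustration of the invocation style, with a throwaway repository path:

    import shlex
    import subprocess

    cmd = 'git -C /tmp/example-repo fetch origin'   # example path, not from the diff
    process = subprocess.Popen(shlex.split(cmd),
                               stdout=subprocess.PIPE,
                               shell=False,
                               stderr=subprocess.PIPE)
    stdout, stderr = process.communicate()          # same pattern as run_git above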
@@ -153,19 +156,20 @@ class TemplateRepoClient(object):
proc_result["stderr"] = stderr.decode("UTF-8")
proc_result["timed_out"] = timed_out

if proc_result["returncode"] == 0 or \
proc_result["returncode"] == 128:
if proc_result["returncode"] == 0:
retry_left = 0
process.returncode = 0
self.git_repo_status = True
else:
retry_left -= 1
LOG.warning("stderr: %s", proc_result['stderr'])
if 'remote origin already exists' in proc_result["stderr"]:
retry_left = 0
else:
retry_left -= 1
LOG.warning("stderr: %s", proc_result)
LOG.warning("Retrying cmd '%s'. Retries left: %s",
cmd, retry_left)

self.git_repo_status = True
if process.returncode != 0:
self.git_repo_status = False
self.check_git_errors(label, proc_result)

def check_git_errors(self, label, result):
@@ -174,18 +178,21 @@ class TemplateRepoClient(object):
if result['timed_out']:
raise excp.RepoTimeoutException(label=label)

if 'service not known' in stderr:
elif 'service not known' in stderr:
raise excp.RepoIncorrectURL(label=label)

if 'does not exist' in stderr:
elif 'does not exist' in stderr:
raise excp.RepoNotExist(label=label)

if 'permission denied' in stderr:
elif ('permission denied' in stderr) or ('No such remote' in stderr):
raise excp.RepoNoPermission(label=label)

if 'did not match any file(s) known to git' in stderr:
elif 'did not match any file(s) known to git' in stderr:
raise excp.FileNotInRepo(label=label)

# general unknown exception in case none of the above
# are the cause of the problem
raise excp.RepoUnknownException(label=label, unknown=stderr)
elif 'remote origin already exists' in stderr:
pass
else:
# general unknown exception in case none of the above
# are the cause of the problem
raise excp.RepoUnknownException(label=label, unknown=stderr)
@@ -1,5 +1,4 @@
# Copyright (c) 2012 OpenStack Foundation
# All Rights Reserved.
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
@@ -13,10 +12,12 @@
# License for the specific language governing permissions and limitations
# under the License.

import ast
from multiprocessing import Process
import os
from oslo_config import cfg


from ord.client import rpcengine
from ord.engine.workerfactory import WorkerFactory
from ord.openstack.common import log as logging
@@ -51,8 +52,7 @@ class QueueHandler(object):
LOG.debug("\n Payload: %s \nctxt: %s "
% (str(payload), str(ctxt)))
LOG.debug("\n-------------------------------\n")

d = eval(payload)
d = ast.literal_eval(payload)
template_type = d["template_type"]
resource_name = d["resource_name"]
resource_type = d["resource_type"]
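Swapping eval() for ast.literal_eval() addresses Bandit's B307 finding: the payload is parsed as a Python literal (dicts, lists, strings, numbers), so a crafted message body can no longer execute arbitrary code. A small illustration with a made-up payload:

    import ast

    # Example payload shaped like the one handled above (values are invented).
    payload = ("{'template_type': 'hot', 'resource_name': 'image-1', "
               "'resource_type': 'image'}")

    d = ast.literal_eval(payload)   # accepts literals only
    assert d['template_type'] == 'hot'

    # A payload containing an expression raises ValueError instead of running it:
    # ast.literal_eval("__import__('os').system('true')")  -> ValueError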
@@ -1,5 +1,4 @@
# Copyright (c) 2012 OpenStack Foundation
# All Rights Reserved.
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
@@ -16,7 +15,7 @@
import itertools
import json
from oslo_config import cfg
import random
from random import SystemRandom
import six
import sys
import threading
@@ -83,21 +82,16 @@ class WorkerFactory(object):
str(WorkerFactory._client_initialize))
WorkerThread._init_error = None
try:
try:
WorkerThread._temp_repo_client = \
getrepo.TemplateRepoClient(CONF.local_repo)
except exc.RepoInitializationException as repoexp:
LOG.critical("Failed to initialize Repo %s " % repoexp)
WorkerThread._init_error = utils.ErrorCode.ORD_018.value
try:
WorkerThread._heat_client = heat.HeatClient()
except exc.KeystoneInitializationException as kcexp:
LOG.critical("Failed to initialize Keystone %s " % kcexp)
WorkerThread._init_error = utils.ErrorCode.ORD_016.value

WorkerThread._temp_repo_client = \
getrepo.TemplateRepoClient(CONF.local_repo)

WorkerThread._heat_client = heat.HeatClient()

try:
WorkerThread._rpcengine = rpcengine.RpcEngine()
except exc.RPCInitializationException as rpcexp:
LOG.critical("Failed to initialize RPC %s " % rpcexp)
LOG.critical("Failed to initialize RPC %s ", rpcexp)
WorkerThread._init_error = utils.ErrorCode.ORD_019.value

except Exception as exception:
@@ -130,9 +124,10 @@ class WorkerFactory(object):
template_type):
template_type = template_type.lower()

# FIXME: this code have a none zero to fail in very unexpected
# FIXME(db2242): this code have a none zero to fail in very unexpected
# way
threadID = random.randint(0, 99999999)
randCrypt = SystemRandom()
threadID = randCrypt.randint(1, 99999999)
if template_type == "hot":
miniWorker = WorkerThread(threadID, operation,
path_to_tempate, stack_name,
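random.randint() is flagged by Bandit as B311 because the default Mersenne Twister generator is predictable; random.SystemRandom draws from os.urandom and exposes the same randint() interface, so only the generator changes at the call site. A minimal illustration:

    from random import SystemRandom

    rand_crypt = SystemRandom()                  # backed by os.urandom, not seedable
    thread_id = rand_crypt.randint(1, 99999999)  # same API as random.randint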
@@ -142,7 +137,7 @@ class WorkerFactory(object):
elif template_type == "ansible":
threadID = -1
else:
# FIXME: too late for such check
# FIXME(db2242): too late for such check
raise exc.UnsupportedTemplateTypeError(template=template_type)
return threadID

@@ -169,8 +164,11 @@ class WorkerThread(threading.Thread):
self.client_error = client_error

def extract_resource_extra_metadata(self, rds_payload, rds_status):
resource_operation =\
rds_payload.get('rds-listener')['resource-operation']
if self.resource_type.lower() == 'image' \
and rds_status == utils.STATUS_SUCCESS:
and rds_status == utils.STATUS_SUCCESS\
and resource_operation != utils.OPERATION_DELETE:
stack = self._heat_client.get_stack_by_name(self.stack_name)
image_data = self._heat_client.get_image_data_by_stackid(stack.id)
if image_data:
@@ -250,24 +248,23 @@ class WorkerThread(threading.Thread):

try:
self._wait_for_heat(stack, self.operation)
except exc.StackOperationError:
except exc.ORDException:
_, e, _tb = sys.exc_info()
if e.arguments['operation'] != utils.OPERATION_CREATE:
raise
if self.operation == utils.OPERATION_CREATE:

args = {}
try:
self._delete_stack()
self._wait_for_heat(
e.arguments['stack'], utils.OPERATION_DELETE)
except exc.StackOperationError as e_rollback:
args['rollback_error'] = e_rollback
args['rollback_message'] = e_rollback.message
args['rollback_status'] = False
else:
args['rollback_status'] = True
args = {}
try:
stack = self._delete_stack()
self._wait_for_heat(
stack, utils.OPERATION_DELETE)
except exc.ORDException as e_rollback:
args['rollback_error'] = e_rollback
args['rollback_message'] = e_rollback.message
args['rollback_status'] = False
else:
args['rollback_status'] = True

raise exc.StackRollbackError(error=e, **args)
raise

def _update_permanent_storage(self, error=None):
args = {}
@@ -306,7 +303,8 @@ class WorkerThread(threading.Thread):

def _send_operation_results(self):
rds_payload = self._prepare_rds_payload()
res_ctxt = {'request-id': rds_payload.get('request-id')}
res_ctxt = \
{'request-id': rds_payload.get('rds-listener')['request-id']}
LOG.debug("----- RPC API Payload to RDS %r", rds_payload)
status_original = rds_payload.get('rds-listener')['status']

@@ -314,7 +312,7 @@ class WorkerThread(threading.Thread):
self.extract_resource_extra_metadata(rds_payload, status_original)
except Exception as exception:
LOG.error("Unexpected error collecting extra \
Image Parameter %s" % exception)
Image Parameter %s", exception)

max_range = int(CONF.orm.retry_limits)
self._rpcengine. \
@@ -415,7 +413,18 @@ class WorkerThread(threading.Thread):
status_check.status)

if status_check.is_fail:
raise exc.StackOperationError(operation=operation, stack=stack)
if operation == utils.OPERATION_CREATE:
raise exc.HEATStackCreateError(
details=stack.stack_status_reason)
elif operation == utils.OPERATION_MODIFY:
raise exc.HEATStackUpdateError(
details=stack.stack_status_reason)
elif operation == utils.OPERATION_DELETE:
raise exc.HEATStackDeleteError(
details=stack.stack_status_reason)
else:
raise exc.StackOperationError(
operation=operation, stack=stack)
elif status_check.is_in_progress:
raise exc.StackTimeoutError(operation=operation, stack=stack)
@@ -1,5 +1,4 @@
# Copyright (c) 2012 OpenStack Foundation
# All Rights Reserved.
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
@@ -27,6 +26,7 @@ from ord.tests import base
CONF = cfg.CONF


# FIXME(db2242): pep8 compatible - camelcase attributes
class TestWorkerThread(base.BaseTestCase):

def setUp(self):
@@ -75,16 +75,18 @@ class TestWorkerThread(base.BaseTestCase):
input_payload = {'rds-listener':
{'request-id': '2',
'resource-id': '1',
'resource-operation': 'create',
'resource-type': 'image'}
}
output_payload = {'rds-listener':
{'request-id': '2',
'resource-id': '1',
'resource-operation': 'create',
'resource-type': 'image',
'resource_extra_metadata':
{'checksum': 'dae557b1365b606e57fbd5d8c9d4516a',
'size': '10',
'virtual_size': '12'}}}
{'checksum': 'dae557b1365b606e57fbd5d8c9d4516a',
'size': '10',
'virtual_size': '12'}}}

self.heat_client.get_stack_by_name.return_value = stack
self.heat_client.get_image_data_by_stackid.return_value = image_data
@@ -165,18 +167,17 @@ class TestWorkerThread(base.BaseTestCase):
id='1', stack_name=self.stack_name,
stack_status='CREATE_IN_PROGRESS')
stack_ready = base.Dummy(
id='1', stack_name=self.stack_name, stack_status='CREATE_FAILED')
id='1', stack_name=self.stack_name, stack_status='CREATE_FAILED',
stack_status_reason='Stack fail due to resource creation')

status_responses = [stack_wait] * 4 + [stack_ready]

self.heat_client.get_stack.side_effect = status_responses

error = self.assertRaises(
exc.StackOperationError, self.workerThread._wait_for_heat,
self.assertRaises(
exc.HEATStackCreateError, self.workerThread._wait_for_heat,
stack_wait, utils.OPERATION_CREATE)

self.assertEqual(utils.OPERATION_CREATE, error.arguments['operation'])
self.assertIs(status_responses[-1], error.arguments['stack'])

def test_wait_for_heat_race(self):
self.patch('time.time', side_effect=itertools.count(1))
self.patch('time.sleep')