Use a consistent working dir for deployment
Adds a new cli arg --working-dir, which defaults to: $HOME/overcloud-deploy-<stack>

The working directory will be used for all files created by the overcloud
deploy command, instead of using tmp dirs and files directly in $HOME. The
working dir provides a single dir for all state associated with the
deployment, which is needed with the transition to using ephemeral Heat,
and especially when combined with multi-stack/multi-overcloud.

This patch addresses:
- deployment status
- templates
- config-download
- heat-launcher
- overcloudrc

Further patches will address other uses of files outside of working-dir
and migrate them over.

Change-Id: I0d803f695c725c58ef2e6b655753b6c8248d1b2f
Signed-off-by: James Slagle <jslagle@redhat.com>
This commit is contained in:
parent
42aa7357fb
commit
336808ba57
|
@ -30,6 +30,14 @@ class DeploymentWorkflowFixture(fixtures.Fixture):
|
|||
self.mock_set_deployment_status = self.useFixture(fixtures.MockPatch(
|
||||
'tripleoclient.workflows.deployment.set_deployment_status')
|
||||
).mock
|
||||
self.mock_create_overcloudrc = self.useFixture(fixtures.MockPatch(
|
||||
'tripleoclient.workflows.deployment.create_overcloudrc')
|
||||
).mock
|
||||
make_config_download_dir = \
|
||||
'tripleoclient.workflows.deployment.make_config_download_dir'
|
||||
self.mock_make_config_download_dir = self.useFixture(
|
||||
fixtures.MockPatch(make_config_download_dir)
|
||||
).mock
|
||||
|
||||
|
||||
class PlanManagementFixture(fixtures.Fixture):
|
||||
|
|
|
@ -1800,7 +1800,8 @@ class TestGeneralUtils(base.TestCommand):
|
|||
}
|
||||
utils.update_deployment_status(
|
||||
'overcloud',
|
||||
mock_status
|
||||
mock_status,
|
||||
''
|
||||
)
|
||||
mock_write.assert_called()
|
||||
|
||||
|
|
|
@ -232,7 +232,8 @@ class TestDeployOvercloud(fakes.TestDeployOvercloud):
|
|||
mock_get_template_contents.assert_called_with(
|
||||
template_file=mock.ANY)
|
||||
|
||||
mock_create_tempest_deployer_input.assert_called_with()
|
||||
mock_create_tempest_deployer_input.assert_called_with(
|
||||
output_dir=self.cmd.working_dir)
|
||||
mock_copy.assert_called_once()
|
||||
|
||||
@mock.patch('tripleoclient.utils.build_stack_data', autospec=True)
|
||||
|
@ -336,7 +337,8 @@ class TestDeployOvercloud(fakes.TestDeployOvercloud):
|
|||
mock_get_template_contents.assert_called_with(
|
||||
template_file=mock.ANY)
|
||||
|
||||
utils_overcloud_fixture.mock_deploy_tht.assert_called_with()
|
||||
utils_overcloud_fixture.mock_deploy_tht.assert_called_with(
|
||||
output_dir=self.cmd.working_dir)
|
||||
|
||||
mock_validate_args.assert_called_once_with(parsed_args)
|
||||
self.assertFalse(mock_invoke_plan_env_wf.called)
|
||||
|
@ -352,8 +354,6 @@ class TestDeployOvercloud(fakes.TestDeployOvercloud):
|
|||
autospec=True, return_value={})
|
||||
@mock.patch('heatclient.common.template_utils.'
|
||||
'process_environment_and_files', autospec=True)
|
||||
@mock.patch('tripleoclient.workflows.deployment.create_overcloudrc',
|
||||
autospec=True)
|
||||
@mock.patch('os.chdir')
|
||||
@mock.patch('tripleoclient.utils.copy_clouds_yaml')
|
||||
@mock.patch('tripleoclient.v1.overcloud_deploy.DeployOvercloud.'
|
||||
|
@ -379,7 +379,7 @@ class TestDeployOvercloud(fakes.TestDeployOvercloud):
|
|||
mock_breakpoints_cleanup,
|
||||
mock_postconfig, mock_stack_network_check,
|
||||
mock_ceph_fsid, mock_get_undercloud_host_entry, mock_copy,
|
||||
mock_chdir, mock_overcloudrc,
|
||||
mock_chdir,
|
||||
mock_process_env, mock_roles_data,
|
||||
mock_image_prepare, mock_generate_password,
|
||||
mock_rc_params, mock_stack_data):
|
||||
|
@ -512,7 +512,8 @@ class TestDeployOvercloud(fakes.TestDeployOvercloud):
|
|||
mock_get_template_contents.assert_called_with(
|
||||
template_file=mock.ANY)
|
||||
|
||||
mock_create_tempest_deployer_input.assert_called_with()
|
||||
mock_create_tempest_deployer_input.assert_called_with(
|
||||
output_dir=self.cmd.working_dir)
|
||||
mock_copy.assert_called_once()
|
||||
|
||||
@mock.patch('tripleoclient.v1.overcloud_deploy.DeployOvercloud.'
|
||||
|
@ -802,7 +803,8 @@ class TestDeployOvercloud(fakes.TestDeployOvercloud):
|
|||
|
||||
arglist = ['--answers-file', test_answerfile,
|
||||
'--environment-file', test_env2,
|
||||
'--disable-password-generation']
|
||||
'--disable-password-generation',
|
||||
'--working-dir', self.tmp_dir.path]
|
||||
verifylist = [
|
||||
('answers_file', test_answerfile),
|
||||
('environment_files', [test_env2]),
|
||||
|
@ -827,7 +829,8 @@ class TestDeployOvercloud(fakes.TestDeployOvercloud):
|
|||
self.assertIn('Test', call_args[8]['resource_registry'])
|
||||
self.assertIn('Test2', call_args[8]['resource_registry'])
|
||||
|
||||
utils_fixture.mock_deploy_tht.assert_called_with()
|
||||
utils_fixture.mock_deploy_tht.assert_called_with(
|
||||
output_dir=self.cmd.working_dir)
|
||||
mock_copy.assert_called_once()
|
||||
|
||||
@mock.patch('tripleoclient.utils.get_rc_params', autospec=True)
|
||||
|
@ -1016,7 +1019,8 @@ class TestDeployOvercloud(fakes.TestDeployOvercloud):
|
|||
mock_get_template_contents.assert_called_with(
|
||||
template_file=mock.ANY)
|
||||
|
||||
mock_create_tempest_deployer_input.assert_called_with()
|
||||
mock_create_tempest_deployer_input.assert_called_with(
|
||||
output_dir=self.cmd.working_dir)
|
||||
|
||||
mock_validate_args.assert_called_once_with(parsed_args)
|
||||
mock_copy.assert_called_once()
|
||||
|
@ -1279,7 +1283,11 @@ class TestDeployOvercloud(fakes.TestDeployOvercloud):
|
|||
deployment_options={}, env_files_tracker=mock.ANY)],
|
||||
mock_hd.mock_calls)
|
||||
self.assertIn(
|
||||
[mock.call(mock.ANY, mock.ANY, mock.ANY, 'ctlplane', None, None,
|
||||
[mock.call(mock.ANY, mock.ANY, mock.ANY, 'ctlplane',
|
||||
os.path.join(
|
||||
self.cmd.working_dir,
|
||||
'config-download'),
|
||||
None,
|
||||
deployment_options={},
|
||||
deployment_timeout=448, # 451 - 3, total time left
|
||||
in_flight_validations=False, limit_hosts=None,
|
||||
|
@ -1289,6 +1297,7 @@ class TestDeployOvercloud(fakes.TestDeployOvercloud):
|
|||
fixture.mock_config_download.assert_called()
|
||||
mock_copy.assert_called_once()
|
||||
|
||||
@mock.patch('tripleoclient.workflows.deployment.make_config_download_dir')
|
||||
@mock.patch('tripleoclient.utils.get_rc_params', autospec=True)
|
||||
@mock.patch('tripleoclient.utils.copy_clouds_yaml')
|
||||
@mock.patch('tripleoclient.v1.overcloud_deploy.DeployOvercloud.'
|
||||
|
@ -1301,7 +1310,7 @@ class TestDeployOvercloud(fakes.TestDeployOvercloud):
|
|||
def test_config_download_only_timeout(
|
||||
self, mock_deploy_tmpdir,
|
||||
mock_get_undercloud_host_entry, mock_update,
|
||||
mock_copyi, mock_rc_params):
|
||||
mock_copyi, mock_rc_params, mock_cd_dir):
|
||||
utils_fixture = deployment.UtilsOvercloudFixture()
|
||||
self.useFixture(utils_fixture)
|
||||
utils_fixture2 = deployment.UtilsFixture()
|
||||
|
@ -1328,7 +1337,8 @@ class TestDeployOvercloud(fakes.TestDeployOvercloud):
|
|||
|
||||
self.cmd.take_action(parsed_args)
|
||||
playbook = os.path.join(os.environ.get(
|
||||
'HOME'), 'config-download/overcloud/deploy_steps_playbook.yaml')
|
||||
'HOME'), self.cmd.working_dir,
|
||||
'config-download/overcloud/deploy_steps_playbook.yaml')
|
||||
self.assertIn(
|
||||
[mock.call(
|
||||
ansible_cfg=None, ansible_timeout=42,
|
||||
|
|
|
@ -607,9 +607,7 @@ def run_ansible_playbook(playbook, inventory, workdir, playbook_dir=None,
|
|||
env.update(extra_env_variables)
|
||||
|
||||
if 'ANSIBLE_CONFIG' not in env and not ansible_cfg:
|
||||
config_download = os.path.join(constants.DEFAULT_WORK_DIR, plan)
|
||||
makedirs(config_download)
|
||||
ansible_cfg = os.path.join(config_download, 'ansible.cfg')
|
||||
ansible_cfg = os.path.join(workdir, 'ansible.cfg')
|
||||
config = configparser.ConfigParser()
|
||||
if os.path.isfile(ansible_cfg):
|
||||
config.read(ansible_cfg)
|
||||
|
@ -861,7 +859,8 @@ def store_cli_param(command_name, parsed_args):
|
|||
"directory") % history_path)
|
||||
|
||||
|
||||
def create_tempest_deployer_input(config_name='tempest-deployer-input.conf'):
|
||||
def create_tempest_deployer_input(config_name='tempest-deployer-input.conf',
|
||||
output_dir=None):
|
||||
config = configparser.ConfigParser()
|
||||
|
||||
# Create required sections
|
||||
|
@ -895,7 +894,11 @@ def create_tempest_deployer_input(config_name='tempest-deployer-input.conf'):
|
|||
'object-storage', 'volume'):
|
||||
config.set(section, 'region', 'regionOne')
|
||||
|
||||
with open(config_name, 'w+') as config_file:
|
||||
if output_dir:
|
||||
config_path = os.path.join(output_dir, config_name)
|
||||
else:
|
||||
config_path = config_name
|
||||
with open(config_path, 'w+') as config_file:
|
||||
config.write(config_file)
|
||||
|
||||
|
||||
|
@ -1647,21 +1650,24 @@ def build_stack_data(clients, stack_name, template,
|
|||
return stack_data
|
||||
|
||||
|
||||
def archive_deploy_artifacts(log, stack_name, tht_dir,
|
||||
def archive_deploy_artifacts(log, stack_name, working_dir,
|
||||
ansible_dir=None, output_dir=None):
|
||||
"""Create a tarball of the temporary folders used"""
|
||||
log.debug(_("Preserving deployment artifacts"))
|
||||
|
||||
if not output_dir:
|
||||
output_dir = tht_dir
|
||||
output_dir = working_dir
|
||||
|
||||
def get_tar_filename():
|
||||
return '%s/%s-install-%s.tar.bzip2' % \
|
||||
(constants.CLOUD_HOME_DIR, stack_name,
|
||||
datetime.datetime.utcnow().strftime('%Y%m%d%H%M%S'))
|
||||
return os.path.join(
|
||||
working_dir, '%s-install-%s.tar.bzip2' %
|
||||
(stack_name,
|
||||
datetime.datetime.utcnow().strftime('%Y%m%d%H%M%S')))
|
||||
|
||||
def remove_leading_path(info):
|
||||
def tar_filter(info):
|
||||
"""Tar filter to remove output dir from path"""
|
||||
if info.name.endswith('.bzip2'):
|
||||
return None
|
||||
leading_path = output_dir[1:] + '/'
|
||||
info.name = info.name.replace(leading_path, '')
|
||||
return info
|
||||
|
@ -1669,10 +1675,10 @@ def archive_deploy_artifacts(log, stack_name, tht_dir,
|
|||
tar_filename = get_tar_filename()
|
||||
try:
|
||||
tf = tarfile.open(tar_filename, 'w:bz2')
|
||||
tf.add(tht_dir, recursive=True, filter=remove_leading_path)
|
||||
tf.add(working_dir, recursive=True, filter=tar_filter)
|
||||
if ansible_dir:
|
||||
tf.add(ansible_dir, recursive=True,
|
||||
filter=remove_leading_path)
|
||||
filter=tar_filter)
|
||||
tf.close()
|
||||
except Exception as ex:
|
||||
msg = _("Unable to create artifact tarball, %s") % str(ex)
|
||||
|
@ -2469,21 +2475,22 @@ def copy_clouds_yaml(user):
|
|||
raise exceptions.DeploymentError(msg)
|
||||
|
||||
|
||||
def get_status_yaml(stack_name):
|
||||
def get_status_yaml(stack_name, working_dir):
|
||||
status_yaml = os.path.join(
|
||||
constants.CLOUD_HOME_DIR,
|
||||
working_dir,
|
||||
'%s-deployment_status.yaml' % stack_name)
|
||||
return status_yaml
|
||||
|
||||
|
||||
def update_deployment_status(stack_name, status):
|
||||
def update_deployment_status(stack_name, status, working_dir):
|
||||
"""Update the deployment status."""
|
||||
|
||||
contents = yaml.safe_dump(
|
||||
{'deployment_status': status},
|
||||
default_flow_style=False)
|
||||
|
||||
safe_write(get_status_yaml(stack_name), contents)
|
||||
safe_write(get_status_yaml(stack_name, working_dir),
|
||||
contents)
|
||||
|
||||
|
||||
def create_breakpoint_cleanup_env(tht_root, stack):
|
||||
|
|
|
@ -25,7 +25,6 @@ import re
|
|||
import shutil
|
||||
import six
|
||||
import subprocess
|
||||
import tempfile
|
||||
import time
|
||||
import yaml
|
||||
|
||||
|
@ -242,31 +241,22 @@ class DeployOvercloud(command.Command):
|
|||
deployment.deploy_without_plan(
|
||||
self.clients, stack, stack_name,
|
||||
template, files, env_files_tracker,
|
||||
self.log)
|
||||
self.log, self.working_dir)
|
||||
|
||||
def _deploy_tripleo_heat_templates_tmpdir(self, stack, parsed_args):
|
||||
tht_root = os.path.abspath(parsed_args.templates)
|
||||
tht_tmp = tempfile.mkdtemp(prefix='tripleoclient-')
|
||||
new_tht_root = "%s/tripleo-heat-templates" % tht_tmp
|
||||
self.log.debug("Creating temporary templates tree in %s"
|
||||
new_tht_root = "%s/tripleo-heat-templates" % self.working_dir
|
||||
self.log.debug("Creating working templates tree in %s"
|
||||
% new_tht_root)
|
||||
try:
|
||||
shutil.copytree(tht_root, new_tht_root, symlinks=True)
|
||||
utils.jinja_render_files(self.log, parsed_args.templates,
|
||||
new_tht_root,
|
||||
parsed_args.roles_file,
|
||||
parsed_args.networks_file,
|
||||
new_tht_root)
|
||||
self._deploy_tripleo_heat_templates(stack, parsed_args,
|
||||
new_tht_root, tht_root)
|
||||
finally:
|
||||
utils.archive_deploy_artifacts(self.log, parsed_args.stack,
|
||||
new_tht_root)
|
||||
if parsed_args.no_cleanup:
|
||||
self.log.warning("Not cleaning temporary directory %s"
|
||||
% tht_tmp)
|
||||
else:
|
||||
shutil.rmtree(tht_tmp)
|
||||
shutil.rmtree(new_tht_root, ignore_errors=True)
|
||||
shutil.copytree(tht_root, new_tht_root, symlinks=True)
|
||||
utils.jinja_render_files(self.log, parsed_args.templates,
|
||||
new_tht_root,
|
||||
parsed_args.roles_file,
|
||||
parsed_args.networks_file,
|
||||
new_tht_root)
|
||||
self._deploy_tripleo_heat_templates(stack, parsed_args,
|
||||
new_tht_root, tht_root)
|
||||
|
||||
def _deploy_tripleo_heat_templates(self, stack, parsed_args,
|
||||
tht_root, user_tht_root):
|
||||
|
@ -804,7 +794,7 @@ class DeployOvercloud(command.Command):
|
|||
default=None,
|
||||
help=_('Directory to use for saved output when using '
|
||||
'--config-download. When not '
|
||||
'specified, $HOME/config-download will be used.')
|
||||
'specified, <working-dir>/config-download will be used.')
|
||||
)
|
||||
parser.add_argument(
|
||||
'--override-ansible-cfg',
|
||||
|
@ -873,6 +863,13 @@ class DeployOvercloud(command.Command):
|
|||
'the container parameters configured, the deployment '
|
||||
'action may fail.')
|
||||
)
|
||||
parser.add_argument(
|
||||
'--working-dir',
|
||||
action='store',
|
||||
help=_('The working directory for the deployment where all '
|
||||
'input, output, and generated files will be stored.\n'
|
||||
'Defaults to "$HOME/overcloud-deploy-<stack>"')
|
||||
)
|
||||
return parser
|
||||
|
||||
def take_action(self, parsed_args):
|
||||
|
@ -880,6 +877,14 @@ class DeployOvercloud(command.Command):
|
|||
logging.setup(CONF, '')
|
||||
self.log.debug("take_action(%s)" % parsed_args)
|
||||
|
||||
if not parsed_args.working_dir:
|
||||
self.working_dir = os.path.join(
|
||||
os.path.expanduser('~'),
|
||||
"overcloud-deploy-%s" % parsed_args.stack)
|
||||
else:
|
||||
self.working_dir = parsed_args.working_dir
|
||||
utils.makedirs(self.working_dir)
|
||||
|
||||
if parsed_args.update_plan_only:
|
||||
raise exceptions.DeploymentError(
|
||||
'Only plan update is not supported.')
|
||||
|
@ -925,6 +930,12 @@ class DeployOvercloud(command.Command):
|
|||
# a create then the previous stack object would be None.
|
||||
stack = utils.get_stack(self.orchestration_client, parsed_args.stack)
|
||||
|
||||
overcloud_endpoint = None
|
||||
old_rcpath = None
|
||||
rcpath = None
|
||||
horizon_url = None
|
||||
deploy_message = None
|
||||
|
||||
try:
|
||||
# Force fetching of attributes
|
||||
stack.get()
|
||||
|
@ -935,14 +946,20 @@ class DeployOvercloud(command.Command):
|
|||
self.orchestration_client,
|
||||
parsed_args.stack)
|
||||
|
||||
rcpath = deployment.create_overcloudrc(
|
||||
# For backwards compatibility, we will also write overcloudrc to
|
||||
# $HOME and then self.working_dir.
|
||||
old_rcpath = deployment.create_overcloudrc(
|
||||
stack, rc_params, parsed_args.no_proxy)
|
||||
rcpath = deployment.create_overcloudrc(
|
||||
stack, rc_params, parsed_args.no_proxy,
|
||||
self.working_dir)
|
||||
|
||||
if parsed_args.config_download:
|
||||
self.log.info("Deploying overcloud configuration")
|
||||
deployment.set_deployment_status(
|
||||
stack.stack_name,
|
||||
status='DEPLOYING'
|
||||
status='DEPLOYING',
|
||||
working_dir=self.working_dir
|
||||
)
|
||||
|
||||
if not parsed_args.config_download_only:
|
||||
|
@ -969,12 +986,17 @@ class DeployOvercloud(command.Command):
|
|||
deployment_options['ansible_python_interpreter'] = \
|
||||
parsed_args.deployment_python_interpreter
|
||||
|
||||
config_download_dir = parsed_args.output_dir or \
|
||||
os.path.join(self.working_dir, "config-download")
|
||||
deployment.make_config_download_dir(config_download_dir,
|
||||
parsed_args.stack)
|
||||
|
||||
deployment.config_download(
|
||||
self.log,
|
||||
self.clients,
|
||||
stack,
|
||||
parsed_args.overcloud_ssh_network,
|
||||
parsed_args.output_dir,
|
||||
config_download_dir,
|
||||
parsed_args.override_ansible_cfg,
|
||||
timeout=parsed_args.overcloud_ssh_port_timeout,
|
||||
verbosity=utils.playbook_verbosity(self=self),
|
||||
|
@ -990,14 +1012,16 @@ class DeployOvercloud(command.Command):
|
|||
)
|
||||
deployment.set_deployment_status(
|
||||
stack.stack_name,
|
||||
status=deploy_status)
|
||||
status=deploy_status,
|
||||
working_dir=self.working_dir)
|
||||
except Exception as deploy_e:
|
||||
deploy_status = 'DEPLOY_FAILED'
|
||||
deploy_message = 'with error'
|
||||
deploy_trace = deploy_e
|
||||
deployment.set_deployment_status(
|
||||
stack.stack_name,
|
||||
status=deploy_status
|
||||
status=deploy_status,
|
||||
working_dir=self.working_dir
|
||||
)
|
||||
finally:
|
||||
# Run postconfig on create or force. Use force to makes sure
|
||||
|
@ -1010,13 +1034,21 @@ class DeployOvercloud(command.Command):
|
|||
user = \
|
||||
getpwuid(os.stat(constants.CLOUD_HOME_DIR).st_uid).pw_name
|
||||
utils.copy_clouds_yaml(user)
|
||||
utils.create_tempest_deployer_input()
|
||||
utils.create_tempest_deployer_input(output_dir=self.working_dir)
|
||||
|
||||
print("Overcloud Endpoint: {0}".format(overcloud_endpoint))
|
||||
print("Overcloud Horizon Dashboard URL: {0}".format(horizon_url))
|
||||
print("Overcloud rc file: {0}".format(rcpath))
|
||||
print("Overcloud rc file: {} and {}".format(
|
||||
rcpath, old_rcpath))
|
||||
print("Overcloud Deployed {0}".format(deploy_message))
|
||||
|
||||
if parsed_args.output_dir:
|
||||
ansible_dir = config_download_dir
|
||||
else:
|
||||
ansible_dir = None
|
||||
utils.archive_deploy_artifacts(self.log, parsed_args.stack,
|
||||
self.working_dir, ansible_dir)
|
||||
|
||||
if deploy_status == 'DEPLOY_FAILED':
|
||||
raise(deploy_trace)
|
||||
|
||||
|
|
|
@ -46,7 +46,8 @@ def create_overcloudrc(stack, rc_params, no_proxy='',
|
|||
|
||||
def deploy_without_plan(clients, stack, stack_name, template,
|
||||
files, env_files,
|
||||
log):
|
||||
log,
|
||||
working_dir):
|
||||
orchestration_client = clients.orchestration
|
||||
if stack is None:
|
||||
log.info("Performing Heat stack create")
|
||||
|
@ -64,7 +65,8 @@ def deploy_without_plan(clients, stack, stack_name, template,
|
|||
action = 'UPDATE'
|
||||
|
||||
set_deployment_status(stack_name,
|
||||
status='DEPLOYING')
|
||||
status='DEPLOYING',
|
||||
working_dir=working_dir)
|
||||
stack_args = {
|
||||
'stack_name': stack_name,
|
||||
'template': template,
|
||||
|
@ -80,7 +82,8 @@ def deploy_without_plan(clients, stack, stack_name, template,
|
|||
print("Success.")
|
||||
except Exception:
|
||||
set_deployment_status(stack_name,
|
||||
status='DEPLOY_FAILED')
|
||||
status='DEPLOY_FAILED',
|
||||
working_dir=working_dir)
|
||||
raise
|
||||
|
||||
create_result = utils.wait_for_stack_ready(
|
||||
|
@ -89,7 +92,8 @@ def deploy_without_plan(clients, stack, stack_name, template,
|
|||
shell.OpenStackShell().run(["stack", "failures", "list", stack_name])
|
||||
set_deployment_status(
|
||||
stack_name,
|
||||
status='DEPLOY_FAILED'
|
||||
status='DEPLOY_FAILED',
|
||||
working_dir=working_dir
|
||||
)
|
||||
if stack is None:
|
||||
raise exceptions.DeploymentError("Heat Stack create failed.")
|
||||
|
@ -394,28 +398,27 @@ def config_download(log, clients, stack, ssh_network='ctlplane',
|
|||
else:
|
||||
playbooks = os.path.join(stack_work_dir, ansible_playbook_name)
|
||||
|
||||
with utils.TempDirs() as tmp:
|
||||
utils.run_ansible_playbook(
|
||||
playbook=playbooks,
|
||||
inventory=inventory_path,
|
||||
workdir=tmp,
|
||||
playbook_dir=stack_work_dir,
|
||||
skip_tags=skip_tags,
|
||||
tags=tags,
|
||||
ansible_cfg=override_ansible_cfg,
|
||||
verbosity=verbosity,
|
||||
ssh_user=ssh_user,
|
||||
key=key_file,
|
||||
limit_hosts=limit_hosts,
|
||||
ansible_timeout=timeout,
|
||||
reproduce_command=True,
|
||||
extra_env_variables={
|
||||
'ANSIBLE_BECOME': True,
|
||||
},
|
||||
extra_vars=extra_vars,
|
||||
timeout=deployment_timeout,
|
||||
forks=forks
|
||||
)
|
||||
utils.run_ansible_playbook(
|
||||
playbook=playbooks,
|
||||
inventory=inventory_path,
|
||||
workdir=output_dir,
|
||||
playbook_dir=stack_work_dir,
|
||||
skip_tags=skip_tags,
|
||||
tags=tags,
|
||||
ansible_cfg=override_ansible_cfg,
|
||||
verbosity=verbosity,
|
||||
ssh_user=ssh_user,
|
||||
key=key_file,
|
||||
limit_hosts=limit_hosts,
|
||||
ansible_timeout=timeout,
|
||||
reproduce_command=True,
|
||||
extra_env_variables={
|
||||
'ANSIBLE_BECOME': True,
|
||||
},
|
||||
extra_vars=extra_vars,
|
||||
timeout=deployment_timeout,
|
||||
forks=forks
|
||||
)
|
||||
|
||||
_log_and_print(
|
||||
message='Overcloud configuration completed for stack: {}'.format(
|
||||
|
@ -486,7 +489,22 @@ def get_deployment_status(clients, stack_name):
|
|||
return None
|
||||
|
||||
|
||||
def set_deployment_status(stack_name, status):
|
||||
def set_deployment_status(stack_name, status, working_dir):
|
||||
utils.update_deployment_status(
|
||||
stack_name=stack_name,
|
||||
status=status)
|
||||
status=status,
|
||||
working_dir=working_dir)
|
||||
|
||||
|
||||
def make_config_download_dir(config_download_dir, stack):
|
||||
utils.makedirs(config_download_dir)
|
||||
utils.makedirs(DEFAULT_WORK_DIR)
|
||||
# Symlink for the previous default config-download dir to the
|
||||
# new consistent location.
|
||||
# This will create the following symlink:
|
||||
# ~/config-download ->
|
||||
# ~/overcloud-deploy/<stack>/config-download
|
||||
config_download_stack_dir = \
|
||||
os.path.join(DEFAULT_WORK_DIR, stack)
|
||||
if not os.path.exists(config_download_stack_dir):
|
||||
os.symlink(config_download_dir, config_download_stack_dir)
|
||||
|
|
Loading…
Reference in New Issue