Merge "Allow ansible python interpreter be configurable"

Zuul 2018-11-16 00:10:05 +00:00 committed by Gerrit Code Review
commit 88cf1d3953
8 changed files with 75 additions and 16 deletions

View File

@@ -1147,3 +1147,17 @@ class TestDeployNameScenarios(TestWithScenarios):
         observed = self.func()
         self.assertEqual(self.expected, observed)
+
+
+class TestDeploymentPythonInterpreter(TestCase):
+    def test_system_default(self):
+        args = mock.MagicMock()
+        args.deployment_python_interpreter = None
+        py = utils.get_deployment_python_interpreter(args)
+        self.assertEqual(py, sys.executable)
+
+    def test_provided_interpreter(self):
+        args = mock.MagicMock()
+        args.deployment_python_interpreter = 'foo'
+        py = utils.get_deployment_python_interpreter(args)
+        self.assertEqual(py, 'foo')

View File

@@ -658,7 +658,8 @@ class TestDeployOvercloud(fakes.TestDeployOvercloud):
         def _fake_heat_deploy(self, stack, stack_name, template_path,
                               parameters, environments, timeout, tht_root,
                               env, update_plan_only, run_validations,
-                              skip_deploy_identifier, plan_env_file):
+                              skip_deploy_identifier, plan_env_file,
+                              deployment_options=None):
             assertEqual(
                 {'parameter_defaults': {'NovaComputeLibvirtType': 'qemu'},
                  'resource_registry': {
@@ -719,7 +720,8 @@ class TestDeployOvercloud(fakes.TestDeployOvercloud):
         def _fake_heat_deploy(self, stack, stack_name, template_path,
                               parameters, environments, timeout, tht_root,
                               env, update_plan_only, run_validations,
-                              skip_deploy_identifier, plan_env_file):
+                              skip_deploy_identifier, plan_env_file,
+                              deployment_options=None):
             # Should be no breakpoint cleanup because utils.get_stack = None
             assertEqual(
                 {'parameter_defaults': {},
@@ -1030,7 +1032,7 @@ class TestDeployOvercloud(fakes.TestDeployOvercloud):
            self.cmd, {}, 'overcloud',
            '/fake/path/' + constants.OVERCLOUD_YAML_NAME, {},
            ['~/overcloud-env.json'], 1, '/fake/path', {}, False, True, False,
-           None)
+           None, deployment_options=None)
 
     @mock.patch('tripleoclient.v1.overcloud_deploy.DeployOvercloud.'
                 '_heat_deploy', autospec=True)

View File

@@ -419,7 +419,8 @@ class TestOvercloudDeployPlan(utils.TestCommand):
            workflow_input={
                'container': 'overcast',
                'run_validations': True,
-               'skip_deploy_identifier': False
+               'skip_deploy_identifier': False,
+               'deployment_options': {},
            }
        )

View File

@@ -669,7 +669,9 @@ class TestDeployUndercloud(TestPluginV1):
                              mock_importInv, createdir_mock):
         fake_output_dir = '/twd'
-        extra_vars = {'Undercloud': {'ansible_connection': 'local'}}
+        extra_vars = {'Undercloud': {
+            'ansible_connection': 'local',
+            'ansible_python_interpreter': sys.executable}}
         mock_inventory = mock.Mock()
         mock_importInv.return_value = mock_inventory
         self.cmd.output_dir = fake_output_dir
@@ -793,7 +795,8 @@ class TestDeployUndercloud(TestPluginV1):
         mock_tht.assert_called_once_with(self.cmd, fake_orchestration,
                                          parsed_args)
         mock_download.assert_called_with(self.cmd, fake_orchestration,
-                                         'undercloud', 'Undercloud')
+                                         'undercloud', 'Undercloud',
+                                         sys.executable)
         mock_launchansible.assert_called_once()
         mock_tarball.assert_called_once()
         mock_cleanupdirs.assert_called_once()

View File

@@ -1349,3 +1349,10 @@ def update_nodes_deploy_data(imageclient, nodes):
             if 'ramdisk_id' not in node and ramdisk in img_map:
                 node['ramdisk_id'] = img_map[ramdisk]
                 break
+
+
+def get_deployment_python_interpreter(parsed_args):
+    """Return correct deployment python interpreter """
+    if parsed_args.deployment_python_interpreter:
+        return parsed_args.deployment_python_interpreter
+    return sys.executable
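
The helper simply prefers the CLI-provided path and otherwise falls back to the interpreter running the client itself. A minimal, self-contained sketch of how the new --deployment-python-interpreter option and this helper interact; the option and attribute names come from this change, while the bare argparse parser and the sample paths are only illustrative:

    import argparse
    import sys


    def get_deployment_python_interpreter(parsed_args):
        """Return the python interpreter to use for deployment actions."""
        if parsed_args.deployment_python_interpreter:
            return parsed_args.deployment_python_interpreter
        # Fall back to whatever python is running the client itself.
        return sys.executable


    parser = argparse.ArgumentParser()
    parser.add_argument('--deployment-python-interpreter', default=None)

    # No value given: the client's own interpreter is used.
    assert get_deployment_python_interpreter(
        parser.parse_args([])) == sys.executable

    # Explicit value: passed through untouched.
    args = parser.parse_args(
        ['--deployment-python-interpreter', '/usr/bin/python3'])
    assert get_deployment_python_interpreter(args) == '/usr/bin/python3'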

View File

@@ -188,7 +188,8 @@ class DeployOvercloud(command.Command):
     def _heat_deploy(self, stack, stack_name, template_path, parameters,
                      env_files, timeout, tht_root, env, update_plan_only,
-                     run_validations, skip_deploy_identifier, plan_env_file):
+                     run_validations, skip_deploy_identifier, plan_env_file,
+                     deployment_options=None):
         """Verify the Baremetal nodes are available and do a stack update"""
 
         self.log.debug("Getting template contents from plan %s" % stack_name)
@@ -231,7 +232,8 @@ class DeployOvercloud(command.Command):
             stack_name, self.app_args.verbose_level,
             timeout=timeout,
             run_validations=run_validations,
-            skip_deploy_identifier=skip_deploy_identifier)
+            skip_deploy_identifier=skip_deploy_identifier,
+            deployment_options=deployment_options)
 
     def _process_and_upload_environment(self, container_name,
                                         env, moved_files, tht_root):
@@ -426,6 +428,11 @@ class DeployOvercloud(command.Command):
         if parsed_args.environment_files:
             created_env_files.extend(parsed_args.environment_files)
 
+        deployment_options = {}
+        if parsed_args.deployment_python_interpreter:
+            deployment_options['ansible_python_interpreter'] = \
+                parsed_args.deployment_python_interpreter
+
         self.log.debug("Processing environment files %s" % created_env_files)
         env_files, localenv = utils.process_multiple_environments(
             created_env_files, tht_root, user_tht_root,
@@ -451,7 +458,8 @@ class DeployOvercloud(command.Command):
             tht_root, stack, parsed_args.stack, parameters, env_files,
             parsed_args.timeout, env, parsed_args.update_plan_only,
             parsed_args.run_validations, parsed_args.skip_deploy_identifier,
-            parsed_args.plan_environment_file)
+            parsed_args.plan_environment_file,
+            deployment_options=deployment_options)
 
     def _try_overcloud_deploy_with_compat_yaml(self, tht_root, stack,
                                                stack_name, parameters,
@@ -459,14 +467,16 @@ class DeployOvercloud(command.Command):
                                                env, update_plan_only,
                                                run_validations,
                                                skip_deploy_identifier,
-                                               plan_env_file):
+                                               plan_env_file,
+                                               deployment_options=None):
         overcloud_yaml = os.path.join(tht_root, constants.OVERCLOUD_YAML_NAME)
         try:
             self._heat_deploy(stack, stack_name, overcloud_yaml,
                               parameters, env_files, timeout,
                               tht_root, env, update_plan_only,
                               run_validations, skip_deploy_identifier,
-                              plan_env_file)
+                              plan_env_file,
+                              deployment_options=deployment_options)
         except ClientException as e:
             messages = 'Failed to deploy: %s' % str(e)
             raise ValueError(messages)
@@ -830,6 +840,11 @@ class DeployOvercloud(command.Command):
                    'unset, will default to however much time is leftover '
                    'from the --timeout parameter after the stack operation.')
             )
+        parser.add_argument('--deployment-python-interpreter', default=None,
+                            help=_('The path to python interpreter to use for '
+                                   'the deployment actions. This may need to '
+                                   'be used if deploying on a python2 host '
+                                   'from a python3 system or vice versa.'))
         return parser
 
     def take_action(self, parsed_args):
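
Taken together, these hunks turn the new flag into a small deployment_options dict inside take_action and thread it unchanged through _heat_deploy and _try_overcloud_deploy_with_compat_yaml down to deploy_and_wait. A rough sketch of just that defaulting step; the dict key and flag semantics come from the diff, while the standalone helper below is invented purely for illustration:

    # Hypothetical helper condensing the logic added to take_action above.
    def build_deployment_options(deployment_python_interpreter=None):
        deployment_options = {}
        if deployment_python_interpreter:
            deployment_options['ansible_python_interpreter'] = \
                deployment_python_interpreter
        return deployment_options


    # Flag unset: an empty dict, so existing deployments behave as before.
    assert build_deployment_options() == {}

    # Flag set: the interpreter is forwarded as an Ansible variable.
    assert build_deployment_options('/usr/bin/python3') == {
        'ansible_python_interpreter': '/usr/bin/python3'}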

View File

@@ -759,7 +759,8 @@ class Deploy(command.Command):
         return "%s/%s" % (stack_name, stack_id)
 
     def _download_ansible_playbooks(self, client, stack_name,
-                                    tripleo_role_name='Standalone'):
+                                    tripleo_role_name='Standalone',
+                                    python_interpreter=sys.executable):
         stack_config = config.Config(client)
         self._create_working_dirs()
@@ -777,7 +778,13 @@ class Deploy(command.Command):
             ansible_ssh_user='root')
 
         inv_path = os.path.join(self.tmp_ansible_dir, 'inventory.yaml')
-        extra_vars = {tripleo_role_name: {'ansible_connection': 'local'}}
+        extra_vars = {
+            tripleo_role_name: {
+                'ansible_connection': 'local',
+                'ansible_python_interpreter': python_interpreter,
+            }
+        }
         inventory.write_static_inventory(inv_path, extra_vars)
 
         self.log.info(_('** Downloaded {0} ansible to {1} **').format(
@@ -898,6 +905,13 @@ class Deploy(command.Command):
                    'Defaults to $SUDO_USER. If $SUDO_USER is unset '
                    'it defaults to stack.')
             )
+        parser.add_argument('--deployment-python-interpreter', default=None,
+                            help=_('The path to python interpreter to use for '
+                                   'the deployment actions. If not specified '
+                                   'the python version of the openstackclient '
+                                   'will be used. This may need to be used '
+                                   'if deploying on a python2 host from a '
+                                   'python3 system or vice versa.'))
         parser.add_argument(
             '--heat-container-image', metavar='<HEAT_CONTAINER_IMAGE>',
             dest='heat_container_image',
@@ -1129,10 +1143,12 @@ class Deploy(command.Command):
             raise exceptions.DeploymentError(message)
 
         # download the ansible playbooks and execute them.
+        depl_python = utils.get_deployment_python_interpreter(parsed_args)
         self.ansible_dir = \
             self._download_ansible_playbooks(orchestration_client,
                                              parsed_args.stack,
-                                             parsed_args.standalone_role)
+                                             parsed_args.standalone_role,
+                                             depl_python)
 
         # Do not override user's custom ansible configuraition file,
         # it may have been pre-created with the tripleo CLI, or the like
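
For the standalone and undercloud path, the resolved interpreter travels from utils.get_deployment_python_interpreter into _download_ansible_playbooks and lands in the static inventory's extra_vars, so the local Ansible run picks it up as ansible_python_interpreter. A rough illustration of the resulting variables; the mapping mirrors the hunk above, while the YAML dump is only a visualization, not the exact file layout the inventory writer produces:

    import sys

    import yaml

    # Either sys.executable or the value of --deployment-python-interpreter.
    python_interpreter = sys.executable
    extra_vars = {
        'Standalone': {
            'ansible_connection': 'local',
            'ansible_python_interpreter': python_interpreter,
        }
    }
    # Roughly what the host variables look like once written to inventory.yaml.
    print(yaml.safe_dump(extra_vars, default_flow_style=False))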

View File

@@ -64,13 +64,14 @@ def deploy(log, clients, **workflow_input):
 def deploy_and_wait(log, clients, stack, plan_name, verbose_level,
                     timeout=None, run_validations=False,
-                    skip_deploy_identifier=False):
+                    skip_deploy_identifier=False, deployment_options={}):
     """Start the deploy and wait for it to finish"""
 
     workflow_input = {
         "container": plan_name,
         "run_validations": run_validations,
-        "skip_deploy_identifier": skip_deploy_identifier
+        "skip_deploy_identifier": skip_deploy_identifier,
+        "deployment_options": deployment_options,
     }
 
     if timeout is not None:
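
Finally, deploy_and_wait forwards whatever it was given as deployment_options into the workflow input, defaulting to an empty dict so callers that pass nothing see no behavior change. A condensed sketch of just that input construction; the keys match the hunk above, while the function name and structure are illustrative only:

    def build_workflow_input(plan_name, run_validations=False,
                             skip_deploy_identifier=False,
                             deployment_options=None):
        # Mirrors the workflow_input dict assembled by deploy_and_wait.
        return {
            "container": plan_name,
            "run_validations": run_validations,
            "skip_deploy_identifier": skip_deploy_identifier,
            "deployment_options": deployment_options or {},
        }


    assert build_workflow_input("overcloud")["deployment_options"] == {}
    assert build_workflow_input(
        "overcloud",
        deployment_options={"ansible_python_interpreter": "/usr/bin/python3"},
    )["deployment_options"] == {"ansible_python_interpreter": "/usr/bin/python3"}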