Merge "Add host/group vars"

Zuul 2018-02-16 21:27:19 +00:00 committed by Gerrit Code Review
commit 9a21ad84dc
10 changed files with 201 additions and 70 deletions

View File

@@ -877,6 +877,48 @@ Here is an example of two job definitions:
same name will override a previously defined variable, but new
variable names will be added to the set of defined variables.

.. attr:: host_vars

   A dictionary of host variables to supply to Ansible. The keys
   of this dictionary are node names as defined in a
   :ref:`nodeset`, and the values are dictionaries of variables,
   just as in :attr:`job.vars`.

.. attr:: group_vars

   A dictionary of group variables to supply to Ansible. The keys
   of this dictionary are node groups as defined in a
   :ref:`nodeset`, and the values are dictionaries of variables,
   just as in :attr:`job.vars`.

An example of three kinds of variables:

.. code-block:: yaml

   - job:
       name: variable-example
       nodeset:
         nodes:
           - name: controller
             label: fedora-27
           - name: api1
             label: centos-7
           - name: api2
             label: centos-7
         groups:
           - name: api
             nodes:
               - api1
               - api2
       vars:
         foo: "this variable is visible to all nodes"
       host_vars:
         controller:
           bar: "this variable is visible only on the controller node"
       group_vars:
         api:
           baz: "this variable is visible on api1 and api2"

.. attr:: dependencies

   A list of other jobs upon which this job depends. Zuul will not

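For reference, the executor change later in this commit turns these three kinds of variables into a single Ansible inventory. A rough sketch of the inventory dict that make_inventory_dict() would build for the variable-example job above (illustrative only; the zuul.* variables and per-host connection settings such as ansible_host that Zuul also adds are omitted):

inventory = {
    'all': {
        'hosts': {
            'controller': {'bar': 'this variable is visible only on the controller node'},
            'api1': {},
            'api2': {},
        },
        'vars': {'foo': 'this variable is visible to all nodes'},
    },
    'api': {
        # Group members map to None; the group's variables live under 'vars'.
        'hosts': {'api1': None, 'api2': None},
        'vars': {'baz': 'this variable is visible on api1 and api2'},
    },
}
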
View File

@@ -1350,7 +1350,7 @@ class RecordingAnsibleJob(zuul.executor.server.AnsibleJob):
host['host_vars']['ansible_connection'] = 'local'
hosts.append(dict(
name=['localhost'],
name='localhost',
host_vars=dict(ansible_connection='local'),
host_keys=[]))
return hosts

View File

@@ -0,0 +1,26 @@
- hosts: host1
  tasks:
    - name: Assert hostvar is present.
      assert:
        that:
          - allvar == 'all'
          - hostvar == 'host'
          - groupvar is not defined

- hosts: host2
  tasks:
    - name: Assert groupvar is present.
      assert:
        that:
          - allvar == 'all'
          - hostvar is not defined
          - groupvar == 'group'

- hosts: host3
  tasks:
    - name: Assert groupvar is present.
      assert:
        that:
          - allvar == 'all'
          - hostvar is not defined
          - groupvar == 'group'

View File

@@ -119,6 +119,32 @@
- vartest_site
- vartest_secret

- job:
    parent: python27
    name: check-hostvars
    run: playbooks/check-hostvars.yaml
    nodeset:
      nodes:
        - name: host1
          label: ubuntu-xenial
        - name: host2
          label: ubuntu-xenial
        - name: host3
          label: ubuntu-xenial
      groups:
        - name: group1
          nodes:
            - host2
            - host3
    vars:
      allvar: all
    host_vars:
      host1:
        hostvar: host
    group_vars:
      group1:
        groupvar: group

- job:
parent: python27
name: check-secret-names

View File

@@ -15,6 +15,7 @@
- python27
- faillocal
- check-vars
- check-hostvars
- check-secret-names
- timeout
- post-timeout

View File

@@ -425,12 +425,14 @@ class TestAnsibleJob(ZuulTestCase):
node = {'name': 'fake-host',
'host_keys': ['fake-host-key'],
'interface_ip': 'localhost'}
keys = self.test_job.getHostList({'nodes': [node]})[0]['host_keys']
keys = self.test_job.getHostList({'nodes': [node],
'host_vars': {}})[0]['host_keys']
self.assertEqual(keys[0], 'localhost fake-host-key')
# Test with custom connection_port set
node['connection_port'] = 22022
keys = self.test_job.getHostList({'nodes': [node]})[0]['host_keys']
keys = self.test_job.getHostList({'nodes': [node],
'host_vars': {}})[0]['host_keys']
self.assertEqual(keys[0], '[localhost]:22022 fake-host-key')

View File

@@ -508,6 +508,8 @@ class JobParser(object):
'roles': to_list(role),
'required-projects': to_list(vs.Any(job_project, str)),
'vars': dict,
'host_vars': {str: dict},
'group_vars': {str: dict},
'dependencies': to_list(str),
'allowed-projects': to_list(str),
'override-branch': str,
@@ -611,8 +613,9 @@ class JobParser(object):
secret = layout.secrets.get(secret_config['secret'])
if secret is None:
raise SecretNotFoundError(secret_name)
if secret_name == 'zuul':
raise Exception("Secrets named 'zuul' are not allowed.")
if secret_name == 'zuul' or secret_name == 'nodepool':
raise Exception("Secrets named 'zuul' or 'nodepool' "
"are not allowed.")
if not secret.source_context.isSameProject(job.source_context):
raise Exception(
"Unable to use secret %s. Secrets must be "
@@ -730,9 +733,24 @@ class JobParser(object):
variables = conf.get('vars', None)
if variables:
if 'zuul' in variables:
raise Exception("Variables named 'zuul' are not allowed.")
if 'zuul' in variables or 'nodepool' in variables:
raise Exception("Variables named 'zuul' or 'nodepool' "
"are not allowed.")
job.variables = variables
host_variables = conf.get('host_vars', None)
if host_variables:
for host, hvars in host_variables.items():
if 'zuul' in hvars or 'nodepool' in hvars:
raise Exception("Variables named 'zuul' or 'nodepool' "
"are not allowed.")
job.host_variables = host_variables
group_variables = conf.get('group_vars', None)
if group_variables:
for group, gvars in group_variables.items():
if 'zuul' in gvars or 'nodepool' in gvars:
raise Exception("Variables named 'zuul' or 'nodepool' "
"are not allowed.")
job.group_variables = group_variables
allowed_projects = conf.get('allowed-projects', None)
if allowed_projects:

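The same reserved-name rule now applies to vars, host_vars, and group_vars. A minimal standalone sketch of that check (the helper name is made up for illustration; the parser above inlines it):

def _reject_reserved(vars_dict):
    # Top-level keys 'zuul' and 'nodepool' are reserved for Zuul itself.
    if 'zuul' in vars_dict or 'nodepool' in vars_dict:
        raise Exception("Variables named 'zuul' or 'nodepool' "
                        "are not allowed.")

_reject_reserved({'allvar': 'all'})                      # accepted
_reject_reserved({'nodepool': {'region': 'override'}})   # raises
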
View File

@@ -12,7 +12,6 @@
# License for the specific language governing permissions and limitations
# under the License.
import copy
import gear
import json
import logging
@@ -209,7 +208,9 @@ class ExecutorClient(object):
nodes.append(n)
params['nodes'] = nodes
params['groups'] = [group.toDict() for group in nodeset.getGroups()]
params['vars'] = copy.deepcopy(job.variables)
params['vars'] = job.variables
params['host_vars'] = job.host_variables
params['group_vars'] = job.group_variables
params['zuul'] = zuul_params
projects = set()
required_projects = set()

View File

@@ -497,16 +497,22 @@ def _copy_ansible_files(python_module, target_dir):
shutil.copy(os.path.join(library_path, fn), target_dir)
def make_setup_inventory_dict(nodes):
def check_varnames(var):
# We block these in configloader, but block it here too to make
# sure that a job doesn't pass variables named zuul or nodepool.
if 'zuul' in var:
raise Exception("Defining variables named 'zuul' is not allowed")
if 'nodepool' in var:
raise Exception("Defining variables named 'nodepool' is not allowed")
def make_setup_inventory_dict(nodes):
hosts = {}
for node in nodes:
if (node['host_vars']['ansible_connection'] in
BLACKLISTED_ANSIBLE_CONNECTION_TYPES):
continue
for name in node['name']:
hosts[name] = node['host_vars']
hosts[node['name']] = node['host_vars']
inventory = {
'all': {
@@ -517,12 +523,10 @@ make_setup_inventory_dict(nodes):
return inventory
def make_inventory_dict(nodes, groups, all_vars):
def make_inventory_dict(nodes, args, all_vars):
hosts = {}
for node in nodes:
for name in node['name']:
hosts[name] = node['host_vars']
hosts[node['name']] = node['host_vars']
inventory = {
'all': {
@@ -531,14 +535,16 @@ make_inventory_dict(nodes, groups, all_vars):
}
}
for group in groups:
for group in args['groups']:
group_hosts = {}
for node_name in group['nodes']:
# children is a dict with None as values because we don't have
# any per-group variables. If we did, None would be a dict
# with the per-group variables
group_hosts[node_name] = None
inventory[group['name']] = {'hosts': group_hosts}
group_vars = args['group_vars'].get(group['name'], {}).copy()
check_varnames(group_vars)
inventory[group['name']] = {
'hosts': group_hosts,
'vars': group_vars,
}
return inventory
@@ -974,9 +980,12 @@
# set to True in the clouds.yaml for a cloud if this
# results in the wrong thing being in interface_ip
# TODO(jeblair): Move this notice to the docs.
for name in node['name']:
ip = node.get('interface_ip')
port = node.get('connection_port', node.get('ssh_port', 22))
host_vars = dict(
host_vars = args['host_vars'].get(name, {}).copy()
check_varnames(host_vars)
host_vars.update(dict(
ansible_host=ip,
ansible_user=self.executor_server.default_username,
ansible_port=port,
@@ -989,7 +998,7 @@
interface_ip=node.get('interface_ip'),
public_ipv4=node.get('public_ipv4'),
private_ipv4=node.get('private_ipv4'),
public_ipv6=node.get('public_ipv6')))
public_ipv6=node.get('public_ipv6'))))
username = node.get('username')
if username:
@@ -1007,7 +1016,7 @@
host_keys.append("%s %s" % (ip, key))
hosts.append(dict(
name=node['name'],
name=name,
host_vars=host_vars,
host_keys=host_keys))
return hosts
@@ -1102,10 +1111,7 @@
secrets = playbook['secrets']
if secrets:
if 'zuul' in secrets:
# We block this in configloader, but block it here too to make
# sure that a job doesn't pass secrets named zuul.
raise Exception("Defining secrets named 'zuul' is not allowed")
check_varnames(secrets)
jobdir_playbook.secrets_content = yaml.safe_dump(
secrets, default_flow_style=False)
@@ -1206,12 +1212,9 @@
def prepareAnsibleFiles(self, args):
all_vars = args['vars'].copy()
check_varnames(all_vars)
# TODO(mordred) Hack to work around running things with python3
all_vars['ansible_python_interpreter'] = '/usr/bin/python2'
if 'zuul' in all_vars:
# We block this in configloader, but block it here too to make
# sure that a job doesn't pass variables named zuul.
raise Exception("Defining vars named 'zuul' is not allowed")
all_vars['zuul'] = args['zuul'].copy()
all_vars['zuul']['executor'] = dict(
hostname=self.executor_server.hostname,
@@ -1222,7 +1225,7 @@
nodes = self.getHostList(args)
setup_inventory = make_setup_inventory_dict(nodes)
inventory = make_inventory_dict(nodes, args['groups'], all_vars)
inventory = make_inventory_dict(nodes, args, all_vars)
with open(self.jobdir.setup_inventory, 'w') as setup_inventory_yaml:
setup_inventory_yaml.write(

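To make the per-host merge above concrete, here is roughly what getHostList() now does for one node name when the job supplies host_vars for it (values are made up; check_varnames() is the guard defined in this file):

args = {'host_vars': {'controller': {'bar': 'controller-only'}}}
name = 'controller'

host_vars = args['host_vars'].get(name, {}).copy()  # job-supplied host vars
check_varnames(host_vars)                           # reject zuul/nodepool keys
host_vars.update(dict(                              # connection details are added last, so they win
    ansible_host='203.0.113.10',
    ansible_user='zuul',
    ansible_port=22,
))
# host_vars is now roughly:
# {'bar': 'controller-only', 'ansible_host': '203.0.113.10',
#  'ansible_user': 'zuul', 'ansible_port': 22}
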
View File

@@ -841,6 +841,8 @@ class Job(object):
timeout=None,
post_timeout=None,
variables={},
host_variables={},
group_variables={},
nodeset=NodeSet(),
workspace=None,
pre_run=(),
@@ -982,10 +984,19 @@
matchers.append(self.branch_matcher)
self.branch_matcher = change_matcher.MatchAll(matchers)
def updateVariables(self, other_vars):
def updateVariables(self, other_vars, other_host_vars, other_group_vars):
if other_vars is not None:
v = copy.deepcopy(self.variables)
Job._deepUpdate(v, other_vars)
self.variables = v
if other_host_vars is not None:
v = copy.deepcopy(self.host_variables)
Job._deepUpdate(v, other_host_vars)
self.host_variables = v
if other_group_vars is not None:
v = copy.deepcopy(self.group_variables)
Job._deepUpdate(v, other_group_vars)
self.group_variables = v
def updateParentData(self, other_vars):
# Update variables, but give the current values priority (used
@@ -1062,7 +1073,8 @@
"from other projects."
% (repr(self), this_origin))
if k not in set(['pre_run', 'run', 'post_run', 'roles',
'variables', 'required_projects',
'variables', 'host_variables',
'group_variables', 'required_projects',
'allowed_projects']):
# TODO(jeblair): determine if deepcopy is required
setattr(self, k, copy.deepcopy(other._get(k)))
@@ -1103,8 +1115,8 @@
if other._get('post_run') is not None:
other_post_run = self.freezePlaybooks(other.post_run)
self.post_run = other_post_run + self.post_run
if other._get('variables') is not None:
self.updateVariables(other.variables)
self.updateVariables(other.variables, other.host_variables,
other.group_variables)
if other._get('required_projects') is not None:
self.updateProjects(other.required_projects)
if (other._get('allowed_projects') is not None and
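
A closing note on inheritance: updateVariables() deep-merges a variant's dictionaries into the accumulated job, so a variant can add or override individual host or group variables without discarding the parent's. A rough illustration of the merge Job._deepUpdate() performs for host_variables (values are made up, not Zuul output):

parent_host_vars = {'controller': {'bar': 'from-parent', 'keep': 'yes'}}
variant_host_vars = {'controller': {'bar': 'from-variant'},
                     'api1': {'extra': 'value'}}

# After applying the variant, the job's host_variables are roughly:
merged = {'controller': {'bar': 'from-variant', 'keep': 'yes'},
          'api1': {'extra': 'value'}}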