Load job from pipeline state on executors

Instead of sending the required job variables via the build request, we
will give the executor the job path so that it can load the frozen job
from the pipeline state.

Change-Id: Ie1b7ea0a2bc5dfc2d44bcafbc9eb8c227bbe7de2
This commit is contained in:
Simon Westphahl 2021-11-18 10:31:51 +01:00 committed by James E. Blair
parent d5efa36684
commit 21f5bd9f11
9 changed files with 231 additions and 237 deletions

View File

@ -2935,18 +2935,22 @@ class FakeBuild(object):
self.jobdir = None
self.uuid = build_request.uuid
self.parameters = params
self.job = model.FrozenJob.fromZK(executor_server.zk_context,
params["job_ref"])
self.parameters["zuul"].update(
zuul.executor.server.zuul_params_from_job(self.job))
# TODOv3(jeblair): self.node is really "the label of the node
# assigned". We should rename it (self.node_label?) if we
# keep using it like this, or we may end up exposing more of
# the complexity around multi-node jobs here
# (self.nodes[0].label?)
self.node = None
if len(self.parameters['nodeset']['nodes']) == 1:
self.node = self.parameters['nodeset']['nodes'][0]['label']
if len(self.job.nodeset.nodes) == 1:
self.node = next(iter(self.job.nodeset.nodes.values())).label
self.unique = self.parameters['zuul']['build']
self.pipeline = self.parameters['zuul']['pipeline']
self.project = self.parameters['zuul']['project']['name']
self.name = self.parameters['job']
self.name = self.job.name
self.wait_condition = threading.Condition()
self.waiting = False
self.paused = False
@ -3114,7 +3118,7 @@ class RecordingAnsibleJob(zuul.executor.server.AnsibleJob):
return
self.executor_server.build_history.append(
BuildHistory(name=build.name, result=result, changes=build.changes,
node=build.node, uuid=build.unique,
node=build.node, uuid=build.unique, job=build.job,
ref=build.parameters['zuul']['ref'],
newrev=build.parameters['zuul'].get('newrev'),
parameters=build.parameters, jobdir=build.jobdir,
@ -3316,7 +3320,7 @@ class TestingExecutorApi(HoldableExecutorApi):
return self._test_build_request_job_map[build_request.uuid]
d = self.getParams(build_request)
if d:
data = d.get('job', '')
data = d.get('job_ref', '').split('/')[-1]
else:
data = ''
self._test_build_request_job_map[build_request.uuid] = data
@ -3627,6 +3631,7 @@ class FakeNodepool(object):
self.resources = None
self.python_path = 'auto'
self.shell_type = None
self.connection_port = None
def stop(self):
self._running = False
@ -3711,7 +3716,10 @@ class FakeNodepool(object):
remote_ip = os.environ.get('ZUUL_REMOTE_IPV4', '127.0.0.1')
if self.remote_ansible and not self.host_keys:
self.host_keys = self.keyscan(remote_ip)
host_keys = self.host_keys or ["fake-key1", "fake-key2"]
if self.host_keys is None:
host_keys = ["fake-key1", "fake-key2"]
else:
host_keys = self.host_keys
data = dict(type=node_type,
cloud='test-cloud',
provider='test-provider',
@ -3745,6 +3753,8 @@ class FakeNodepool(object):
data['connection_type'] = 'winrm'
if 'network' in node_type:
data['connection_type'] = 'network_cli'
if self.connection_port:
data['connection_port'] = self.connection_port
if 'kubernetes-namespace' in node_type or 'fedora-pod' in node_type:
data['connection_type'] = 'namespace'
data['connection_port'] = {

View File

@ -1036,7 +1036,8 @@ class TestGerritCircularDependencies(ZuulTestCase):
vars_builds = [b for b in self.builds if b.name == "project-vars-job"]
self.assertEqual(len(vars_builds), 1)
self.assertEqual(vars_builds[0].parameters["vars"]["test_var"], "pass")
self.assertEqual(vars_builds[0].job.combined_variables["test_var"],
"pass")
self.executor_server.release()
self.waitUntilSettled()
@ -1050,7 +1051,8 @@ class TestGerritCircularDependencies(ZuulTestCase):
vars_builds = [b for b in self.builds if b.name == "project-vars-job"]
self.assertEqual(len(vars_builds), 1)
self.assertEqual(vars_builds[0].parameters["vars"]["test_var"], "pass")
self.assertEqual(vars_builds[0].job.combined_variables["test_var"],
"pass")
self.executor_server.release()
self.waitUntilSettled()
@ -1064,7 +1066,8 @@ class TestGerritCircularDependencies(ZuulTestCase):
vars_builds = [b for b in self.builds if b.name == "project-vars-job"]
self.assertEqual(len(vars_builds), 1)
self.assertEqual(vars_builds[0].parameters["vars"]["test_var"], "pass")
self.assertEqual(vars_builds[0].job.combined_variables["test_var"],
"pass")
self.executor_server.release()
self.waitUntilSettled()
@ -1085,7 +1088,8 @@ class TestGerritCircularDependencies(ZuulTestCase):
vars_builds = [b for b in self.builds if b.name == "project-vars-job"]
self.assertEqual(len(vars_builds), 3)
for build in vars_builds:
self.assertEqual(build.parameters["vars"]["test_var"], "pass")
self.assertEqual(build.job.combined_variables["test_var"],
"pass")
self.executor_server.hold_jobs_in_build = False
self.executor_server.release()

View File

@ -34,9 +34,8 @@ from tests.base import (
from zuul.executor.sensors.startingbuilds import StartingBuildsSensor
from zuul.executor.sensors.ram import RAMSensor
from zuul.executor.server import AnsibleJob, JobDir, squash_variables
from zuul.lib.ansible import AnsibleManager
from zuul.model import BuildRequest, NodeSet, Group
from zuul.executor.server import squash_variables
from zuul.model import NodeSet, Group
class TestExecutorRepos(ZuulTestCase):
@ -432,119 +431,80 @@ class TestExecutorRepos(ZuulTestCase):
class TestAnsibleJob(ZuulTestCase):
tenant_config_file = 'config/ansible/main.yaml'
tenant_config_file = 'config/single-tenant/main.yaml'
def setUp(self):
super(TestAnsibleJob, self).setUp()
ansible_version = AnsibleManager().default_version
params = {
"ansible_version": ansible_version,
"zuul_event_id": 0,
"nodeset": {
"name": "dummy-node",
"node_request_id": 0,
"nodes": [],
"groups": [],
},
}
build_request = BuildRequest(
"test",
state=None,
precedence=200,
zone=None,
job_name=None,
build_set_uuid=None,
tenant_name=None,
pipeline_name=None,
event_id='1',
)
def run_job(self):
self.executor_server.hold_jobs_in_build = True
A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
A.addApproval('Code-Review', 2)
self.fake_gerrit.addEvent(A.addApproval('Approved', 1))
self.waitUntilSettled()
self.test_job = AnsibleJob(self.executor_server, build_request, params)
self.test_job.jobdir = JobDir(self.executor_server.jobdir_root,
self.executor_server.keep_jobdir,
str(build_request.uuid))
return list(self.executor_server.job_workers.values())[0]
def test_prepareNodes_host_keys(self):
# Test without connection_port set
node = {'name': 'fake-host',
'label': 'fake-label',
'state': 'ready',
'cloud': 'fake',
'host_keys': ['fake-host-key'],
'interface_ip': 'localhost'}
nodeset = {
"name": "dummy-node",
"node_request_id": 0,
"nodes": [node],
"groups": [],
}
self.test_job.nodeset = NodeSet.fromDict(nodeset)
self.test_job.prepareNodes({'host_vars': {},
'vars': {},
'groups': [],
})
keys = self.test_job.host_list[0]['host_keys']
self.assertEqual(keys[0], 'localhost fake-host-key')
def test_host_keys(self):
self.fake_nodepool.host_keys = ['fake-host-key']
job = self.run_job()
keys = job.host_list[0]['host_keys']
self.assertEqual(keys[0], '127.0.0.1 fake-host-key')
self.executor_server.hold_jobs_in_build = False
self.executor_server.release()
self.waitUntilSettled()
def test_host_keys_connection_port(self):
# Test with custom connection_port set
node['connection_port'] = 22022
self.test_job.nodeset = NodeSet.fromDict(nodeset)
self.test_job.prepareNodes({'host_vars': {},
'vars': {},
'groups': [],
})
keys = self.test_job.host_list[0]['host_keys']
self.assertEqual(keys[0], '[localhost]:22022 fake-host-key')
self.fake_nodepool.host_keys = ['fake-host-key']
self.fake_nodepool.connection_port = 22022
job = self.run_job()
keys = job.host_list[0]['host_keys']
self.assertEqual(keys[0], '[127.0.0.1]:22022 fake-host-key')
self.executor_server.hold_jobs_in_build = False
self.executor_server.release()
self.waitUntilSettled()
def test_no_host_keys_connection_port(self):
# Test with no host keys
node['host_keys'] = []
self.test_job.nodeset = NodeSet.fromDict(nodeset)
self.test_job.prepareNodes({'nodes': [node],
'host_vars': {},
'vars': {},
'groups': [],
})
host = self.test_job.host_list[0]
self.assertEqual(host['host_keys'], [])
self.fake_nodepool.host_keys = []
self.fake_nodepool.connection_port = 22022
job = self.run_job()
keys = job.host_list[0]['host_keys']
self.assertEqual(keys, [])
self.assertEqual(
host['host_vars']['ansible_ssh_common_args'],
job.host_list[0]['host_vars']['ansible_ssh_common_args'],
'-o StrictHostKeyChecking=false')
def test_prepareNodes_shell_type(self):
self.executor_server.hold_jobs_in_build = False
self.executor_server.release()
self.waitUntilSettled()
def test_no_shell_type(self):
# Test without shell type set
node = {'name': 'fake-host',
'label': 'fake-label',
'state': 'ready',
'cloud': 'fake',
'host_keys': ['fake-host-key'],
'interface_ip': 'localhost'}
nodeset = {
"name": "dummy-node",
"node_request_id": 0,
"nodes": [node],
"groups": [],
}
self.test_job.nodeset = NodeSet.fromDict(nodeset)
self.test_job.prepareNodes({'host_vars': {},
'vars': {},
'groups': [],
})
host = self.test_job.host_list[0]
job = self.run_job()
host = job.host_list[0]
self.assertNotIn('ansible_shell_type', host['host_vars'])
self.executor_server.hold_jobs_in_build = False
self.executor_server.release()
self.waitUntilSettled()
def test_custom_shell_type(self):
# Test with custom shell type set.
node['shell_type'] = 'cmd'
self.test_job.nodeset = NodeSet.fromDict(nodeset)
self.test_job.prepareNodes({'host_vars': {},
'vars': {},
'groups': [],
})
host = self.test_job.host_list[0]
self.fake_nodepool.shell_type = 'cmd'
job = self.run_job()
host = job.host_list[0]
self.assertIn('ansible_shell_type', host['host_vars'])
self.assertEqual(
host['host_vars']['ansible_shell_type'],
'cmd')
self.executor_server.hold_jobs_in_build = False
self.executor_server.release()
self.waitUntilSettled()
class TestExecutorHostname(ZuulTestCase):
config_file = 'zuul-executor-hostname.conf'

View File

@ -946,6 +946,9 @@ class TestScheduler(ZuulTestCase):
def test_failed_change_at_head_with_queue(self):
"Test that if a change at the head fails, queued jobs are canceled"
def get_name(params):
return params.get('job_ref', '').split('/')[-1]
self.hold_jobs_in_queue = True
A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B')
@ -966,7 +969,7 @@ class TestScheduler(ZuulTestCase):
self.assertEqual(len(queue), 1)
self.assertEqual(queue[0].zone, None)
params = self.executor_server.executor_api.getParams(queue[0])
self.assertEqual(params['job'], 'project-merge')
self.assertEqual(get_name(params), 'project-merge')
self.assertEqual(params['items'][0]['number'], '%d' % A.number)
self.executor_api.release('.*-merge')
@ -982,12 +985,12 @@ class TestScheduler(ZuulTestCase):
self.assertEqual(len(self.builds), 0)
self.assertEqual(len(queue), 6)
self.assertEqual(params[0]['job'], 'project-test1')
self.assertEqual(params[1]['job'], 'project-test2')
self.assertEqual(params[2]['job'], 'project-test1')
self.assertEqual(params[3]['job'], 'project-test2')
self.assertEqual(params[4]['job'], 'project-test1')
self.assertEqual(params[5]['job'], 'project-test2')
self.assertEqual(get_name(params[0]), 'project-test1')
self.assertEqual(get_name(params[1]), 'project-test2')
self.assertEqual(get_name(params[2]), 'project-test1')
self.assertEqual(get_name(params[3]), 'project-test2')
self.assertEqual(get_name(params[4]), 'project-test1')
self.assertEqual(get_name(params[5]), 'project-test2')
self.executor_api.release(queue[0])
self.waitUntilSettled()
@ -3330,59 +3333,61 @@ class TestScheduler(ZuulTestCase):
], ordered=False)
j = self.getJobFromHistory('parentjob')
rp = set([p['name'] for p in j.parameters['projects']])
self.assertEqual(j.parameters['vars']['project_var'], 'set_in_project')
self.assertEqual(j.parameters['vars']['template_var1'],
'set_in_template1')
self.assertEqual(j.parameters['vars']['template_var2'],
'set_in_template2')
self.assertEqual(j.parameters['vars']['override'], 0)
self.assertEqual(j.parameters['vars']['child1override'], 0)
self.assertEqual(j.parameters['vars']['parent'], 0)
self.assertEqual(j.parameters['vars']['deep']['override'], 0)
self.assertFalse('child1' in j.parameters['vars'])
self.assertFalse('child2' in j.parameters['vars'])
self.assertFalse('child3' in j.parameters['vars'])
job_vars = j.job.combined_variables
self.assertEqual(job_vars['project_var'], 'set_in_project')
self.assertEqual(job_vars['template_var1'], 'set_in_template1')
self.assertEqual(job_vars['template_var2'], 'set_in_template2')
self.assertEqual(job_vars['override'], 0)
self.assertEqual(job_vars['child1override'], 0)
self.assertEqual(job_vars['parent'], 0)
self.assertEqual(job_vars['deep']['override'], 0)
self.assertFalse('child1' in job_vars)
self.assertFalse('child2' in job_vars)
self.assertFalse('child3' in job_vars)
self.assertEqual(rp, set(['org/project', 'org/project0',
'org/project0']))
j = self.getJobFromHistory('child1')
rp = set([p['name'] for p in j.parameters['projects']])
self.assertEqual(j.parameters['vars']['project_var'], 'set_in_project')
self.assertEqual(j.parameters['vars']['override'], 1)
self.assertEqual(j.parameters['vars']['child1override'], 1)
self.assertEqual(j.parameters['vars']['parent'], 0)
self.assertEqual(j.parameters['vars']['child1'], 1)
self.assertEqual(j.parameters['vars']['deep']['override'], 1)
self.assertFalse('child2' in j.parameters['vars'])
self.assertFalse('child3' in j.parameters['vars'])
job_vars = j.job.combined_variables
self.assertEqual(job_vars['project_var'], 'set_in_project')
self.assertEqual(job_vars['override'], 1)
self.assertEqual(job_vars['child1override'], 1)
self.assertEqual(job_vars['parent'], 0)
self.assertEqual(job_vars['child1'], 1)
self.assertEqual(job_vars['deep']['override'], 1)
self.assertFalse('child2' in job_vars)
self.assertFalse('child3' in job_vars)
self.assertEqual(rp, set(['org/project', 'org/project0',
'org/project1']))
j = self.getJobFromHistory('child2')
self.assertEqual(j.parameters['vars']['project_var'], 'set_in_project')
job_vars = j.job.combined_variables
self.assertEqual(job_vars['project_var'], 'set_in_project')
rp = set([p['name'] for p in j.parameters['projects']])
self.assertEqual(j.parameters['vars']['override'], 2)
self.assertEqual(j.parameters['vars']['child1override'], 0)
self.assertEqual(j.parameters['vars']['parent'], 0)
self.assertEqual(j.parameters['vars']['deep']['override'], 2)
self.assertFalse('child1' in j.parameters['vars'])
self.assertEqual(j.parameters['vars']['child2'], 2)
self.assertFalse('child3' in j.parameters['vars'])
self.assertEqual(job_vars['override'], 2)
self.assertEqual(job_vars['child1override'], 0)
self.assertEqual(job_vars['parent'], 0)
self.assertEqual(job_vars['deep']['override'], 2)
self.assertFalse('child1' in job_vars)
self.assertEqual(job_vars['child2'], 2)
self.assertFalse('child3' in job_vars)
self.assertEqual(rp, set(['org/project', 'org/project0',
'org/project2']))
j = self.getJobFromHistory('child3')
self.assertEqual(j.parameters['vars']['project_var'], 'set_in_project')
job_vars = j.job.combined_variables
self.assertEqual(job_vars['project_var'], 'set_in_project')
rp = set([p['name'] for p in j.parameters['projects']])
self.assertEqual(j.parameters['vars']['override'], 3)
self.assertEqual(j.parameters['vars']['child1override'], 0)
self.assertEqual(j.parameters['vars']['parent'], 0)
self.assertEqual(j.parameters['vars']['deep']['override'], 3)
self.assertFalse('child1' in j.parameters['vars'])
self.assertFalse('child2' in j.parameters['vars'])
self.assertEqual(j.parameters['vars']['child3'], 3)
self.assertEqual(job_vars['override'], 3)
self.assertEqual(job_vars['child1override'], 0)
self.assertEqual(job_vars['parent'], 0)
self.assertEqual(job_vars['deep']['override'], 3)
self.assertFalse('child1' in job_vars)
self.assertFalse('child2' in job_vars)
self.assertEqual(job_vars['child3'], 3)
self.assertEqual(rp, set(['org/project', 'org/project0',
'org/project3']))
j = self.getJobFromHistory('override_project_var')
self.assertEqual(j.parameters['vars']['project_var'],
'override_in_job')
job_vars = j.job.combined_variables
self.assertEqual(job_vars['project_var'], 'override_in_job')
@simple_layout('layouts/job-variants.yaml')
def test_job_branch_variants(self):
@ -3413,39 +3418,39 @@ class TestScheduler(ZuulTestCase):
dict(name='python27', result='SUCCESS'),
])
p = self.history[0].parameters
self.assertEqual(p['timeout'], 40)
self.assertEqual(len(p['nodeset']['nodes']), 1)
self.assertEqual(p['nodeset']['nodes'][0]['label'], 'new')
self.assertEqual([x['path'] for x in p['pre_playbooks']],
j = self.history[0].job
self.assertEqual(j.timeout, 40)
self.assertEqual(len(j.nodeset.nodes), 1)
self.assertEqual(next(iter(j.nodeset.nodes.values())).label, 'new')
self.assertEqual([x['path'] for x in j.pre_run],
['base-pre', 'py27-pre'])
self.assertEqual([x['path'] for x in p['post_playbooks']],
self.assertEqual([x['path'] for x in j.post_run],
['py27-post-a', 'py27-post-b', 'base-post'])
self.assertEqual([x['path'] for x in p['playbooks']],
self.assertEqual([x['path'] for x in j.run],
['playbooks/python27.yaml'])
p = self.history[1].parameters
self.assertEqual(p['timeout'], 50)
self.assertEqual(len(p['nodeset']['nodes']), 1)
self.assertEqual(p['nodeset']['nodes'][0]['label'], 'old')
self.assertEqual([x['path'] for x in p['pre_playbooks']],
j = self.history[1].job
self.assertEqual(j.timeout, 50)
self.assertEqual(len(j.nodeset.nodes), 1)
self.assertEqual(next(iter(j.nodeset.nodes.values())).label, 'old')
self.assertEqual([x['path'] for x in j.pre_run],
['base-pre', 'py27-pre', 'py27-diablo-pre'])
self.assertEqual([x['path'] for x in p['post_playbooks']],
self.assertEqual([x['path'] for x in j.post_run],
['py27-diablo-post', 'py27-post-a', 'py27-post-b',
'base-post'])
self.assertEqual([x['path'] for x in p['playbooks']],
self.assertEqual([x['path'] for x in j.run],
['py27-diablo'])
p = self.history[2].parameters
self.assertEqual(p['timeout'], 40)
self.assertEqual(len(p['nodeset']['nodes']), 1)
self.assertEqual(p['nodeset']['nodes'][0]['label'], 'new')
self.assertEqual([x['path'] for x in p['pre_playbooks']],
j = self.history[2].job
self.assertEqual(j.timeout, 40)
self.assertEqual(len(j.nodeset.nodes), 1)
self.assertEqual(next(iter(j.nodeset.nodes.values())).label, 'new')
self.assertEqual([x['path'] for x in j.pre_run],
['base-pre', 'py27-pre', 'py27-essex-pre'])
self.assertEqual([x['path'] for x in p['post_playbooks']],
self.assertEqual([x['path'] for x in j.post_run],
['py27-essex-post', 'py27-post-a', 'py27-post-b',
'base-post'])
self.assertEqual([x['path'] for x in p['playbooks']],
self.assertEqual([x['path'] for x in j.run],
['playbooks/python27.yaml'])
@simple_layout("layouts/no-run.yaml")

View File

@ -724,7 +724,7 @@ class TestBranchVariants(ZuulTestCase):
self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
self.waitUntilSettled()
self.assertEqual(len(self.builds[0].parameters['pre_playbooks']), 3)
self.assertEqual(len(self.builds[0].job.pre_run), 3)
self.executor_server.hold_jobs_in_build = False
self.executor_server.release()
self.waitUntilSettled()

View File

@ -48,17 +48,11 @@ def construct_build_params(uuid, connections, job, item, pipeline,
pipeline=pipeline.name,
post_review=pipeline.post_review,
job=job.name,
voting=job.voting,
project=project,
tenant=tenant.name,
timeout=job.timeout,
event_id=item.event.zuul_event_id if item.event else None,
jobtags=sorted(job.tags),
_inheritance_path=list(job.inheritance_path))
if job.artifact_data:
zuul_params['artifacts'] = job.artifact_data
if job.override_checkout:
zuul_params['override_checkout'] = job.override_checkout
)
if hasattr(item.change, 'branch'):
zuul_params['branch'] = item.change.branch
if hasattr(item.change, 'tag'):
@ -83,31 +77,18 @@ def construct_build_params(uuid, connections, job, item, pipeline,
getDirectDependentJobs(job.name))
params = dict()
params['job'] = job.name
params['timeout'] = job.timeout
params['post_timeout'] = job.post_timeout
params['job_ref'] = job.getPath()
params['items'] = merger_items
params['projects'] = []
if hasattr(item.change, 'branch'):
params['branch'] = item.change.branch
else:
params['branch'] = None
params['override_branch'] = job.override_branch
params['override_checkout'] = job.override_checkout
merge_rs = item.current_build_set.merge_repo_state
params['merge_repo_state_ref'] = merge_rs and merge_rs.getPath()
extra_rs = item.current_build_set.extra_repo_state
params['extra_repo_state_ref'] = extra_rs and extra_rs.getPath()
params['ansible_version'] = job.ansible_version
params['workspace_scheme'] = job.workspace_scheme
if job.name != 'noop':
params['playbooks'] = job.run
params['pre_playbooks'] = job.pre_run
params['post_playbooks'] = job.post_run
params['cleanup_playbooks'] = job.cleanup_run
params["nodeset"] = job.nodeset.toDict()
params['ssh_keys'] = []
if pipeline.post_review:
if redact_secrets_and_keys:
@ -116,11 +97,6 @@ def construct_build_params(uuid, connections, job, item, pipeline,
params['ssh_keys'].append(dict(
connection_name=item.change.project.connection_name,
project_name=item.change.project.name))
params['vars'] = job.combined_variables
params['extra_vars'] = job.extra_variables
params['host_vars'] = job.host_variables
params['group_vars'] = job.group_variables
params['secret_vars'] = job.secret_parent_data
params['zuul'] = zuul_params
projects = set()
required_projects = set()
@ -189,3 +165,18 @@ def construct_build_params(uuid, connections, job, item, pipeline,
if item.event:
params['zuul_event_id'] = item.event.zuul_event_id
return params
def zuul_params_from_job(job):
zuul_params = {
"job": job.name,
"voting": job.voting,
"timeout": job.timeout,
"jobtags": sorted(job.tags),
"_inheritance_path": list(job.inheritance_path),
}
if job.artifact_data:
zuul_params['artifacts'] = job.artifact_data
if job.override_checkout:
zuul_params['override_checkout'] = job.override_checkout
return zuul_params

View File

@ -54,6 +54,7 @@ from zuul.executor.sensors.hdd import HDDSensor
from zuul.executor.sensors.pause import PauseSensor
from zuul.executor.sensors.startingbuilds import StartingBuildsSensor
from zuul.executor.sensors.ram import RAMSensor
from zuul.executor.common import zuul_params_from_job
from zuul.lib import commandsocket
from zuul.merger.server import BaseMergeServer, RepoLocks
from zuul.model import (
@ -63,8 +64,8 @@ from zuul.model import (
BuildStartedEvent,
BuildStatusEvent,
ExtraRepoState,
FrozenJob,
MergeRepoState,
NodeSet,
)
import zuul.model
from zuul.nodepool import Nodepool
@ -822,10 +823,10 @@ def make_setup_inventory_dict(nodes, hostvars):
return inventory
def is_group_var_set(name, host, nodeset, args):
def is_group_var_set(name, host, nodeset, job):
for group in nodeset.getGroups():
if host in group.nodes:
group_vars = args['group_vars'].get(group.name, {})
group_vars = job.group_variables.get(group.name, {})
if name in group_vars:
return True
return False
@ -889,13 +890,16 @@ class AnsibleJob(object):
def __init__(self, executor_server, build_request, arguments):
logger = logging.getLogger("zuul.AnsibleJob")
self.arguments = arguments
self.job = FrozenJob.fromZK(executor_server.zk_context,
arguments["job_ref"])
self.arguments["zuul"].update(zuul_params_from_job(self.job))
self.zuul_event_id = self.arguments["zuul_event_id"]
# Record ansible version being used for the cleanup phase
self.ansible_version = self.arguments.get('ansible_version')
self.ansible_version = self.job.ansible_version
# TODO(corvus): Remove default setting after 4.3.0; this is to handle
# scheduler/executor version skew.
self.scheme = self.arguments.get('workspace_scheme',
zuul.model.SCHEME_GOLANG)
self.scheme = self.job.workspace_scheme or zuul.model.SCHEME_GOLANG
self.log = get_annotated_logger(
logger, self.zuul_event_id, build=build_request.uuid
)
@ -946,10 +950,10 @@ class AnsibleJob(object):
'executor', 'variables')
plugin_dir = self.executor_server.ansible_manager.getAnsiblePluginDir(
self.arguments.get('ansible_version'))
self.ansible_version)
self.ara_callbacks = \
self.executor_server.ansible_manager.getAraCallbackPlugin(
self.arguments.get('ansible_version'))
self.ansible_version)
self.library_dir = os.path.join(plugin_dir, 'library')
self.action_dir = os.path.join(plugin_dir, 'action')
self.action_dir_general = os.path.join(plugin_dir, 'actiongeneral')
@ -1100,7 +1104,7 @@ class AnsibleJob(object):
try:
# This shouldn't fail - but theoretically it could. So we handle
# it similar to a NodeRequestError.
self.nodeset = NodeSet.fromDict(self.arguments["nodeset"])
self.nodeset = self.job.nodeset
except KeyError:
self.log.error("Unable to deserialize nodeset")
raise NodeRequestError
@ -1218,8 +1222,8 @@ class AnsibleJob(object):
# ...as well as all playbook and role projects.
repos = []
playbooks = (args['pre_playbooks'] + args['playbooks'] +
args['post_playbooks'] + args['cleanup_playbooks'])
playbooks = (self.job.pre_run + self.job.run +
self.job.post_run + self.job.cleanup_run)
for playbook in playbooks:
repos.append(playbook)
repos += playbook['roles']
@ -1344,8 +1348,8 @@ class AnsibleJob(object):
project['canonical_name'],
ref,
args['branch'],
args['override_branch'],
args['override_checkout'],
self.job.override_branch,
self.job.override_checkout,
project['override_branch'],
project['override_checkout'],
project['default_branch'])
@ -1651,7 +1655,7 @@ class AnsibleJob(object):
# pre-run and run playbooks. post-run is different because
# it is used to copy out job logs and we want to do our best
# to copy logs even when the job has timed out.
job_timeout = args['timeout']
job_timeout = self.job.timeout
for index, playbook in enumerate(self.jobdir.pre_playbooks):
# TODOv3(pabelanger): Implement pre-run timeout setting.
ansible_timeout = self.getAnsibleTimeout(time_started, job_timeout)
@ -1714,7 +1718,7 @@ class AnsibleJob(object):
if self.aborted:
return 'ABORTED'
post_timeout = args['post_timeout']
post_timeout = self.job.post_timeout
post_unreachable = False
for index, playbook in enumerate(self.jobdir.post_playbooks):
# Post timeout operates a little differently to the main job
@ -1804,7 +1808,7 @@ class AnsibleJob(object):
for name in node.name:
ip = node.interface_ip
port = node.connection_port
host_vars = args['host_vars'].get(name, {}).copy()
host_vars = self.job.host_variables.get(name, {}).copy()
check_varnames(host_vars)
host_vars.update(dict(
ansible_host=ip,
@ -1834,8 +1838,8 @@ class AnsibleJob(object):
# var or all-var, then don't do anything here; let the
# user control.
api = 'ansible_python_interpreter'
if (api not in args['vars'] and
not is_group_var_set(api, name, self.nodeset, args)):
if (api not in self.job.combined_variables and
not is_group_var_set(api, name, self.nodeset, self.job)):
python = getattr(node, 'python_path', 'auto')
host_vars.setdefault(api, python)
@ -1947,12 +1951,12 @@ class AnsibleJob(object):
self.writeAnsibleConfig(self.jobdir.setup_playbook)
self.writeAnsibleConfig(self.jobdir.freeze_playbook)
for playbook in args['pre_playbooks']:
for playbook in self.job.pre_run:
jobdir_playbook = self.jobdir.addPrePlaybook()
self.preparePlaybook(jobdir_playbook, playbook, args)
job_playbook = None
for playbook in args['playbooks']:
for playbook in self.job.run:
jobdir_playbook = self.jobdir.addPlaybook()
self.preparePlaybook(jobdir_playbook, playbook, args)
if jobdir_playbook.path is not None:
@ -1962,11 +1966,11 @@ class AnsibleJob(object):
if job_playbook is None:
raise ExecutorError("No playbook specified")
for playbook in args['post_playbooks']:
for playbook in self.job.post_run:
jobdir_playbook = self.jobdir.addPostPlaybook()
self.preparePlaybook(jobdir_playbook, playbook, args)
for playbook in args['cleanup_playbooks']:
for playbook in self.job.cleanup_run:
jobdir_playbook = self.jobdir.addCleanupPlaybook()
self.preparePlaybook(jobdir_playbook, playbook, args)
@ -2006,7 +2010,7 @@ class AnsibleJob(object):
self.prepareRole(jobdir_playbook, role, args)
secrets = self.decryptSecrets(playbook['secrets'])
secrets = self.mergeSecretVars(secrets, args)
secrets = self.mergeSecretVars(secrets)
if secrets:
check_varnames(secrets)
secrets = yaml.mark_strings_unsafe(secrets)
@ -2133,15 +2137,14 @@ class AnsibleJob(object):
project.name)
return path
def mergeSecretVars(self, secrets, args):
def mergeSecretVars(self, secrets):
'''
Merge secret return data with secrets.
:arg secrets dict: Actual Zuul secrets.
:arg args dict: The job arguments.
'''
secret_vars = args.get('secret_vars') or {}
secret_vars = self.job.secret_parent_data or {}
# We need to handle secret vars specially. We want to pass
# them to Ansible as we do secrets, but we want them to have
@ -2150,12 +2153,12 @@ class AnsibleJob(object):
# anything above it in precedence.
other_vars = set()
other_vars.update(args['vars'].keys())
for group_vars in args['group_vars'].values():
other_vars.update(self.job.combined_variables.keys())
for group_vars in self.job.group_variables.values():
other_vars.update(group_vars.keys())
for host_vars in args['host_vars'].values():
for host_vars in self.job.host_variables.values():
other_vars.update(host_vars.keys())
other_vars.update(args['extra_vars'].keys())
other_vars.update(self.job.extra_variables.keys())
other_vars.update(secrets.keys())
ret = secret_vars.copy()
@ -2224,8 +2227,8 @@ class AnsibleJob(object):
project.canonical_name,
None,
args['branch'],
args['override_branch'],
args['override_checkout'],
self.job.override_branch,
self.job.override_checkout,
args_project.get('override_branch'),
args_project.get('override_checkout'),
role['project_default_branch'])
@ -2382,16 +2385,16 @@ class AnsibleJob(object):
return zuul_resources
def prepareVars(self, args, zuul_resources):
all_vars = args['vars'].copy()
all_vars = self.job.combined_variables.copy()
check_varnames(all_vars)
# Check the group and extra var names for safety; they'll get
# merged later
for group in self.nodeset.getGroups():
group_vars = args['group_vars'].get(group.name, {})
group_vars = self.job.group_variables.get(group.name, {})
check_varnames(group_vars)
check_varnames(args['extra_vars'])
check_varnames(self.job.extra_variables)
zuul_vars = {}
# Start with what the client supplied
@ -2422,7 +2425,7 @@ class AnsibleJob(object):
host_list = self.host_list + [localhost]
self.original_hostvars = squash_variables(
host_list, self.nodeset, all_vars,
args['group_vars'], args['extra_vars'])
self.job.group_variables, self.job.extra_variables)
def loadFrozenHostvars(self):
# Read in the frozen hostvars, and remove the frozen variable

View File

@ -3959,7 +3959,8 @@ class QueueItem(zkobject.ZKObject):
return obj
def getPath(self):
return self.itemPath(self.pipeline.state.getPath(), self.uuid)
return self.itemPath(PipelineState.pipelinePath(self.pipeline),
self.uuid)
@classmethod
def itemPath(cls, pipeline_path, item_uuid):

View File

@ -1451,7 +1451,27 @@ class ZuulWebAPI(object):
uuid = "0" * 32
params = zuul.executor.common.construct_build_params(
uuid, self.zuulweb.connections, job, item, item.pipeline)
params['zuul'].update(zuul.executor.common.zuul_params_from_job(job))
del params['job_ref']
params['job'] = job.name
params['zuul']['buildset'] = None
params['timeout'] = job.timeout
params['post_timeout'] = job.post_timeout
params['override_branch'] = job.override_branch
params['override_checkout'] = job.override_checkout
params['ansible_version'] = job.ansible_version
params['workspace_scheme'] = job.workspace_scheme
if job.name != 'noop':
params['playbooks'] = job.run
params['pre_playbooks'] = job.pre_run
params['post_playbooks'] = job.post_run
params['cleanup_playbooks'] = job.cleanup_run
params["nodeset"] = job.nodeset.toDict()
params['vars'] = job.combined_variables
params['extra_vars'] = job.extra_variables
params['host_vars'] = job.host_variables
params['group_vars'] = job.group_variables
params['secret_vars'] = job.secret_parent_data
ret = params
resp = cherrypy.response