Pass result data back from ansible

This adds a JSON results file (work/results.json) that the job can
write to.  It is loaded by the executor after the job completes and
its contents are returned to the scheduler.

We can use the data in this file to supply the reported log URL for
the build.  Later we can also use it to supply file/line comments in
reviews.

Change-Id: Ib4eb743405f337c5bd541dd147e687fd44699713
James E. Blair 2017-06-30 15:42:29 -07:00
parent adafa6c12d
commit 196f61a552
11 changed files with 182 additions and 10 deletions
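
To orient the reader before the per-file hunks: a playbook calls zuul_return, which merges its data into work/results.json inside the job dir; the executor bundles that dictionary with the playbook result and sends it back, and the scheduler stores it on the build for reporters to use. A rough sketch of the payload shape (illustrative only, not code from this change):

import json

# What a job returned via zuul_return, as read from work/results.json.
returned = {'zuul': {'log_url': 'http://logs.example.com/path/to/build/logs'}}

# Executor side: bundled with the playbook result and sent over Gearman.
payload = json.dumps(dict(result='SUCCESS', data=returned))

# Client side: unpacked and passed to Scheduler.onBuildCompleted(), which
# stores it as build.result_data for reporters to consult.
result_data = json.loads(payload).get('data', {})
print(result_data['zuul']['log_url'])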

View File

@@ -101,3 +101,28 @@ untrusted job content.
.. TODO: describe standard lib and link to published docs for it.

Return Values
-------------

The job may return some values to Zuul to affect its behavior. To
return a value, use the *zuul_return* Ansible module in a job
playbook. For example::

  tasks:
    - zuul_return:
        data:
          foo: bar

This will return the dictionary "{'foo': 'bar'}" to Zuul.

.. TODO: xref to section describing formatting

Several uses of these values are planned, but the only currently
implemented use is to set the log URL for a build. To do so, set the
**zuul.log_url** value. For example::

  tasks:
    - zuul_return:
        data:
          zuul:
            log_url: http://logs.example.com/path/to/build/logs
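
For context (an aside, not part of the documented syntax): the values collected this way are written to *work/results.json* in the job's working directory on the executor, so after the example above that file would contain roughly::

  {"zuul": {"log_url": "http://logs.example.com/path/to/build/logs"}}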

View File

@@ -0,0 +1,6 @@
- hosts: localhost
  tasks:
    - zuul_return:
        data:
          zuul:
            log_url: test/log/url

View File

@@ -0,0 +1,22 @@
- pipeline:
    name: check
    manager: independent
    allow-secrets: true
    trigger:
      gerrit:
        - event: patchset-created
    success:
      gerrit:
        verified: 1
    failure:
      gerrit:
        verified: -1

- job:
    name: data-return

- project:
    name: org/project
    check:
      jobs:
        - data-return

View File

@@ -0,0 +1 @@
test

View File

@@ -0,0 +1,8 @@
- tenant:
    name: tenant-one
    source:
      gerrit:
        config-projects:
          - common-config
        untrusted-projects:
          - org/project

View File

@@ -700,3 +700,19 @@ class TestShadow(ZuulTestCase):
            dict(name='test1', result='SUCCESS', changes='1,1'),
            dict(name='test2', result='SUCCESS', changes='1,1'),
        ], ordered=False)


class TestDataReturn(AnsibleZuulTestCase):
    tenant_config_file = 'config/data-return/main.yaml'

    def test_data_return(self):
        # This exercises a job that returns data to Zuul via
        # zuul_return, which is then used as the reported log URL.
        A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
        self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
        self.waitUntilSettled()
        self.assertHistory([
            dict(name='data-return', result='SUCCESS', changes='1,1'),
        ])
        self.assertIn('- data-return test/log/url',
                      A.messages[-1])

View File

@@ -0,0 +1,72 @@
#!/usr/bin/python
# Copyright (c) 2017 Red Hat
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.

import os
import json
import tempfile


def set_value(path, new_data, new_file):
    workdir = os.path.dirname(path)
    data = None
    if os.path.exists(path):
        with open(path, 'r') as f:
            data = f.read()
    if data:
        data = json.loads(data)
    else:
        data = {}

    if new_file:
        with open(new_file, 'r') as f:
            data.update(json.load(f))
    if new_data:
        data.update(new_data)

    (f, tmp_path) = tempfile.mkstemp(dir=workdir)
    try:
        f = os.fdopen(f, 'w')
        json.dump(data, f)
        f.close()
        os.rename(tmp_path, path)
    except Exception:
        os.unlink(tmp_path)
        raise


def main():
    module = AnsibleModule(
        argument_spec=dict(
            path=dict(required=False, type='str'),
            data=dict(required=False, type='dict'),
            file=dict(required=False, type='str'),
        )
    )

    p = module.params
    path = p['path']
    if not path:
        path = os.path.join(os.environ['ZUUL_JOBDIR'], 'work',
                            'results.json')
    set_value(path, p['data'], p['file'])
    module.exit_json(changed=True, e=os.environ)


from ansible.module_utils.basic import *  # noqa
from ansible.module_utils.basic import AnsibleModule

if __name__ == '__main__':
    main()
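
A note on merge semantics: set_value accumulates top-level keys across calls with dict.update(), which is a shallow merge. The sketch below exercises set_value directly against a scratch file (illustrative only; it assumes the function above is importable, which is not how the module is normally invoked):

import json
import os
import tempfile

workdir = tempfile.mkdtemp()
path = os.path.join(workdir, 'results.json')

# Two successive returns accumulate distinct top-level keys.
set_value(path, {'zuul': {'log_url': 'http://logs.example.com/1'}}, None)
set_value(path, {'foo': 'bar'}, None)

with open(path) as f:
    print(json.load(f))
# -> {'zuul': {'log_url': 'http://logs.example.com/1'}, 'foo': 'bar'}
# Because the merge is shallow, a later call returning another top-level
# 'zuul' dict would replace the earlier one rather than deep-merging it.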

View File

@@ -298,7 +298,7 @@ class ExecutorClient(object):
         build.parameters = params
 
         if job.name == 'noop':
-            self.sched.onBuildCompleted(build, 'SUCCESS')
+            self.sched.onBuildCompleted(build, 'SUCCESS', {})
             return build
 
         gearman_job = gear.TextJob('executor:execute', json.dumps(params),
@@ -386,9 +386,10 @@ class ExecutorClient(object):
                     result = 'RETRY_LIMIT'
                 else:
                     build.retry = True
+            result_data = data.get('data', {})
             self.log.info("Build %s complete, result %s" %
                           (job, result))
-            self.sched.onBuildCompleted(build, result)
+            self.sched.onBuildCompleted(build, result, result_data)
             # The test suite expects the build to be removed from the
             # internal dict after it's added to the report queue.
             del self.builds[job.unique]

View File

@@ -187,6 +187,9 @@ class JobDir(object):
         os.makedirs(self.ansible_root)
         ssh_dir = os.path.join(self.work_root, '.ssh')
         os.mkdir(ssh_dir, 0o700)
+        self.result_data_file = os.path.join(self.work_root, 'results.json')
+        with open(self.result_data_file, 'w'):
+            pass
         self.known_hosts = os.path.join(ssh_dir, 'known_hosts')
         self.inventory = os.path.join(self.ansible_root, 'inventory.yaml')
         self.playbooks = []  # The list of candidate playbooks
@@ -835,12 +838,22 @@ class AnsibleJob(object):
         self.job.sendWorkStatus(0, 100)
 
         result = self.runPlaybooks(args)
-
-        if result is None:
-            self.job.sendWorkFail()
-            return
-        result = dict(result=result)
-        self.job.sendWorkComplete(json.dumps(result))
+        data = self.getResultData()
+        result_data = json.dumps(dict(result=result,
+                                      data=data))
+        self.log.debug("Sending result: %s" % (result_data,))
+        self.job.sendWorkComplete(result_data)
+
+    def getResultData(self):
+        data = {}
+        try:
+            with open(self.jobdir.result_data_file) as f:
+                file_data = f.read()
+                if file_data:
+                    data = json.loads(file_data)
+        except Exception:
+            self.log.exception("Unable to load result data:")
+        return data
 
     def doMergeChanges(self, merger, items, repo_state):
         ret = merger.mergeChanges(items, repo_state=repo_state)
@@ -1185,7 +1198,8 @@ class AnsibleJob(object):
         all_vars['zuul']['executor'] = dict(
             hostname=self.executor_server.hostname,
             src_root=self.jobdir.src_root,
-            log_root=self.jobdir.log_root)
+            log_root=self.jobdir.log_root,
+            result_data_file=self.jobdir.result_data_file)
 
         nodes = self.getHostList(args)
         inventory = make_inventory_dict(nodes, args['groups'], all_vars)
@@ -1277,6 +1291,7 @@ class AnsibleJob(object):
         env_copy.update(self.ssh_agent.env)
         env_copy['LOGNAME'] = 'zuul'
         env_copy['ZUUL_JOB_OUTPUT_FILE'] = self.jobdir.job_output_file
+        env_copy['ZUUL_JOBDIR'] = self.jobdir.root
         pythonpath = env_copy.get('PYTHONPATH')
         if pythonpath:
             pythonpath = [pythonpath]

View File

@@ -1077,6 +1077,7 @@ class Build(object):
         self.uuid = uuid
         self.url = None
         self.result = None
+        self.result_data = {}
         self.build_set = None
         self.execute_time = time.time()
         self.start_time = None
@@ -1095,7 +1096,9 @@ class Build(object):
                 (self.uuid, self.job.name, self.worker))
 
     def getSafeAttributes(self):
-        return Attributes(uuid=self.uuid)
+        return Attributes(uuid=self.uuid,
+                          result=self.result,
+                          result_data=self.result_data)
 
 
 class Worker(object):
@@ -1626,6 +1629,8 @@ class QueueItem(object):
         url = None
         if pattern:
             url = self.formatUrlPattern(pattern, job, build)
+        if not url:
+            url = build.result_data.get('zuul', {}).get('log_url')
         if not url:
             url = build.url or job.name
         return (result, url)
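
To summarize the reporting change in one place: the sketch below mirrors the URL fallback order that formatJobResult follows after this hunk (hypothetical helper, not Zuul code):

def resolve_report_url(pattern_url, result_data, build_url, job_name):
    # Configured url_pattern first, then the job-returned zuul.log_url,
    # then the raw build URL, then finally the job name.
    url = pattern_url
    if not url:
        url = result_data.get('zuul', {}).get('log_url')
    if not url:
        url = build_url or job_name
    return url

# With the test fixture's returned data and no url_pattern configured:
print(resolve_report_url(None, {'zuul': {'log_url': 'test/log/url'}},
                         None, 'data-return'))  # -> test/log/url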

View File

@@ -273,10 +273,11 @@ class Scheduler(threading.Thread):
         self.wake_event.set()
         self.log.debug("Done adding start event for build: %s" % build)
 
-    def onBuildCompleted(self, build, result):
+    def onBuildCompleted(self, build, result, result_data):
         self.log.debug("Adding complete event for build: %s result: %s" % (
             build, result))
         build.end_time = time.time()
+        build.result_data = result_data
         # Note, as soon as the result is set, other threads may act
         # upon this, even though the event hasn't been fully
         # processed. Ensure that any other data from the event (eg,