Run pre and post playbooks

This allows jobs to specify pre and post playbooks.  Jobs which inherit
from parents or variants combine their pre and post playbooks with their
parents' in onion fashion -- the outermost (parent) pre playbooks run
first and the outermost post playbooks run last.

Change-Id: Ic844dcac77d87481534745a220664d72be2ffa7c
James E. Blair 2017-01-31 14:47:52 -08:00
parent 1774dd5178
commit 66b274ea15
9 changed files with 217 additions and 98 deletions
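
For illustration, a minimal Python sketch of the ordering this produces (the playbook names here are made up; the real logic is the list handling added to Job.inheritFrom in zuul/model.py below):

# Onion ordering sketch: a child job inheriting from a parent.
parent_pre, parent_post = ['parent-pre'], ['parent-post']
child_pre, child_post = ['child-pre'], ['child-post']

# As in Job.inheritFrom: the parent's pre playbooks are prepended and
# the parent's post playbooks are appended.
pre_run = parent_pre + child_pre      # parent-pre runs before child-pre
post_run = child_post + parent_post   # parent-post runs after child-post

# Full run order: pre_run, the job's own playbook, then post_run, so the
# outermost (parent) pre playbook runs first and its post playbook runs last.
print(pre_run + ['job'] + post_run)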


@@ -0,0 +1,5 @@
- hosts: all
  tasks:
    - file:
        path: "{{zuul._test.test_root}}/{{zuul.uuid}}.post.flag"
        state: touch


@@ -0,0 +1,5 @@
- hosts: all
  tasks:
    - file:
        path: "{{zuul._test.test_root}}/{{zuul.uuid}}.pre.flag"
        state: touch


@@ -38,3 +38,5 @@
- job:
    name: python27
    pre-run: pre
    post-run: post


@@ -94,6 +94,9 @@ class TestJob(BaseTestCase):
        layout.addJob(python27diablo)
        project_config = configloader.ProjectParser.fromYaml(layout, {
            '_source_project': project,
            '_source_branch': 'master',
            '_source_configrepo': True,
            'name': 'project',
            'gate': {
                'jobs': [
@@ -247,6 +250,9 @@ class TestJob(BaseTestCase):
        layout.addJob(python27diablo)
        project_config = configloader.ProjectParser.fromYaml(layout, {
            '_source_project': project,
            '_source_branch': 'master',
            '_source_configrepo': True,
            'name': 'project',
            'gate': {
                'jobs': [
@@ -309,6 +315,9 @@ class TestJob(BaseTestCase):
        layout.addJob(python27)
        project_config = configloader.ProjectParser.fromYaml(layout, {
            '_source_project': project,
            '_source_branch': 'master',
            '_source_configrepo': True,
            'name': 'project',
            'gate': {
                'jobs': [


@@ -129,3 +129,9 @@ class TestAnsible(AnsibleZuulTestCase):
        self.assertEqual(build.result, 'SUCCESS')
        flag_path = os.path.join(self.test_root, build.uuid + '.flag')
        self.assertTrue(os.path.exists(flag_path))
        pre_flag_path = os.path.join(self.test_root, build.uuid +
                                     '.pre.flag')
        self.assertTrue(os.path.exists(pre_flag_path))
        post_flag_path = os.path.join(self.test_root, build.uuid +
                                      '.post.flag')
        self.assertTrue(os.path.exists(post_flag_path))


@@ -101,6 +101,8 @@ class JobParser(object):
               'nodes': vs.Any([node], str),
               'timeout': int,
               'attempts': int,
               'pre-run': to_list(str),
               'post-run': to_list(str),
               '_source_project': model.Project,
               '_source_branch': vs.Any(str, None),
               '_source_configrepo': bool,
@@ -111,6 +113,7 @@ class JobParser(object):
    @staticmethod
    def fromYaml(layout, conf):
        JobParser.getSchema()(conf)
        job = model.Job(conf['name'])
        if 'auth' in conf:
            job.auth = conf.get('auth')
@@ -119,8 +122,6 @@ class JobParser(object):
            job.inheritFrom(parent)
        job.timeout = conf.get('timeout', job.timeout)
        job.workspace = conf.get('workspace', job.workspace)
        job.pre_run = as_list(conf.get('pre-run', job.pre_run))
        job.post_run = as_list(conf.get('post-run', job.post_run))
        job.voting = conf.get('voting', True)
        job.hold_following_changes = conf.get('hold-following-changes', False)
        job.mutex = conf.get('mutex', None)
@@ -144,14 +145,38 @@ class JobParser(object):
            # accumulate onto any previously applied tags from
            # metajobs.
            job.tags = job.tags.union(set(tags))
        # The source attributes and playbook may not be overridden --
        # they are always supplied by the config loader. They
        # correspond to the Project instance of the repo where it
        # The source attributes and playbook info may not be
        # overridden -- they are always supplied by the config loader.
        # They correspond to the Project instance of the repo where it
        # originated, and the branch name.
        job.source_project = conf.get('_source_project')
        job.source_branch = conf.get('_source_branch')
        job.source_configrepo = conf.get('_source_configrepo')
        job.playbook = os.path.join('playbooks', job.name)
        pre_run_name = conf.get('pre-run')
        # Append the pre-run command
        if pre_run_name:
            pre_run_name = os.path.join('playbooks', pre_run_name)
            pre_run = model.PlaybookContext(job.source_project,
                                            job.source_branch,
                                            pre_run_name,
                                            job.source_configrepo)
            job.pre_run.append(pre_run)
        # Prepend the post-run command
        post_run_name = conf.get('post-run')
        if post_run_name:
            post_run_name = os.path.join('playbooks', post_run_name)
            post_run = model.PlaybookContext(job.source_project,
                                             job.source_branch,
                                             post_run_name,
                                             job.source_configrepo)
            job.post_run.insert(0, post_run)
        # Set the run command
        run_name = job.name
        run_name = os.path.join('playbooks', run_name)
        run = model.PlaybookContext(job.source_project,
                                    job.source_branch, run_name,
                                    job.source_configrepo)
        job.run = run
        job.failure_message = conf.get('failure-message', job.failure_message)
        job.success_message = conf.get('success-message', job.success_message)
        job.failure_url = conf.get('failure-url', job.failure_url)
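
For illustration, a rough sketch of what this parsing yields for the python27 fixture job defined earlier, assuming it comes from a config repo on its master branch as in the tests below (the variable names here are made up):

import os

# Sketch only: paths computed the same way JobParser.fromYaml does above.
pre_path = os.path.join('playbooks', 'pre')        # 'playbooks/pre'
run_path = os.path.join('playbooks', 'python27')   # 'playbooks/python27'
post_path = os.path.join('playbooks', 'post')      # 'playbooks/post'
# job.pre_run  -> [PlaybookContext(source_project, 'master', pre_path, True)]
# job.run      ->  PlaybookContext(source_project, 'master', run_path, True)
# job.post_run -> [PlaybookContext(source_project, 'master', post_path, True)]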
@@ -193,7 +218,11 @@ class ProjectTemplateParser(object):
            vs.Required('name'): str,
            'merge-mode': vs.Any(
                'merge', 'merge-resolve',
                'cherry-pick')}
                'cherry-pick'),
            '_source_project': model.Project,
            '_source_branch': vs.Any(str, None),
            '_source_configrepo': bool,
        }

        for p in layout.pipelines.values():
            project_template[p.name] = {'queue': str,
@@ -204,6 +233,9 @@ class ProjectTemplateParser(object):
    def fromYaml(layout, conf):
        ProjectTemplateParser.getSchema(layout)(conf)
        project_template = model.ProjectConfig(conf['name'])
        source_project = conf['_source_project']
        source_branch = conf['_source_branch']
        source_configrepo = conf['_source_configrepo']
        for pipeline in layout.pipelines.values():
            conf_pipeline = conf.get(pipeline.name)
            if not conf_pipeline:
@@ -212,11 +244,13 @@ class ProjectTemplateParser(object):
            project_template.pipelines[pipeline.name] = project_pipeline
            project_pipeline.queue_name = conf_pipeline.get('queue')
            project_pipeline.job_tree = ProjectTemplateParser._parseJobTree(
                layout, conf_pipeline.get('jobs', []))
                layout, conf_pipeline.get('jobs', []),
                source_project, source_branch, source_configrepo)
        return project_template

    @staticmethod
    def _parseJobTree(layout, conf, tree=None):
    def _parseJobTree(layout, conf, source_project, source_branch,
                      source_configrepo, tree=None):
        if not tree:
            tree = model.JobTree(None)
        for conf_job in conf:
@@ -230,6 +264,9 @@ class ProjectTemplateParser(object):
                if attrs:
                    # We are overriding params, so make a new job def
                    attrs['name'] = jobname
                    attrs['_source_project'] = source_project
                    attrs['_source_branch'] = source_branch
                    attrs['_source_configrepo'] = source_configrepo
                    subtree = tree.addJob(JobParser.fromYaml(layout, attrs))
                else:
                    # Not overriding, so get existing job
@@ -237,7 +274,11 @@ class ProjectTemplateParser(object):
                if jobs:
                    # This is the root of a sub tree
                    ProjectTemplateParser._parseJobTree(layout, jobs, subtree)
                    ProjectTemplateParser._parseJobTree(layout, jobs,
                                                        source_project,
                                                        source_branch,
                                                        source_configrepo,
                                                        subtree)
            else:
                raise Exception("Job must be a string or dictionary")
        return tree
@@ -248,10 +289,16 @@ class ProjectParser(object):
    @staticmethod
    def getSchema(layout):
        project = {vs.Required('name'): str,
                   'templates': [str],
                   'merge-mode': vs.Any('merge', 'merge-resolve',
                                        'cherry-pick')}
        project = {
            vs.Required('name'): str,
            'templates': [str],
            'merge-mode': vs.Any('merge', 'merge-resolve',
                                 'cherry-pick'),
            '_source_project': model.Project,
            '_source_branch': vs.Any(str, None),
            '_source_configrepo': bool,
        }

        for p in layout.pipelines.values():
            project[p.name] = {'queue': str,
                               'jobs': [vs.Any(str, dict)]}


@@ -373,15 +373,11 @@ class LaunchClient(object):
        params['items'] = merger_items
        params['projects'] = []
        config_repos = set([x[1] for x in
                            item.pipeline.layout.tenant.config_repos])
        if job.name != 'noop':
            params['playbook'] = dict(
                connection=job.source_project.connection_name,
                config_repo=job.source_project in config_repos,
                project=job.source_project.name,
                branch=job.source_branch,
                path=job.playbook)
            params['playbook'] = job.run.toDict()
            params['pre_playbooks'] = [x.toDict() for x in job.pre_run]
            params['post_playbooks'] = [x.toDict() for x in job.post_run]
        nodes = []
        for node in item.current_build_set.getJobNodeSet(job.name).getNodes():
            nodes.append(dict(name=node.name, image=node.image))


@@ -66,6 +66,13 @@ class Watchdog(object):
# repos end up in git.openstack.org.


class JobDirPlaybook(object):
    def __init__(self, root):
        self.root = root
        self.secure = None
        self.path = None


class JobDir(object):
    def __init__(self, keep=False):
        self.keep = keep
@@ -77,20 +84,30 @@ class JobDir(object):
        self.known_hosts = os.path.join(self.ansible_root, 'known_hosts')
        self.inventory = os.path.join(self.ansible_root, 'inventory')
        self.vars = os.path.join(self.ansible_root, 'vars.yaml')
        self.playbook = None
        self.playbook_root = os.path.join(self.ansible_root, 'playbook')
        os.makedirs(self.playbook_root)
        self.pre_playbook = None
        self.pre_playbook_root = os.path.join(self.ansible_root,
                                              'pre_playbook')
        os.makedirs(self.pre_playbook_root)
        self.post_playbook = None
        self.post_playbook_root = os.path.join(self.ansible_root,
                                               'post_playbook')
        os.makedirs(self.post_playbook_root)
        self.playbook = JobDirPlaybook(self.playbook_root)
        self.pre_playbooks = []
        self.post_playbooks = []
        self.config = os.path.join(self.ansible_root, 'ansible.cfg')
        self.ansible_log = os.path.join(self.ansible_root, 'ansible_log.txt')

    def addPrePlaybook(self):
        count = len(self.pre_playbooks)
        root = os.path.join(self.ansible_root, 'pre_playbook_%i' % (count,))
        os.makedirs(root)
        playbook = JobDirPlaybook(root)
        self.pre_playbooks.append(playbook)
        return playbook

    def addPostPlaybook(self):
        count = len(self.post_playbooks)
        root = os.path.join(self.ansible_root, 'post_playbook_%i' % (count,))
        os.makedirs(root)
        playbook = JobDirPlaybook(root)
        self.post_playbooks.append(playbook)
        return playbook

    def cleanup(self):
        if not self.keep:
            shutil.rmtree(self.root)
@@ -463,7 +480,7 @@ class AnsibleJob(object):
            commit = args['items'][-1]['newrev']  # noqa
        # is the playbook in a repo that we have already prepared?
        self.jobdir.playbook = self.preparePlaybookRepo(args)
        self.preparePlaybookRepos(args)
        # TODOv3: Ansible the ansible thing here.
        self.prepareAnsibleFiles(args)
@@ -499,13 +516,14 @@ class AnsibleJob(object):
    def runPlaybooks(self):
        result = None

        pre_status, pre_code = self.runAnsiblePrePlaybook()
        if pre_status != self.RESULT_NORMAL or pre_code != 0:
            # These should really never fail, so return None and have
            # zuul try again
            return result
        for playbook in self.jobdir.pre_playbooks:
            pre_status, pre_code = self.runAnsiblePlaybook(playbook)
            if pre_status != self.RESULT_NORMAL or pre_code != 0:
                # These should really never fail, so return None and have
                # zuul try again
                return result

        job_status, job_code = self.runAnsiblePlaybook()
        job_status, job_code = self.runAnsiblePlaybook(self.jobdir.playbook)
        if job_status == self.RESULT_TIMED_OUT:
            return 'TIMED_OUT'
        if job_status == self.RESULT_ABORTED:
@@ -515,14 +533,17 @@ class AnsibleJob(object):
            # run it again.
            return result
        post_status, post_code = self.runAnsiblePostPlaybook(
            job_code == 0)
        if post_status != self.RESULT_NORMAL or post_code != 0:
            result = 'POST_FAILURE'
        elif job_code == 0:
        success = (job_code == 0)
        if success:
            result = 'SUCCESS'
        else:
            result = 'FAILURE'

        for playbook in self.jobdir.post_playbooks:
            post_status, post_code = self.runAnsiblePlaybook(
                playbook, success)
            if post_status != self.RESULT_NORMAL or post_code != 0:
                result = 'POST_FAILURE'

        return result

    def getHostList(self, args):
@@ -542,16 +563,28 @@ class AnsibleJob(object):
                return fn
        raise Exception("Unable to find playbook %s" % path)

    def preparePlaybookRepo(self, args):
        # Check out the playbook repo if needed and return the path to
    def preparePlaybookRepos(self, args):
        for playbook in args['pre_playbooks']:
            jobdir_playbook = self.jobdir.addPrePlaybook()
            self.preparePlaybookRepo(jobdir_playbook, playbook, args)

        jobdir_playbook = self.jobdir.playbook
        self.preparePlaybookRepo(jobdir_playbook, args['playbook'], args)

        for playbook in args['post_playbooks']:
            jobdir_playbook = self.jobdir.addPostPlaybook()
            self.preparePlaybookRepo(jobdir_playbook, playbook, args)

    def preparePlaybookRepo(self, jobdir_playbook, playbook, args):
        # Check out the playbook repo if needed and set the path to
        # the playbook that should be run.
        playbook = args['playbook']
        jobdir_playbook.secure = playbook['secure']
        source = self.launcher_server.connections.getSource(
            playbook['connection'])
        project = source.getProject(playbook['project'])
        # TODO(jeblair): construct the url in the merger itself
        url = source.getGitUrl(project)
        if not playbook['config_repo']:
        if not playbook['secure']:
            # This is a project repo, so it is safe to use the already
            # checked out version (from speculative merging) of the
            # playbook
@@ -562,18 +595,19 @@ class AnsibleJob(object):
            path = os.path.join(self.jobdir.git_root,
                                project.name,
                                playbook['path'])
            return self.findPlaybook(path)
            jobdir_playbook.path = self.findPlaybook(path)
            return
        # The playbook repo is either a config repo, or it isn't in
        # the stack of changes we are testing, so check out the branch
        # tip into a dedicated space.
        merger = self.launcher_server._getMerger(self.jobdir.playbook_root)
        merger = self.launcher_server._getMerger(jobdir_playbook.root)
        merger.checkoutBranch(project.name, url, playbook['branch'])
        path = os.path.join(self.jobdir.playbook_root,
        path = os.path.join(jobdir_playbook.root,
                            project.name,
                            playbook['path'])
        return self.findPlaybook(path)
        jobdir_playbook.path = self.findPlaybook(path)

    def prepareAnsibleFiles(self, args):
        with open(self.jobdir.inventory, 'w') as inventory:
@@ -682,10 +716,7 @@ class AnsibleJob(object):
        return (self.RESULT_NORMAL, ret)

    def runAnsiblePrePlaybook(self):
        # TODOv3(jeblair): remove return statement
        return (self.RESULT_NORMAL, 0)

    def runAnsiblePlaybook(self, playbook, success=None):
        env_copy = os.environ.copy()
        env_copy['LOGNAME'] = 'zuul'
@@ -694,44 +725,13 @@ class AnsibleJob(object):
        else:
            verbose = '-v'

        cmd = ['ansible-playbook', self.jobdir.pre_playbook,
               '-e@%s' % self.jobdir.vars, verbose]
        # TODOv3: get this from the job
        timeout = 60
        return self.runAnsible(cmd, timeout)

    def runAnsiblePlaybook(self):
        env_copy = os.environ.copy()
        env_copy['LOGNAME'] = 'zuul'
        if False:  # TODOv3: self.options['verbose']:
            verbose = '-vvv'
        else:
            verbose = '-v'

        cmd = ['ansible-playbook', self.jobdir.playbook,
               '-e@%s' % self.jobdir.vars, verbose]
        # TODOv3: get this from the job
        timeout = 60
        return self.runAnsible(cmd, timeout)

    def runAnsiblePostPlaybook(self, success):
        # TODOv3(jeblair): remove return statement
        return (self.RESULT_NORMAL, 0)
        env_copy = os.environ.copy()
        env_copy['LOGNAME'] = 'zuul'
        if False:  # TODOv3: self.options['verbose']:
            verbose = '-vvv'
        else:
            verbose = '-v'

        cmd = ['ansible-playbook', self.jobdir.post_playbook,
               '-e', 'success=%s' % success,
               '-e@%s' % self.jobdir.vars, verbose]
        cmd = ['ansible-playbook', playbook.path]
        if success is not None:
            cmd.extend(['-e', 'success=%s' % str(bool(success))])
        cmd.extend(['-e@%s' % self.jobdir.vars, verbose])
        # TODOv3: get this from the job
        timeout = 60


@@ -515,7 +515,48 @@ class NodeRequest(object):
        self.state_time = data['state_time']


class PlaybookContext(object):
    """A reference to a playbook in the context of a project.

    Jobs refer to objects of this class for their main, pre, and post
    playbooks so that we can keep track of which repos and security
    contexts are needed in order to run them."""

    def __init__(self, project, branch, path, secure):
        self.project = project
        self.branch = branch
        self.path = path
        self.secure = secure

    def __repr__(self):
        return '<PlaybookContext %s:%s %s secure:%s>' % (self.project,
                                                         self.branch,
                                                         self.path,
                                                         self.secure)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __eq__(self, other):
        if not isinstance(other, PlaybookContext):
            return False
        return (self.project == other.project and
                self.branch == other.branch and
                self.path == other.path and
                self.secure == other.secure)

    def toDict(self):
        # Render to a dict to use in passing json to the launcher
        return dict(
            connection=self.project.connection_name,
            project=self.project.name,
            branch=self.branch,
            path=self.path,
            secure=self.secure)
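
For illustration, a minimal sketch of the dict this serializes to for the launcher (the Project stand-in and its values below are made up):

class FakeProject(object):
    # Hypothetical stand-in for model.Project, just for this sketch.
    connection_name = 'gerrit'
    name = 'common-config'

context = PlaybookContext(FakeProject(), 'master', 'playbooks/pre', True)
# context.toDict() ->
# {'connection': 'gerrit', 'project': 'common-config',
#  'branch': 'master', 'path': 'playbooks/pre', 'secure': True}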
class Job(object):
    """A Job represents the definition of actions to perform."""

    def __init__(self, name):
@@ -527,6 +568,7 @@ class Job(object):
            workspace=None,
            pre_run=[],
            post_run=[],
            run=None,
            voting=None,
            hold_following_changes=None,
            failure_message=None,
@@ -544,13 +586,15 @@ class Job(object):
            source_project=None,
            source_branch=None,
            source_configrepo=None,
            playbook=None,
        )

        self.name = name
        for k, v in self.attributes.items():
            setattr(self, k, v)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __eq__(self, other):
        # Compare the name and all inheritable attributes to determine
        # whether two jobs with the same name are identically
@@ -577,11 +621,15 @@ class Job(object):
        if not isinstance(other, Job):
            raise Exception("Job unable to inherit from %s" % (other,))
        for k, v in self.attributes.items():
            if getattr(other, k) != v and k != 'auth':
            if (getattr(other, k) != v and k not in
                    set(['auth', 'pre_run', 'post_run'])):
                setattr(self, k, getattr(other, k))

        # Inherit auth only if explicitly allowed
        if other.auth and 'inherit' in other.auth and other.auth['inherit']:
            setattr(self, 'auth', getattr(other, 'auth'))

        # Pre and post run are lists; make a copy
        self.pre_run = other.pre_run + self.pre_run
        self.post_run = self.post_run + other.post_run

    def changeMatches(self, change):
        if self.branch_matcher and not self.branch_matcher.matches(change):
@@ -1816,15 +1864,16 @@ class UnparsedTenantConfig(object):
                                "a single key (when parsing %s)" %
                                (conf,))
            key, value = item.items()[0]
            if key == 'project':
                self.projects.append(value)
            elif key == 'job':
            if key in ['project', 'project-template', 'job']:
                if source_project is not None:
                    value['_source_project'] = source_project
                if source_branch is not None:
                    value['_source_branch'] = source_branch
                if source_configrepo is not None:
                    value['_source_configrepo'] = source_configrepo
            if key == 'project':
                self.projects.append(value)
            elif key == 'job':
                self.jobs.append(value)
            elif key == 'project-template':
                self.project_templates.append(value)