Use playbooks defined in repos

This replaces the stubbed-out 'hello world' Ansible playbook with
an implementation which actually runs the corresponding playbook
defined in the repo where the job is defined.

Change-Id: I73a6b3b067c7d61bb2a2b2140ab98c4944a6adfe
Story: 2000772
changes/50/423550/13
James E. Blair 6 years ago
parent fef033b4cd
commit c73c73a983
  1. 4
      tests/base.py
  2. 6
      tests/fixtures/config/ansible/git/common-config/playbooks/python27.yaml
  3. 40
      tests/fixtures/config/ansible/git/common-config/zuul.yaml
  4. 6
      tests/fixtures/config/ansible/git/org_project/.zuul.yaml
  5. 1
      tests/fixtures/config/ansible/git/org_project/README
  6. 8
      tests/fixtures/config/ansible/main.yaml
  7. 2
      tests/fixtures/config/in-repo/git/org_project/playbooks/project-test1.yaml
  8. 2
      tests/fixtures/config/multi-tenant/git/common-config/playbooks/python27.yaml
  9. 2
      tests/fixtures/config/multi-tenant/git/tenant-one-config/playbooks/project1-test1.yaml
  10. 2
      tests/fixtures/config/multi-tenant/git/tenant-two-config/playbooks/project2-test1.yaml
  11. 2
      tests/fixtures/config/openstack/git/project-config/playbooks/python27.yaml
  12. 2
      tests/fixtures/config/openstack/git/project-config/playbooks/python35.yaml
  13. 12
      tests/unit/test_model.py
  14. 23
      tests/unit/test_v3.py
  15. 25
      zuul/configloader.py
  16. 2
      zuul/driver/gerrit/gerritconnection.py
  17. 10
      zuul/launcher/client.py
  18. 43
      zuul/launcher/server.py
  19. 10
      zuul/merger/merger.py
  20. 13
      zuul/model.py

@@ -1752,6 +1752,10 @@ class ZuulTestCase(BaseTestCase):
zuul.merger.merger.reset_repo_to_head(repo)
for fn, content in files.items():
fn = os.path.join(path, fn)
try:
os.makedirs(os.path.dirname(fn))
except OSError:
pass
with open(fn, 'w') as f:
f.write(content)
repo.index.add([fn])

@@ -0,0 +1,6 @@
# TODO(jeblair): Perform an action inside of a test chroot
# Minimal test playbook: touches a marker file on every host so the
# test suite can verify that the repo-defined playbook actually ran.
- hosts: all
  tasks:
    - file:
        path: /tmp/playbook.test
        state: touch

@@ -0,0 +1,40 @@
# Tenant-level pipeline and job definitions for the ansible test fixture.
- pipeline:
    name: check
    manager: independent
    source:
      gerrit
    trigger:
      gerrit:
        - event: patchset-created
    success:
      gerrit:
        verified: 1
    failure:
      gerrit:
        verified: -1

- pipeline:
    name: gate
    manager: dependent
    success-message: Build succeeded (gate).
    source:
      gerrit
    trigger:
      gerrit:
        - event: comment-added
          approval:
            - approved: 1
    success:
      gerrit:
        verified: 2
        submit: true
    failure:
      gerrit:
        verified: -2
    start:
      gerrit:
        verified: 0
    precedence: high

- job:
    name: python27

@@ -0,0 +1,6 @@
# In-repo project config: run the python27 job in the check pipeline.
- project:
    name: org/project
    check:
      jobs:
        - python27

@@ -0,0 +1,8 @@
# Tenant definition for the ansible test fixture.
- tenant:
    name: tenant-one
    source:
      gerrit:
        config-repos:
          - common-config
        project-repos:
          - org/project

@ -56,7 +56,7 @@ class TestJob(BaseTestCase):
pipeline = model.Pipeline('gate', layout)
layout.addPipeline(pipeline)
queue = model.ChangeQueue(pipeline)
project = model.Project('project')
project = model.Project('project', None)
base = configloader.JobParser.fromYaml(layout, {
'_source_project': project,
@ -122,7 +122,7 @@ class TestJob(BaseTestCase):
def test_job_auth_inheritance(self):
layout = model.Layout()
project = model.Project('project')
project = model.Project('project', None)
base = configloader.JobParser.fromYaml(layout, {
'_source_project': project,
@ -201,7 +201,7 @@ class TestJob(BaseTestCase):
pipeline = model.Pipeline('gate', layout)
layout.addPipeline(pipeline)
queue = model.ChangeQueue(pipeline)
project = model.Project('project')
project = model.Project('project', None)
base = configloader.JobParser.fromYaml(layout, {
'_source_project': project,
@ -271,7 +271,7 @@ class TestJob(BaseTestCase):
pipeline = model.Pipeline('gate', layout)
layout.addPipeline(pipeline)
queue = model.ChangeQueue(pipeline)
project = model.Project('project')
project = model.Project('project', None)
base = configloader.JobParser.fromYaml(layout, {
'_source_project': project,
@ -312,14 +312,14 @@ class TestJob(BaseTestCase):
def test_job_source_project(self):
layout = model.Layout()
base_project = model.Project('base_project')
base_project = model.Project('base_project', None)
base = configloader.JobParser.fromYaml(layout, {
'_source_project': base_project,
'name': 'base',
})
layout.addJob(base)
other_project = model.Project('other_project')
other_project = model.Project('other_project', None)
base2 = configloader.JobParser.fromYaml(layout, {
'_source_project': other_project,
'name': 'base',

@ -98,8 +98,16 @@ class TestInRepoConfig(AnsibleZuulTestCase):
- project-test2
""")
in_repo_playbook = textwrap.dedent(
"""
- hosts: all
tasks: []
""")
file_dict = {'.zuul.yaml': in_repo_conf,
'playbooks/project-test2.yaml': in_repo_playbook}
A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A',
files={'.zuul.yaml': in_repo_conf})
files=file_dict)
A.addApproval('code-review', 2)
self.fake_gerrit.addEvent(A.addApproval('approved', 1))
self.waitUntilSettled()
@ -110,3 +118,16 @@ class TestInRepoConfig(AnsibleZuulTestCase):
"A should report start and success")
self.assertIn('tenant-one-gate', A.messages[1],
"A should transit tenant-one gate")
class TestAnsible(AnsibleZuulTestCase):
# A temporary class to hold new tests while others are disabled
tenant_config_file = 'config/ansible/main.yaml'
def test_playbook(self):
A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
self.waitUntilSettled()
self.assertEqual(self.getJobFromHistory('python27').result,
'SUCCESS')

@ -103,6 +103,7 @@ class JobParser(object):
'attempts': int,
'_source_project': model.Project,
'_source_branch': vs.Any(str, None),
'_source_configrepo': bool,
}
return vs.Schema(job)
@ -143,12 +144,16 @@ class JobParser(object):
# accumulate onto any previously applied tags from
# metajobs.
job.tags = job.tags.union(set(tags))
# The source attributes may not be overridden -- they are
# always supplied by the config loader. They correspond to
# the Project instance of the repo where it originated, and
# the branch name.
# The source attributes and playbook may not be overridden --
# they are always supplied by the config loader. They
# correspond to the Project instance of the repo where it
# originated, and the branch name.
job.source_project = conf.get('_source_project')
job.source_branch = conf.get('_source_branch')
job.source_configrepo = conf.get('_source_configrepo')
# TODOv3(jeblair): verify the playbook exists
# TODOv3(jeblair): remove hardcoded extension
job.playbook = os.path.join('playbooks', job.name + '.yaml')
job.failure_message = conf.get('failure-message', job.failure_message)
job.success_message = conf.get('success-message', job.success_message)
job.failure_url = conf.get('failure-url', job.failure_url)
@ -156,7 +161,7 @@ class JobParser(object):
# If the definition for this job came from a project repo,
# implicitly apply a branch matcher for the branch it was on.
if job.source_branch:
if (not job.source_configrepo) and job.source_branch:
branches = [job.source_branch]
elif 'branches' in conf:
branches = as_list(conf['branches'])
@ -546,6 +551,7 @@ class TenantParser(object):
job = merger.getFiles(project.name, url, 'master',
files=['zuul.yaml', '.zuul.yaml'])
job.project = project
job.branch = 'master'
job.config_repo = True
jobs.append(job)
@ -579,7 +585,7 @@ class TenantParser(object):
(job.project, fn))
if job.config_repo:
incdata = TenantParser._parseConfigRepoLayout(
job.files[fn], job.project)
job.files[fn], job.project, job.branch)
config_repos_config.extend(incdata)
else:
incdata = TenantParser._parseProjectRepoLayout(
@ -589,11 +595,10 @@ class TenantParser(object):
return config_repos_config, project_repos_config
@staticmethod
def _parseConfigRepoLayout(data, project):
def _parseConfigRepoLayout(data, project, branch):
# This is the top-level configuration for a tenant.
config = model.UnparsedTenantConfig()
config.extend(yaml.load(data), project)
config.extend(yaml.load(data), project, branch, True)
return config
@staticmethod
@ -601,7 +606,7 @@ class TenantParser(object):
# TODOv3(jeblair): this should implement some rules to protect
# aspects of the config that should not be changed in-repo
config = model.UnparsedTenantConfig()
config.extend(yaml.load(data), project, branch)
config.extend(yaml.load(data), project, branch, False)
return config

@ -255,7 +255,7 @@ class GerritConnection(BaseConnection):
def getProject(self, name):
if name not in self.projects:
self.projects[name] = Project(name)
self.projects[name] = Project(name, self.connection_name)
return self.projects[name]
def maintainCache(self, relevant):

@ -368,6 +368,16 @@ class LaunchClient(object):
params['job'] = job.name
params['items'] = merger_items
params['projects'] = []
config_repos = set([x[1] for x in
item.pipeline.layout.tenant.config_repos])
if job.name != 'noop':
params['playbook'] = dict(
connection=job.source_project.connection_name,
config_repo=job.source_project in config_repos,
project=job.source_project.name,
branch=job.source_branch,
path=job.playbook)
nodes = []
for node in item.current_build_set.getJobNodeSet(job.name).getNodes():
nodes.append(dict(name=node.name, image=node.image))

@ -26,7 +26,6 @@ import time
import traceback
import gear
import yaml
import zuul.merger
import zuul.ansible.library
@ -76,7 +75,9 @@ class JobDir(object):
os.makedirs(self.ansible_root)
self.known_hosts = os.path.join(self.ansible_root, 'known_hosts')
self.inventory = os.path.join(self.ansible_root, 'inventory')
self.playbook = os.path.join(self.ansible_root, 'playbook')
self.playbook = None
self.playbook_root = os.path.join(self.ansible_root, 'playbook')
os.makedirs(self.playbook_root)
self.post_playbook = os.path.join(self.ansible_root, 'post_playbook')
self.config = os.path.join(self.ansible_root, 'ansible.cfg')
self.ansible_log = os.path.join(self.ansible_root, 'ansible_log.txt')
@ -358,6 +359,9 @@ class LaunchServer(object):
else:
commit = args['items'][-1]['newrev'] # noqa
# is the playbook in a repo that we have already prepared?
jobdir.playbook = self.preparePlaybookRepo(jobdir, args)
# TODOv3: Ansible the ansible thing here.
self.prepareAnsibleFiles(jobdir, args)
@ -402,6 +406,36 @@ class LaunchServer(object):
hosts.append((node['name'], dict(ansible_connection='local')))
return hosts
def preparePlaybookRepo(self, jobdir, args):
    """Make the job's playbook available on disk.

    Decide where the playbook named in ``args['playbook']`` should be
    read from and return its filesystem path, checking out the repo
    into the job dir's dedicated playbook workspace when needed.
    """
    pb = args['playbook']
    source = self.connections.getSource(pb['connection'])
    project = source.getProject(pb['project'])
    # TODO(jeblair): construct the url in the merger itself
    url = source.getGitUrl(project)
    if not pb['config_repo']:
        # Project repo: if the playbook's repo is among the items under
        # test, the speculatively-merged checkout already on disk is the
        # right (and safe) version to use.
        already_prepared = any(
            item['connection_name'] == pb['connection'] and
            item['project'] == pb['project']
            for item in args['items'])
        if already_prepared:
            return os.path.join(jobdir.git_root, project.name, pb['path'])
    # Either a config repo, or a project repo not in the stack of
    # changes being tested: check out the branch tip into a dedicated
    # space instead.
    merger = self._getMerger(jobdir.playbook_root)
    merger.checkoutBranch(project.name, url, pb['branch'])
    return os.path.join(jobdir.playbook_root, project.name, pb['path'])
def prepareAnsibleFiles(self, jobdir, args):
with open(jobdir.inventory, 'w') as inventory:
for host_name, host_vars in self.getHostList(args):
@ -410,11 +444,6 @@ class LaunchServer(object):
for k, v in host_vars.items():
inventory.write('%s=%s' % (k, v))
inventory.write('\n')
with open(jobdir.playbook, 'w') as playbook:
play = dict(hosts='localhost',
tasks=[dict(name='test',
shell='echo Hello world')])
playbook.write(yaml.dump([play]))
with open(jobdir.config, 'w') as config:
config.write('[defaults]\n')
config.write('hostfile = %s\n' % jobdir.inventory)

@ -260,6 +260,16 @@ class Merger(object):
except Exception:
self.log.exception("Unable to update %s", project)
def checkoutBranch(self, project, url, branch):
    """Check out the tip of *branch* in the local clone of *project*.

    Raises an Exception when the repository has no such branch.
    """
    repo = self.getRepo(project, url)
    if not repo.hasBranch(branch):
        raise Exception("Project %s does not have branch %s" %
                        (project, branch))
    self.log.info("Checking out branch %s of %s" % (branch, project))
    repo.checkout(repo.getBranchHead(branch))
def _mergeChange(self, item, ref):
repo = self.getRepo(item['project'], item['url'])
try:

@ -353,8 +353,9 @@ class Project(object):
# This makes a Project instance a unique identifier for a given
# project from a given source.
def __init__(self, name, foreign=False):
def __init__(self, name, connection_name, foreign=False):
self.name = name
self.connection_name = connection_name
# foreign projects are those referenced in dependencies
# of layout projects, this should matter
# when deciding whether to enqueue their changes
@ -530,11 +531,14 @@ class Job(object):
tags=set(),
mutex=None,
attempts=3,
source_project=None,
source_branch=None,
source_configrepo=None,
playbook=None,
)
def __init__(self, name):
self.name = name
self.project_source = None
for k, v in self.attributes.items():
setattr(self, k, v)
@ -1779,7 +1783,8 @@ class UnparsedTenantConfig(object):
r.nodesets = copy.deepcopy(self.nodesets)
return r
def extend(self, conf, source_project=None, source_branch=None):
def extend(self, conf, source_project=None, source_branch=None,
source_configrepo=None):
if isinstance(conf, UnparsedTenantConfig):
self.pipelines.extend(conf.pipelines)
self.jobs.extend(conf.jobs)
@ -1809,6 +1814,8 @@ class UnparsedTenantConfig(object):
value['_source_project'] = source_project
if source_branch is not None:
value['_source_branch'] = source_branch
if source_configrepo is not None:
value['_source_configrepo'] = source_configrepo
self.jobs.append(value)
elif key == 'project-template':
self.project_templates.append(value)

Loading…
Cancel
Save