Make all configuration in-repo configuration

We need a way to get the ansible playbooks onto the ansible launch
servers.  That's easy for jobs defined in-repo, because their repo
will already be on the launch server by definition.  But for the
tenant-global config, those playbooks (and roles, etc.) need to be
made available as well.  Rather than shipping them around on the
gearman bus, which is inefficient, let's just say that they will also
live in a repo -- probably the same repo that defines the jobs, i.e.,
in our case, the project-config repo.
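
For example (mirroring the test fixtures updated in this change), a
tenant's source section would name the config repos alongside the
ordinary project repos:

    - tenant:
        name: tenant-one
        source:
          gerrit:
            config-repos:
              - common-config
            project-repos:
              - org/project

Config repos are permitted the full range of configuration (read from
zuul.yaml or .zuul.yaml), while project repos supply a more restricted
.zuul.yaml.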

In other words -- by making the global config one or more repos
that zuul knows about, the merger component of the launch server
can prepare *that* repo as well as the others involved in any
job, so that the playbooks are available to ansible.

This also has the potential upside of making it much more natural to
implement changes that Depends-On zuul configuration or ansible
playbook changes.

We also keep track of the source repo for job definitions so that
we can potentially use that to tell the launch server what repos
should be included for playbooks, though this is not plumbed through
yet.
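
Concretely, when the tenant parser loads one of these repos it stamps
each job definition with its origin, so a job parsed from the fixtures
above ends up carrying roughly:

    {'name': 'python27',
     '_project_source': 'gerrit',
     '_project_name': 'common-config'}

which the job parser then copies onto job.project_source and
job.project_name.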

This adds some features to the test framework to copy a directory
from the fixtures dir into a git repo, since we're going to have a
lot more 'git repo content' that should simply live in the fixtures
dir.
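
The helper looks for a 'git' directory next to the tenant config file
named by the test, and turns each subdirectory into a single commit on
master of a test repo with the same name.  A fixture layout along
these lines (the paths here are illustrative) is all that's needed:

    <fixtures dir>/
        main.yaml            # the test's tenant_config_file
        git/
            common-config/
                zuul.yaml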

It also removes the merge worker from the tests because it was
racing with the ansible launcher cat handler.  The merge worker
can probably be completely removed in a later change.

Change-Id: I8fc5a8f627e3d915d54d15e70b7960655a6332a1
Author: James E. Blair
Date:   2016-01-15 16:20:40 -08:00
Parent: bbda4706fc
Commit: 96c6bf868a

13 changed files with 130 additions and 48 deletions

@@ -974,10 +974,6 @@ class ZuulTestCase(BaseTestCase):
         old_urlopen = urllib2.urlopen
         urllib2.urlopen = URLOpenerFactory

-        self.merge_server = zuul.merger.server.MergeServer(self.config,
-                                                           self.connections)
-        self.merge_server.start()
-
         self.launcher = zuul.launcher.launchclient.LaunchClient(
             self.config, self.sched, self.swift)
         self.merge_client = zuul.merger.client.MergeClient(
@@ -1076,6 +1072,32 @@ class ZuulTestCase(BaseTestCase):
         self.config.read(os.path.join(FIXTURE_DIR, self.config_file))
         if hasattr(self, 'tenant_config_file'):
             self.config.set('zuul', 'tenant_config', self.tenant_config_file)
+            git_path = os.path.join(
+                os.path.dirname(
+                    os.path.join(FIXTURE_DIR, self.tenant_config_file)),
+                'git')
+            if os.path.exists(git_path):
+                for reponame in os.listdir(git_path):
+                    self.copyDirToRepo(reponame,
+                                       os.path.join(git_path, reponame))
+
+    def copyDirToRepo(self, project, source_path):
+        repo_path = os.path.join(self.upstream_root, project)
+        if not os.path.exists(repo_path):
+            self.init_repo(project)
+
+        files = {}
+        for (dirpath, dirnames, filenames) in os.walk(source_path):
+            for filename in filenames:
+                test_tree_filepath = os.path.join(dirpath, filename)
+                common_path = os.path.commonprefix([test_tree_filepath,
+                                                    source_path])
+                relative_filepath = test_tree_filepath[len(common_path) + 1:]
+                with open(test_tree_filepath, 'r') as f:
+                    content = f.read()
+                files[relative_filepath] = content
+        self.addCommitToRepo(project, 'add content from fixture',
+                             files, branch='master')

     def setup_repos(self):
         """Subclasses can override to manipulate repos before tests"""
@@ -1099,8 +1121,6 @@ class ZuulTestCase(BaseTestCase):
     def shutdown(self):
         self.log.debug("Shutting down after tests")
         self.launcher.stop()
-        self.merge_server.stop()
-        self.merge_server.join()
         self.merge_client.stop()
         self.sched.stop()
         self.sched.join()
@@ -1233,7 +1253,6 @@ class ZuulTestCase(BaseTestCase):
             time.sleep(0)
         self.gearman_server.functions = set()
         self.rpc.register()
-        self.merge_server.register()

     def haveAllBuildsReported(self):
         # See if Zuul is waiting on a meta job to complete
@@ -1331,13 +1350,13 @@ class ZuulTestCase(BaseTestCase):
         jobs = filter(lambda x: x.result == result, jobs)
         return len(jobs)

-    def getJobFromHistory(self, name):
+    def getJobFromHistory(self, name, project=None):
         history = self.ansible_server.job_history
         for job in history:
             params = json.loads(job.arguments)
-            if params['job'] == name:
+            if (params['job'] == name and
+                (project is None or params['ZUUL_PROJECT'] == project)):
                 result = json.loads(job.data[-1])
-                print result
                 ret = BuildHistory(job=job,
                                    name=params['job'],
                                    result=result['result'])

@@ -1,8 +1,8 @@
 - tenant:
     name: tenant-one
-    include:
-      - common.yaml
     source:
       gerrit:
-        repos:
+        config-repos:
+          - common-config
+        project-repos:
           - org/project

@@ -12,3 +12,7 @@
     failure:
       gerrit:
         verified: -1
+
+- job:
+    name:
+      python27

@@ -29,7 +29,9 @@
     name: org/project1
     check:
       jobs:
+        - python27
         - project1-test1
     tenant-one-gate:
       jobs:
+        - python27
         - project1-test1

@@ -29,7 +29,9 @@
     name: org/project2
     check:
       jobs:
+        - python27
         - project2-test1
     tenant-two-gate:
       jobs:
+        - python27
         - project2-test1

@@ -1,11 +1,15 @@
 - tenant:
     name: tenant-one
-    include:
-      - common.yaml
-      - tenant-one.yaml
+    source:
+      gerrit:
+        config-repos:
+          - common-config
+          - tenant-one-config

 - tenant:
     name: tenant-two
-    include:
-      - common.yaml
-      - tenant-two.yaml
+    source:
+      gerrit:
+        config-repos:
+          - common-config
+          - tenant-two-config

@@ -1,4 +1,6 @@
 - tenant:
     name: tenant-one
-    include:
-      - common.yaml
+    source:
+      gerrit:
+        config-repos:
+          - common-config

@@ -38,6 +38,8 @@ class TestMultipleTenants(ZuulTestCase):
         self.waitUntilSettled()
         self.assertEqual(self.getJobFromHistory('project1-test1').result,
                          'SUCCESS')
+        self.assertEqual(self.getJobFromHistory('python27').result,
+                         'SUCCESS')
         self.assertEqual(A.data['status'], 'MERGED')
         self.assertEqual(A.reported, 2,
                          "A should report start and success")
@@ -50,6 +52,9 @@ class TestMultipleTenants(ZuulTestCase):
         B.addApproval('CRVW', 2)
         self.fake_gerrit.addEvent(B.addApproval('APRV', 1))
         self.waitUntilSettled()
+        self.assertEqual(self.getJobFromHistory('python27',
+                                                'org/project2').result,
+                         'SUCCESS')
         self.assertEqual(self.getJobFromHistory('project2-test1').result,
                          'SUCCESS')
         self.assertEqual(B.data['status'], 'MERGED')

@@ -67,6 +67,8 @@ class JobParser(object):
                'swift': to_list(swift),
                'irrelevant-files': to_list(str),
                'timeout': int,
+               '_project_source': str,  # used internally
+               '_project_name': str,  # used internally
                }

         return vs.Schema(job)
@@ -91,11 +93,17 @@ class JobParser(object):
             # accumulate onto any previously applied tags from
             # metajobs.
             job.tags = job.tags.union(set(tags))
+        if not job.project_source:
+            # These attributes may not be overridden -- the first
+            # reference definition of a job is in the repo where it is
+            # first defined.
+            job.project_source = conf.get('_project_source')
+            job.project_name = conf.get('_project_name')
         job.failure_message = conf.get('failure-message', job.failure_message)
         job.success_message = conf.get('success-message', job.success_message)
         job.failure_url = conf.get('failure-url', job.failure_url)
         job.success_url = conf.get('success-url', job.success_url)

         if 'branches' in conf:
             matchers = []
             for branch in as_list(conf['branches']):
@@ -413,7 +421,8 @@ class PipelineParser(object):
 class TenantParser(object):
     log = logging.getLogger("zuul.TenantParser")

-    tenant_source = vs.Schema({'repos': [str]})
+    tenant_source = vs.Schema({'config-repos': [str],
+                               'project-repos': [str]})

     @staticmethod
     def validateTenantSources(connections):
@@ -433,7 +442,6 @@
     @staticmethod
     def getSchema(connections=None):
         tenant = {vs.Required('name'): str,
-                  'include': to_list(str),
                   'source': TenantParser.validateTenantSources(connections)}
         return vs.Schema(tenant)
@@ -442,14 +450,6 @@
         TenantParser.getSchema(connections)(conf)
         tenant = model.Tenant(conf['name'])
         tenant_config = model.UnparsedTenantConfig()
-        for fn in conf.get('include', []):
-            if not os.path.isabs(fn):
-                fn = os.path.join(base, fn)
-            fn = os.path.expanduser(fn)
-            with open(fn) as config_file:
-                TenantParser.log.info("Loading configuration from %s" % (fn,))
-                incdata = yaml.load(config_file)
-                tenant_config.extend(incdata)
         incdata = TenantParser._loadTenantInRepoLayouts(merger, connections,
                                                         conf)
         tenant_config.extend(incdata)
@@ -463,31 +463,77 @@
         jobs = []
         for source_name, conf_source in conf_tenant.get('source', {}).items():
             source = connections.getSource(source_name)
-            for conf_repo in conf_source.get('repos'):
+
+            # Get main config files.  These files are permitted the
+            # full range of configuration.
+            for conf_repo in conf_source.get('config-repos', []):
+                project = source.getProject(conf_repo)
+                url = source.getGitUrl(project)
+                job = merger.getFiles(project.name, url, 'master',
+                                      files=['zuul.yaml', '.zuul.yaml'])
+                job.project = project
+                job.config_repo = True
+                jobs.append(job)
+
+            # Get in-project-repo config files which have a restricted
+            # set of options.
+            for conf_repo in conf_source.get('project-repos', []):
                 project = source.getProject(conf_repo)
                 url = source.getGitUrl(project)
                 # TODOv3(jeblair): config should be branch specific
                 job = merger.getFiles(project.name, url, 'master',
                                       files=['.zuul.yaml'])
                 job.project = project
+                job.config_repo = False
                 jobs.append(job)
+
         for job in jobs:
+            # Note: this is an ordered list -- we wait for cat jobs to
+            # complete in the order they were launched which is the
+            # same order they were defined in the main config file.
+            # This is important for correct inheritance.
             TenantParser.log.debug("Waiting for cat job %s" % (job,))
             job.wait()
-            if job.files.get('.zuul.yaml'):
-                TenantParser.log.info(
-                    "Loading configuration from %s/.zuul.yaml" %
-                    (job.project,))
-                incdata = TenantParser._parseInRepoLayout(
-                    job.files['.zuul.yaml'])
-                config.extend(incdata)
+            for fn in ['zuul.yaml', '.zuul.yaml']:
+                if job.files.get(fn):
+                    TenantParser.log.info(
+                        "Loading configuration from %s/%s" %
+                        (job.project, fn))
+                    if job.config_repo:
+                        incdata = TenantParser._parseConfigRepoLayout(
+                            job.files[fn], source_name, job.project.name)
+                    else:
+                        incdata = TenantParser._parseProjectRepoLayout(
+                            job.files[fn], source_name, job.project.name)
+                    config.extend(incdata)
         return config

     @staticmethod
-    def _parseInRepoLayout(data):
+    def _parseConfigRepoLayout(data, source_name, project_name):
+        # This is the top-level configuration for a tenant.
+        config = model.UnparsedTenantConfig()
+        config.extend(yaml.load(data))
+        # Remember where this job was defined
+        for conf_job in config.jobs:
+            conf_job['_project_source'] = source_name
+            conf_job['_project_name'] = project_name
+        return config
+
+    @staticmethod
+    def _parseProjectRepoLayout(data, source_name, project_name):
         # TODOv3(jeblair): this should implement some rules to protect
         # aspects of the config that should not be changed in-repo
-        return yaml.load(data)
+        config = model.UnparsedTenantConfig()
+        config.extend(yaml.load(data))
+        # Remember where this job was defined
+        for conf_job in config.jobs:
+            conf_job['_project_source'] = source_name
+            conf_job['_project_name'] = project_name
+        return config

     @staticmethod
     def _parseLayout(base, data, scheduler, connections):

@@ -77,12 +77,8 @@ class Repo(object):
         return self._initialized

     def createRepoObject(self):
-        try:
-            self._ensure_cloned()
-            repo = git.Repo(self.local_path)
-        except:
-            self.log.exception("Unable to initialize repo for %s" %
-                               self.local_path)
+        self._ensure_cloned()
+        repo = git.Repo(self.local_path)
         return repo

     def reset(self):

@@ -455,6 +455,8 @@ class Job(object):
             pre_run=None,
             post_run=None,
             voting=None,
+            project_source=None,
+            project_name=None,
             failure_message=None,
             success_message=None,
             failure_url=None,