Add provides/requires support

Adds support for expressing artifact dependencies between jobs
which may run in different projects.

Change-Id: If8cce8750d296d607841800e4bbf688a24c40e08
James E. Blair 2019-01-28 13:18:22 -08:00
parent 91e7e680a1
commit 1317391323
24 changed files with 1019 additions and 59 deletions

View File

@ -686,6 +686,57 @@ Here is an example of two job definitions:
   tags from all the jobs and variants used in constructing the
   frozen job, with no duplication.
.. attr:: provides

   A list of free-form strings which identify resources provided
   by this job which may be used by other jobs for other changes
   using the :attr:`job.requires` attribute.

.. attr:: requires

   A list of free-form strings which identify resources which may
   be provided by other jobs for other changes (via the
   :attr:`job.provides` attribute) that are used by this job.

   When Zuul encounters a job with a `requires` attribute, it
   searches for those values in the `provides` attributes of any
   jobs associated with any queue items ahead of the current
   change. In this way, if a change uses either git dependencies
   or a `Depends-On` header to indicate a dependency on another
   change, Zuul will be able to determine that the parent change
   affects the run-time environment of the child change. If such a
   relationship is found, the job with `requires` will not start
   until all of the jobs with matching `provides` have completed or
   paused. Additionally, the :ref:`artifacts <return_artifacts>`
   returned by the `provides` jobs will be made available to the
   `requires` job.

   For example, a job which produces a builder container image in
   one project that is then consumed by a container image build job
   in another project might look like this:

   .. code-block:: yaml

      - job:
          name: build-builder-image
          provides: images

      - job:
          name: build-final-image
          requires: images

      - project:
          name: builder-project
          check:
            jobs:
              - build-builder-image

      - project:
          name: final-project
          check:
            jobs:
              - build-final-image
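   Within the consuming job's playbooks, the artifact information
   returned by the `provides` jobs is available in the
   ``zuul.artifacts`` variable. A minimal sketch of reading it (the
   task shown here is illustrative and not part of this change):

   .. code-block:: yaml

      - hosts: all
        tasks:
          - name: Show artifacts provided by jobs ahead of this change
            debug:
              var: zuul.artifacts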
.. attr:: secrets

   A list of secrets which may be used by the job. A

View File

@ -228,6 +228,41 @@ of item.
All items provide the following information as Ansible variables
under the ``zuul`` key:
.. var:: artifacts
   :type: list

   If the job has a :attr:`job.requires` attribute, and Zuul has
   found changes ahead of this change in the pipeline with matching
   :attr:`job.provides` attributes, then information about any
   :ref:`artifacts returned <return_artifacts>` from those jobs
   will appear here.

   This value is a list of dictionaries with the following format:

   .. var:: project

      The name of the project which supplied this artifact.

   .. var:: change

      The change number which supplied this artifact.

   .. var:: patchset

      The patchset of the change.

   .. var:: job

      The name of the job which produced the artifact.

   .. var:: name

      The name of the artifact (as supplied to
      :ref:`return_artifacts`).

   .. var:: url

      The URL of the artifact (as supplied to
      :ref:`return_artifacts`).
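   As a concrete illustration, a single entry in this list might
   look like the following (the values are illustrative):

   .. code-block:: yaml

      zuul:
        artifacts:
          - project: org/project1
            change: '1'
            patchset: '1'
            job: image-builder
            name: image
            url: http://example.com/image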
.. var:: build

   The UUID of the build. A build is a single execution of a job.

View File

@ -0,0 +1,7 @@
---
features:
  - Support for expressing artifact or other resource dependencies
    between jobs running on different changes with a dependency
    relationship (e.g., a container image built in one project and
    consumed in a second project) has been added via the
    :attr:`job.provides` and :attr:`job.requires` job attributes.

View File

@ -1355,6 +1355,11 @@ class FakeBuild(object):
items = self.parameters['zuul']['items']
self.changes = ' '.join(['%s,%s' % (x['change'], x['patchset'])
for x in items if 'change' in x])
if 'change' in items[-1]:
self.change = ' '.join((items[-1]['change'],
items[-1]['patchset']))
else:
self.change = None
def __repr__(self):
waiting = ''
@ -1401,6 +1406,8 @@ class FakeBuild(object):
self._wait()
self.log.debug("Build %s continuing" % self.unique)
self.writeReturnData()
result = (RecordingAnsibleJob.RESULT_NORMAL, 0) # Success
if self.shouldFail():
result = (RecordingAnsibleJob.RESULT_NORMAL, 1) # Failure
@ -1418,6 +1425,14 @@ class FakeBuild(object):
return True
return False
def writeReturnData(self):
changes = self.executor_server.return_data.get(self.name, {})
data = changes.get(self.change)
if data is None:
return
with open(self.jobdir.result_data_file, 'w') as f:
f.write(json.dumps(data))
def hasChanges(self, *changes):
"""Return whether this build has certain changes in its git repos.
@ -1554,6 +1569,7 @@ class RecordingExecutorServer(zuul.executor.server.ExecutorServer):
self.running_builds = []
self.build_history = []
self.fail_tests = {}
self.return_data = {}
self.job_builds = {}
def failJob(self, name, change):
@ -1569,6 +1585,19 @@ class RecordingExecutorServer(zuul.executor.server.ExecutorServer):
l.append(change)
self.fail_tests[name] = l
def returnData(self, name, change, data):
"""Instruct the executor to return data for this build.
:arg str name: The name of the job to return data.
:arg Change change: The :py:class:`~tests.base.FakeChange`
instance which should cause the job to return data.
:arg dict data: The data to return
"""
changes = self.return_data.setdefault(name, {})
cid = ' '.join((str(change.number), str(change.latest_patchset)))
changes[cid] = data
def release(self, regex=None):
"""Release a held build.

View File

@ -0,0 +1,38 @@
- pipeline:
name: check
manager: independent
post-review: true
trigger:
gerrit:
- event: patchset-created
success:
gerrit:
Verified: 1
failure:
gerrit:
Verified: -1
- pipeline:
name: gate
manager: dependent
post-review: True
trigger:
gerrit:
- event: comment-added
approval:
- Approved: 1
success:
gerrit:
Verified: 2
submit: true
failure:
gerrit:
Verified: -2
start:
gerrit:
Verified: 0
precedence: high
- job:
name: base
parent: null

View File

@ -0,0 +1 @@
test

View File

@ -0,0 +1,10 @@
- hosts: all
  tasks:
    - name: Pause and let child run
      zuul_return:
        data:
          zuul:
            pause: true
            artifacts:
              - name: image
                url: http://example.com/image

View File

@ -0,0 +1,4 @@
- hosts: all
  tasks:
    - debug:
        var: zuul.artifacts

View File

@ -0,0 +1,26 @@
- job:
name: image-builder
provides:
- image
run: playbooks/image-builder.yaml
- job:
name: image-user
requires:
- image
run: playbooks/image-user.yaml
- project:
check:
jobs:
- image-builder
- image-user:
dependencies:
- image-builder
gate:
queue: integrated
jobs:
- image-builder
- image-user:
dependencies:
- image-builder

View File

@ -0,0 +1,8 @@
- project:
check:
jobs:
- image-user
gate:
queue: integrated
jobs:
- image-user

View File

@ -0,0 +1,8 @@
- tenant:
name: tenant-one
source:
gerrit:
config-projects:
- common-config
- org/project1
- org/project2

View File

@ -0,0 +1,72 @@
- pipeline:
name: check
manager: independent
trigger:
gerrit:
- event: patchset-created
success:
gerrit:
Verified: 1
resultsdb_mysql: null
resultsdb_postgresql: null
failure:
gerrit:
Verified: -1
resultsdb_mysql: null
resultsdb_postgresql: null
- pipeline:
name: gate
manager: dependent
success-message: Build succeeded (gate).
trigger:
gerrit:
- event: comment-added
approval:
- Approved: 1
success:
gerrit:
Verified: 2
submit: true
failure:
gerrit:
Verified: -2
start:
gerrit:
Verified: 0
precedence: high
- job:
name: base
parent: null
run: playbooks/base.yaml
- job:
name: image-builder
provides: images
- job:
name: image-user
requires: images
- project:
name: org/project1
check:
jobs:
- image-builder
gate:
queue: integrated
jobs:
- image-builder
- image-user:
dependencies: image-builder
- project:
name: org/project2
check:
jobs:
- image-user
gate:
queue: integrated
jobs:
- image-user

View File

@ -0,0 +1,58 @@
- pipeline:
name: check
manager: independent
trigger:
gerrit:
- event: patchset-created
success:
gerrit:
Verified: 1
failure:
gerrit:
Verified: -1
- pipeline:
name: gate
manager: dependent
success-message: Build succeeded (gate).
trigger:
gerrit:
- event: comment-added
approval:
- Approved: 1
success:
gerrit:
Verified: 2
submit: true
failure:
gerrit:
Verified: -2
start:
gerrit:
Verified: 0
precedence: high
- job:
name: base
parent: null
run: playbooks/base.yaml
- job:
name: image-builder
provides: images
- job:
name: image-user
requires: images
- project:
name: org/project1
gate:
jobs:
- image-builder
- project:
name: org/project2
gate:
jobs:
- image-user

View File

@ -0,0 +1,70 @@
- pipeline:
name: check
manager: independent
trigger:
gerrit:
- event: patchset-created
success:
gerrit:
Verified: 1
resultsdb_mysql: null
resultsdb_postgresql: null
failure:
gerrit:
Verified: -1
resultsdb_mysql: null
resultsdb_postgresql: null
- pipeline:
name: gate
manager: dependent
success-message: Build succeeded (gate).
trigger:
gerrit:
- event: comment-added
approval:
- Approved: 1
success:
gerrit:
Verified: 2
submit: true
failure:
gerrit:
Verified: -2
start:
gerrit:
Verified: 0
precedence: high
- job:
name: base
parent: null
run: playbooks/base.yaml
- job:
name: image-builder
provides: images
- job:
name: image-user
requires: images
- project:
name: org/project1
check:
jobs:
- image-builder
gate:
queue: integrated
jobs:
- image-builder
- project:
name: org/project2
check:
jobs:
- image-user
gate:
queue: integrated
jobs:
- image-user

View File

@ -28,6 +28,7 @@ from zuul.lib import encryption
from tests.base import (
AnsibleZuulTestCase,
ZuulTestCase,
ZuulDBTestCase,
FIXTURE_DIR,
simple_layout,
)
@ -4714,3 +4715,290 @@ class TestContainerJobs(AnsibleZuulTestCase):
dict(name='container-machine', result='SUCCESS', changes='1,1'),
dict(name='container-native', result='SUCCESS', changes='1,1'),
])
class TestProvidesRequiresPause(AnsibleZuulTestCase):
tenant_config_file = "config/provides-requires-pause/main.yaml"
def test_provides_requires_pause(self):
# Changes share a queue, with both running at the same time.
self.executor_server.hold_jobs_in_build = True
A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
A.addApproval('Code-Review', 2)
self.fake_gerrit.addEvent(A.addApproval('Approved', 1))
self.waitUntilSettled()
self.assertEqual(len(self.builds), 1)
B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
B.addApproval('Code-Review', 2)
self.fake_gerrit.addEvent(B.addApproval('Approved', 1))
self.waitUntilSettled()
self.assertEqual(len(self.builds), 1)
# Release image-builder; it should cause both instances of
# image-user to run.
self.executor_server.hold_jobs_in_build = False
self.executor_server.release()
self.waitUntilSettled()
self.assertHistory([
dict(name='image-builder', result='SUCCESS', changes='1,1'),
dict(name='image-user', result='SUCCESS', changes='1,1'),
dict(name='image-user', result='SUCCESS', changes='1,1 2,1'),
], ordered=False)
build = self.getJobFromHistory('image-user', project='org/project2')
self.assertEqual(
build.parameters['zuul']['artifacts'],
[{
'project': 'org/project1',
'change': '1',
'patchset': '1',
'job': 'image-builder',
'url': 'http://example.com/image',
'name': 'image',
}])
class TestProvidesRequires(ZuulDBTestCase):
config_file = "zuul-sql-driver.conf"
@simple_layout('layouts/provides-requires.yaml')
def test_provides_requires_shared_queue_fast(self):
# Changes share a queue, but with only one job, the first
# merges before the second starts.
self.executor_server.hold_jobs_in_build = True
A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
self.executor_server.returnData(
'image-builder', A,
{'zuul':
{'artifacts': [
{'name': 'image', 'url': 'http://example.com/image'},
]}}
)
A.addApproval('Code-Review', 2)
self.fake_gerrit.addEvent(A.addApproval('Approved', 1))
self.waitUntilSettled()
self.assertEqual(len(self.builds), 1)
B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
B.addApproval('Code-Review', 2)
self.fake_gerrit.addEvent(B.addApproval('Approved', 1))
self.waitUntilSettled()
self.assertEqual(len(self.builds), 1)
self.executor_server.hold_jobs_in_build = False
self.executor_server.release()
self.waitUntilSettled()
self.assertHistory([
dict(name='image-builder', result='SUCCESS', changes='1,1'),
dict(name='image-user', result='SUCCESS', changes='1,1 2,1'),
])
# Data are not passed in this instance because the builder
# change merges before the user job runs.
self.assertFalse('artifacts' in self.history[-1].parameters['zuul'])
@simple_layout('layouts/provides-requires-two-jobs.yaml')
def test_provides_requires_shared_queue_slow(self):
# Changes share a queue, with both running at the same time.
self.executor_server.hold_jobs_in_build = True
A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
self.executor_server.returnData(
'image-builder', A,
{'zuul':
{'artifacts': [
{'name': 'image', 'url': 'http://example.com/image'},
]}}
)
A.addApproval('Code-Review', 2)
self.fake_gerrit.addEvent(A.addApproval('Approved', 1))
self.waitUntilSettled()
self.assertEqual(len(self.builds), 1)
B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
B.addApproval('Code-Review', 2)
self.fake_gerrit.addEvent(B.addApproval('Approved', 1))
self.waitUntilSettled()
self.assertEqual(len(self.builds), 1)
# Release image-builder; it should cause both instances of
# image-user to run.
self.executor_server.release()
self.waitUntilSettled()
self.assertEqual(len(self.builds), 2)
self.assertHistory([
dict(name='image-builder', result='SUCCESS', changes='1,1'),
])
self.orderedRelease()
self.waitUntilSettled()
self.assertHistory([
dict(name='image-builder', result='SUCCESS', changes='1,1'),
dict(name='image-user', result='SUCCESS', changes='1,1'),
dict(name='image-user', result='SUCCESS', changes='1,1 2,1'),
])
self.assertEqual(
self.history[-1].parameters['zuul']['artifacts'],
[{
'project': 'org/project1',
'change': '1',
'patchset': '1',
'job': 'image-builder',
'url': 'http://example.com/image',
'name': 'image',
}])
@simple_layout('layouts/provides-requires-unshared.yaml')
def test_provides_requires_unshared_queue(self):
self.executor_server.hold_jobs_in_build = True
A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
self.executor_server.returnData(
'image-builder', A,
{'zuul':
{'artifacts': [
{'name': 'image', 'url': 'http://example.com/image'},
]}}
)
A.addApproval('Code-Review', 2)
self.fake_gerrit.addEvent(A.addApproval('Approved', 1))
self.waitUntilSettled()
self.assertEqual(len(self.builds), 1)
B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
B.data['commitMessage'] = '%s\n\nDepends-On: %s\n' % (
B.subject, A.data['id'])
B.addApproval('Code-Review', 2)
self.fake_gerrit.addEvent(B.addApproval('Approved', 1))
self.waitUntilSettled()
self.assertEqual(len(self.builds), 1)
self.executor_server.hold_jobs_in_build = False
self.executor_server.release()
self.waitUntilSettled()
self.assertHistory([
dict(name='image-builder', result='SUCCESS', changes='1,1'),
])
self.fake_gerrit.addEvent(B.addApproval('Approved', 1))
self.waitUntilSettled()
self.assertHistory([
dict(name='image-builder', result='SUCCESS', changes='1,1'),
dict(name='image-user', result='SUCCESS', changes='2,1'),
])
# Data are not passed in this instance because the builder
# change merges before the user job runs.
self.assertFalse('artifacts' in self.history[-1].parameters['zuul'])
@simple_layout('layouts/provides-requires.yaml')
def test_provides_requires_check_current(self):
self.executor_server.hold_jobs_in_build = True
A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
self.executor_server.returnData(
'image-builder', A,
{'zuul':
{'artifacts': [
{'name': 'image', 'url': 'http://example.com/image'},
]}}
)
self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
self.waitUntilSettled()
self.assertEqual(len(self.builds), 1)
B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
B.data['commitMessage'] = '%s\n\nDepends-On: %s\n' % (
B.subject, A.data['id'])
self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
self.waitUntilSettled()
self.assertEqual(len(self.builds), 1)
self.executor_server.hold_jobs_in_build = False
self.executor_server.release()
self.waitUntilSettled()
self.assertHistory([
dict(name='image-builder', result='SUCCESS', changes='1,1'),
dict(name='image-user', result='SUCCESS', changes='1,1 2,1'),
])
self.assertEqual(
self.history[-1].parameters['zuul']['artifacts'],
[{
'project': 'org/project1',
'change': '1',
'patchset': '1',
'job': 'image-builder',
'url': 'http://example.com/image',
'name': 'image',
}])
@simple_layout('layouts/provides-requires.yaml')
def test_provides_requires_check_old_success(self):
A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
self.executor_server.returnData(
'image-builder', A,
{'zuul':
{'artifacts': [
{'name': 'image', 'url': 'http://example.com/image'},
]}}
)
self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
self.waitUntilSettled()
self.assertHistory([
dict(name='image-builder', result='SUCCESS', changes='1,1'),
])
B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
B.data['commitMessage'] = '%s\n\nDepends-On: %s\n' % (
B.subject, A.data['id'])
self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
self.waitUntilSettled()
self.assertHistory([
dict(name='image-builder', result='SUCCESS', changes='1,1'),
dict(name='image-user', result='SUCCESS', changes='1,1 2,1'),
])
self.assertEqual(
self.history[-1].parameters['zuul']['artifacts'],
[{
'project': 'org/project1',
'change': '1',
'patchset': '1',
'job': 'image-builder',
'url': 'http://example.com/image',
'name': 'image',
}])
@simple_layout('layouts/provides-requires.yaml')
def test_provides_requires_check_old_failure(self):
A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
self.executor_server.failJob('image-builder', A)
self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
self.waitUntilSettled()
self.assertHistory([
dict(name='image-builder', result='FAILURE', changes='1,1'),
])
B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
B.data['commitMessage'] = '%s\n\nDepends-On: %s\n' % (
B.subject, A.data['id'])
self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
self.waitUntilSettled()
self.assertHistory([
dict(name='image-builder', result='FAILURE', changes='1,1'),
])
self.assertIn('image-user : SKIPPED', B.messages[0])
self.assertIn('not met by build', B.messages[0])

View File

@ -305,10 +305,13 @@ class TestWeb(BaseTestWeb):
'parent': 'base',
'post_review': None,
'protected': None,
'provides': [],
'required_projects': [],
'requires': [],
'roles': [common_config_role],
'semaphore': None,
'source_context': source_ctx,
'tags': [],
'timeout': None,
'variables': {},
'variant_description': '',
@ -337,10 +340,13 @@ class TestWeb(BaseTestWeb):
'parent': 'base',
'post_review': None,
'protected': None,
'provides': [],
'required_projects': [],
'requires': [],
'roles': [common_config_role],
'semaphore': None,
'source_context': source_ctx,
'tags': [],
'timeout': None,
'variables': {},
'variant_description': 'stable',
@ -363,13 +369,16 @@ class TestWeb(BaseTestWeb):
'parent': 'base',
'post_review': None,
'protected': None,
'provides': [],
'required_projects': [
{'override_branch': None,
'override_checkout': None,
'project_name': 'review.example.com/org/project'}],
'requires': [],
'roles': [common_config_role],
'semaphore': None,
'source_context': source_ctx,
'tags': [],
'timeout': None,
'variables': {},
'variant_description': '',
@ -434,13 +443,16 @@ class TestWeb(BaseTestWeb):
'parent': 'base',
'post_review': None,
'protected': None,
'provides': [],
'required_projects': [],
'requires': [],
'roles': [],
'semaphore': None,
'source_context': {
'branch': 'master',
'path': 'zuul.yaml',
'project': 'common-config'},
'tags': [],
'timeout': None,
'variables': {},
'variant_description': '',
@ -458,13 +470,16 @@ class TestWeb(BaseTestWeb):
'parent': 'base',
'post_review': None,
'protected': None,
'provides': [],
'required_projects': [],
'requires': [],
'roles': [],
'semaphore': None,
'source_context': {
'branch': 'master',
'path': 'zuul.yaml',
'project': 'common-config'},
'tags': [],
'timeout': None,
'variables': {},
'variant_description': '',
@ -482,13 +497,16 @@ class TestWeb(BaseTestWeb):
'parent': 'base',
'post_review': None,
'protected': None,
'provides': [],
'required_projects': [],
'requires': [],
'roles': [],
'semaphore': None,
'source_context': {
'branch': 'master',
'path': 'zuul.yaml',
'project': 'common-config'},
'tags': [],
'timeout': None,
'variables': {},
'variant_description': '',
@ -506,13 +524,16 @@ class TestWeb(BaseTestWeb):
'parent': 'base',
'post_review': None,
'protected': None,
'provides': [],
'required_projects': [],
'requires': [],
'roles': [],
'semaphore': None,
'source_context': {
'branch': 'master',
'path': 'zuul.yaml',
'project': 'common-config'},
'tags': [],
'timeout': None,
'variables': {},
'variant_description': '',

View File

@ -545,6 +545,8 @@ class JobParser(object):
'final': bool,
'abstract': bool,
'protected': bool,
'requires': to_list(str),
'provides': to_list(str),
'failure-message': str,
'success-message': str,
'failure-url': str,
@ -769,11 +771,10 @@ class JobParser(object):
semaphore.get('name'),
semaphore.get('resources-first', False))
for k in ('tags', 'requires', 'provides', 'dependencies'):
    v = frozenset(as_list(conf.get(k)))
    if v:
        setattr(job, k, v)
variables = conf.get('vars', None)
if variables:

View File

@ -0,0 +1,46 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""add_provides
Revision ID: 39d302d34d38
Revises: 649ce63b5fe5
Create Date: 2019-01-28 15:01:07.408072
"""
# revision identifiers, used by Alembic.
revision = '39d302d34d38'
down_revision = '649ce63b5fe5'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
PROVIDES_TABLE = 'zuul_provides'
BUILD_TABLE = 'zuul_build'
def upgrade(table_prefix=''):
op.create_table(
table_prefix + PROVIDES_TABLE,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('build_id', sa.Integer,
sa.ForeignKey(table_prefix + BUILD_TABLE + ".id")),
sa.Column('name', sa.String(255)),
)
def downgrade():
raise Exception("Downgrades not supported")

View File

@ -28,6 +28,7 @@ from zuul.connection import BaseConnection
BUILDSET_TABLE = 'zuul_buildset'
BUILD_TABLE = 'zuul_build'
ARTIFACT_TABLE = 'zuul_artifact'
PROVIDES_TABLE = 'zuul_provides'
class DatabaseSession(object):
@ -56,17 +57,21 @@ class DatabaseSession(object):
def getBuilds(self, tenant=None, project=None, pipeline=None,
change=None, branch=None, patchset=None, ref=None,
newrev=None, uuid=None, job_name=None, voting=None,
node_name=None, result=None, provides=None,
limit=50, offset=0):
build_table = self.connection.zuul_build_table
buildset_table = self.connection.zuul_buildset_table
provides_table = self.connection.zuul_provides_table
# contains_eager allows us to perform eager loading on the
# buildset *and* use that table in filters (unlike
# joinedload).
q = self.session().query(self.connection.buildModel).\
    join(self.connection.buildSetModel).\
    outerjoin(self.connection.providesModel).\
    options(orm.contains_eager(self.connection.buildModel.buildset),
            orm.selectinload(self.connection.buildModel.provides),
            orm.selectinload(self.connection.buildModel.artifacts)).\
    with_hint(build_table, 'USE INDEX (PRIMARY)', 'mysql')
@ -83,6 +88,7 @@ class DatabaseSession(object):
q = self.listFilter(q, build_table.c.voting, voting)
q = self.listFilter(q, build_table.c.node_name, node_name)
q = self.listFilter(q, build_table.c.result, result)
q = self.listFilter(q, provides_table.c.name, provides)
q = q.order_by(build_table.c.id.desc()).\
limit(limit).\
@ -224,6 +230,15 @@ class SQLConnection(BaseConnection):
session.flush()
return a
def createProvides(self, *args, **kw):
session = orm.session.Session.object_session(self)
p = ProvidesModel(*args, **kw)
p.build_id = self.id
self.provides.append(p)
session.add(p)
session.flush()
return p
class ArtifactModel(Base):
__tablename__ = self.table_prefix + ARTIFACT_TABLE
id = sa.Column(sa.Integer, primary_key=True)
@ -233,6 +248,17 @@ class SQLConnection(BaseConnection):
url = sa.Column(sa.TEXT())
build = orm.relationship(BuildModel, backref="artifacts")
class ProvidesModel(Base):
__tablename__ = self.table_prefix + PROVIDES_TABLE
id = sa.Column(sa.Integer, primary_key=True)
build_id = sa.Column(sa.Integer, sa.ForeignKey(
self.table_prefix + BUILD_TABLE + ".id"))
name = sa.Column(sa.String(255))
build = orm.relationship(BuildModel, backref="provides")
self.providesModel = ProvidesModel
self.zuul_provides_table = self.providesModel.__table__
self.artifactModel = ArtifactModel
self.zuul_artifact_table = self.artifactModel.__table__

View File

@ -16,9 +16,9 @@ import datetime
import logging
import time
import voluptuous as v
import urllib.parse
from zuul.reporter import BaseReporter
from zuul.lib.artifacts import get_artifacts_from_result_data
class SQLReporter(BaseReporter):
@ -27,26 +27,6 @@ class SQLReporter(BaseReporter):
name = 'sql'
log = logging.getLogger("zuul.SQLReporter")
artifact = {
'name': str,
'url': str,
}
zuul_data = {
'zuul': {
'log_url': str,
'artifacts': [artifact],
v.Extra: object,
}
}
artifact_schema = v.Schema(zuul_data)
def validateArtifactSchema(self, data):
try:
self.artifact_schema(data)
except Exception:
return False
return True
def report(self, item):
"""Create an entry into a database."""
@ -104,32 +84,13 @@ class SQLReporter(BaseReporter):
node_name=build.node_name,
)

for provides in job.provides:
    db_build.createProvides(name=provides)

for artifact in get_artifacts_from_result_data(
        build.result_data,
        logger=self.log):
    db_build.createArtifact(**artifact)

def getSchema():

View File

@ -165,6 +165,8 @@ class ExecutorClient(object):
timeout=job.timeout,
jobtags=sorted(job.tags),
_inheritance_path=list(job.inheritance_path))
if job.artifact_data:
zuul_params['artifacts'] = job.artifact_data
if job.override_checkout:
zuul_params['override_checkout'] = job.override_checkout
if hasattr(item.change, 'branch'):

zuul/lib/artifacts.py (new file, 69 lines)
View File

@ -0,0 +1,69 @@
# Copyright 2018-2019 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import voluptuous as v
import urllib.parse
artifact = {
'name': str,
'url': str,
}
zuul_data = {
'zuul': {
'log_url': str,
'artifacts': [artifact],
v.Extra: object,
}
}
artifact_schema = v.Schema(zuul_data)
def validate_artifact_schema(data):
try:
artifact_schema(data)
except Exception:
return False
return True
def get_artifacts_from_result_data(result_data, logger=None):
ret = []
if validate_artifact_schema(result_data):
artifacts = result_data.get('zuul', {}).get(
'artifacts', [])
default_url = result_data.get('zuul', {}).get(
'log_url')
if default_url:
if default_url[-1] != '/':
default_url += '/'
for artifact in artifacts:
url = artifact['url']
if default_url:
# If the artifact url is relative, it will be combined
# with the log_url; if it is absolute, it will replace
# it.
try:
url = urllib.parse.urljoin(default_url, url)
except Exception:
if logger:
logger.debug("Error parsing URL:",
exc_info=1)
ret.append({'name': artifact['name'],
'url': url})
else:
    if logger:
        logger.debug("Result data did not pass artifact schema "
                     "validation: %s", result_data)
return ret

View File

@ -28,6 +28,7 @@ import itertools
from zuul import change_matcher
from zuul.lib.config import get_default
from zuul.lib.artifacts import get_artifacts_from_result_data
MERGER_MERGE = 1  # "git merge"
MERGER_MERGE_RESOLVE = 2  # "git merge -s resolve"
@ -164,6 +165,11 @@ class TemplateNotFoundError(Exception):
pass
class RequirementsError(Exception):
"""A job's requirements were not met."""
pass
class Attributes(object):
"""A class to hold attributes for string formatting."""
@ -1070,6 +1076,8 @@ class Job(ConfigObject):
file_matcher=None,
irrelevant_file_matcher=None,  # skip-if
tags=frozenset(),
provides=frozenset(),
requires=frozenset(),
dependencies=frozenset(),
)
@ -1111,6 +1119,7 @@ class Job(ConfigObject):
start_mark=None,
inheritance_path=(),
parent_data=None,
artifact_data=None,
description=None,
variant_description=None,
protected_origin=None,
@ -1161,6 +1170,10 @@ class Job(ConfigObject):
d['protected'] = self.protected
d['voting'] = self.voting
d['timeout'] = self.timeout
d['tags'] = list(self.tags)
d['provides'] = list(self.provides)
d['requires'] = list(self.requires)
d['dependencies'] = list(self.dependencies)
d['attempts'] = self.attempts
d['roles'] = list(map(lambda x: x.toDict(), self.roles))
d['post_review'] = self.post_review
@ -1170,9 +1183,6 @@ class Job(ConfigObject):
d['parent'] = self.parent
else:
d['parent'] = tenant.default_base_job
d['dependencies'] = []
for dependency in self.dependencies:
d['dependencies'].append(dependency)
if isinstance(self.nodeset, str):
ns = tenant.layout.nodesets.get(self.nodeset)
else:
@ -1366,6 +1376,9 @@ class Job(ConfigObject):
self.parent_data = v
self.variables = Job._deepUpdate(self.parent_data, self.variables)
def updateArtifactData(self, artifact_data):
self.artifact_data = artifact_data
def updateProjectVariables(self, project_vars):
# Merge project/template variables directly into the job
# variables. Job variables override project variables.
@ -1522,11 +1535,12 @@ class Job(ConfigObject):
for k in self.context_attributes:
    if (other._get(k) is not None and
            k not in set(['tags', 'requires', 'provides'])):
        setattr(self, k, other._get(k))

for k in ('tags', 'requires', 'provides'):
    if other._get(k) is not None:
        setattr(self, k, getattr(self, k).union(other._get(k)))

self.inheritance_path = self.inheritance_path + (repr(other),)
@ -1947,6 +1961,7 @@ class BuildSet(object):
class QueueItem(object):
"""Represents the position of a Change in a ChangeQueue.
All Changes are enqueued into ChangeQueue in a QueueItem. The QueueItem
@ -1973,6 +1988,7 @@ class QueueItem(object):
self.layout = None
self.project_pipeline_config = None
self.job_graph = None
self._cached_sql_results = None
def __repr__(self):
if self.pipeline:
@ -2169,6 +2185,110 @@ class QueueItem(object):
return False
return self.item_ahead.isHoldingFollowingChanges()
def _getRequirementsResultFromSQL(self, requirements):
# This either returns data or raises an exception
if self._cached_sql_results is None:
sql_driver = self.pipeline.manager.sched.connections.drivers['sql']
conn = sql_driver.tenant_connections.get(self.pipeline.tenant.name)
if conn:
builds = conn.getBuilds(
tenant=self.pipeline.tenant.name,
project=self.change.project.name,
pipeline=self.pipeline.name,
change=self.change.number,
branch=self.change.branch,
patchset=self.change.patchset,
provides=list(requirements))
else:
builds = []
# Just look at the most recent buildset.
# TODO: query for a buildset instead of filtering.
builds = [b for b in builds
if b.buildset.uuid == builds[0].buildset.uuid]
self._cached_sql_results = builds
builds = self._cached_sql_results
data = []
if not builds:
return data
for build in builds:
if build.result != 'SUCCESS':
provides = [x.name for x in build.provides]
requirement = list(requirements.intersection(set(provides)))
raise RequirementsError(
"Requirements %s not met by build %s" % (
requirement, build.uuid))
else:
artifacts = [{'name': a.name,
'url': a.url,
'project': build.buildset.project,
'change': str(build.buildset.change),
'patchset': build.buildset.patchset,
'job': build.job_name}
for a in build.artifacts]
data += artifacts
return data
def providesRequirements(self, requirements, data):
# Mutates data and returns True/False indicating whether the
# requirements are satisfied.
if not requirements:
return True
if not self.live:
# Look for this item in other queues in the pipeline.
item = None
found = False
for item in self.pipeline.getAllItems():
if item.live and item.change == self.change:
found = True
break
if found:
if not item.providesRequirements(requirements, data):
return False
else:
# Look for this item in the SQL DB.
data += self._getRequirementsResultFromSQL(requirements)
if self.hasJobGraph():
for job in self.getJobs():
if job.provides.intersection(requirements):
build = self.current_build_set.getBuild(job.name)
if not build:
return False
if build.result and build.result != 'SUCCESS':
return False
if not build.result and not build.paused:
return False
artifacts = get_artifacts_from_result_data(
build.result_data,
logger=self.log)
artifacts = [{'name': a['name'],
'url': a['url'],
'project': self.change.project.name,
'change': self.change.number,
'patchset': self.change.patchset,
'job': build.job.name}
for a in artifacts]
data += artifacts
if not self.item_ahead:
return True
return self.item_ahead.providesRequirements(requirements, data)
def jobRequirementsReady(self, job):
if not self.item_ahead:
return True
try:
data = []
ret = self.item_ahead.providesRequirements(job.requires, data)
job.updateArtifactData(data)
except RequirementsError as e:
self.warning(str(e))
fakebuild = Build(job, None)
fakebuild.result = 'SKIPPED'
self.addBuild(fakebuild)
ret = True
return ret
def findJobsToRun(self, semaphore_handler):
torun = []
if not self.live:
@ -2196,6 +2316,8 @@ class QueueItem(object):
for job in self.job_graph.getJobs():
if job not in jobs_not_started:
continue
if not self.jobRequirementsReady(job):
continue
all_parent_jobs_successful = True
parent_builds_with_data = {}
for parent_job in self.job_graph.getParentJobsRecursively(
@ -2260,6 +2382,8 @@ class QueueItem(object):
for job in self.job_graph.getJobs():
if job not in jobs_not_requested:
continue
if not self.jobRequirementsReady(job):
continue
all_parent_jobs_successful = True
for parent_job in self.job_graph.getParentJobsRecursively(
job.name):

View File

@ -440,6 +440,7 @@ class ZuulWebAPI(object):
'newrev': buildset.newrev,
'ref_url': buildset.ref_url,
'artifacts': [],
'provides': [],
}
for artifact in build.artifacts:
@ -447,6 +448,10 @@ class ZuulWebAPI(object):
'name': artifact.name,
'url': artifact.url,
})
for provides in build.provides:
ret['provides'].append({
'name': provides.name,
})
return ret
@cherrypy.expose