Merge "Finish circular dependency refactor"

This commit is contained in:
Zuul 2024-02-10 21:27:22 +00:00 committed by Gerrit Code Review
commit 1beac435ab
42 changed files with 2187 additions and 2650 deletions

View File

@ -192,3 +192,9 @@ Version 25
:Prior Zuul version: 9.3.0
:Description: Add job_uuid to BuildRequests and BuildResultEvents.
Affects schedulers and executors.
Version 26
----------
:Prior Zuul version: 9.5.0
:Description: Refactor circular dependencies.
Affects schedulers and executors.

View File

@ -0,0 +1,60 @@
---
prelude: >
This release includes a significant refactoring of the internal
handling of circular dependencies. This requires some changes for
consumers of Zuul output (via some reporters or the REST API) and
requires special care during upgrades. In the case of a
dependency cycle between changes, Zuul pipeline queue items will
now represent multiple changes rather than a single change. This
allows for more intuitive behavior and information display as well
as better handling of job deduplication.
upgrade:
- |
    Zuul cannot be upgraded to this version while running. To upgrade:
* Stop all Zuul components running the previous version
(stopping Nodepool is optional).
* On a scheduler machine or image (with the scheduler stopped)
and the new version of Zuul, run the command:
zuul-admin delete-state --keep-config-cache
This will delete all of the pipeline state from ZooKeeper, but
it will retain the configuration cache (which contains all of
the project configuration from zuul.yaml files). This will
speed up the startup process.
* Start all Zuul components on the new version.
- The MQTT reporter now includes a job_uuid field to correlate retry
builds with final builds.
deprecations:
- |
The syntax of string substitution in pipeline reporter messages
has changed. Since queue items may now represent more than one
change, the `{change}` substitution in messages is deprecated and
will be removed in a future version. To maintain backwards
    compatibility, it currently refers to the arbitrary first change
in the list of changes for a queue item. Please upgrade your
usage to use the new `{changes}` substitution which is a list.
- |
The syntax of string substitution in SMTP reporter messages
has changed. Since queue items may now represent more than one
change, the `{change}` substitution in messages is deprecated and
will be removed in a future version. To maintain backwards
    compatibility, it currently refers to the arbitrary first change
in the list of changes for a queue item. Please upgrade your
usage to use the new `{changes}` substitution which is a list.
- |
The MQTT and Elasticsearch reporters now include a `changes` field
which is a list of dictionaries representing the changes included
    in an item. The corresponding scalar fields describing what was
previously the only change associated with an item remain for
    backwards compatibility and refer to the arbitrary first change in
    the list of changes for a queue item. These scalar values will be
    removed in a future version of Zuul. Please upgrade your usage to
use the new `changes` entries.
- |
The `zuul.bundle_id` variable is deprecated and will be removed in
    a future version. For backwards compatibility, it currently
duplicates the item uuid.

View File

@ -881,32 +881,32 @@ class FakeGerritChange(object):
if 'approved' not in label:
label['approved'] = app['by']
revisions = {}
rev = self.patchsets[-1]
num = len(self.patchsets)
files = {}
for f in rev['files']:
if f['file'] == '/COMMIT_MSG':
continue
files[f['file']] = {"status": f['type'][0]} # ADDED -> A
parent = '0000000000000000000000000000000000000000'
if self.depends_on_change:
parent = self.depends_on_change.patchsets[
self.depends_on_patchset - 1]['revision']
revisions[rev['revision']] = {
"kind": "REWORK",
"_number": num,
"created": rev['createdOn'],
"uploader": rev['uploader'],
"ref": rev['ref'],
"commit": {
"subject": self.subject,
"message": self.data['commitMessage'],
"parents": [{
"commit": parent,
}]
},
"files": files
}
for i, rev in enumerate(self.patchsets):
num = i + 1
files = {}
for f in rev['files']:
if f['file'] == '/COMMIT_MSG':
continue
files[f['file']] = {"status": f['type'][0]} # ADDED -> A
parent = '0000000000000000000000000000000000000000'
if self.depends_on_change:
parent = self.depends_on_change.patchsets[
self.depends_on_patchset - 1]['revision']
revisions[rev['revision']] = {
"kind": "REWORK",
"_number": num,
"created": rev['createdOn'],
"uploader": rev['uploader'],
"ref": rev['ref'],
"commit": {
"subject": self.subject,
"message": self.data['commitMessage'],
"parents": [{
"commit": parent,
}]
},
"files": files
}
data = {
"id": self.project + '~' + self.branch + '~' + self.data['id'],
"project": self.project,
@ -1462,13 +1462,14 @@ class FakeGerritConnection(gerritconnection.GerritConnection):
}
return event
def review(self, item, message, submit, labels, checks_api, file_comments,
phase1, phase2, zuul_event_id=None):
def review(self, item, change, message, submit, labels,
checks_api, file_comments, phase1, phase2,
zuul_event_id=None):
if self.web_server:
return super(FakeGerritConnection, self).review(
item, message, submit, labels, checks_api, file_comments,
phase1, phase2, zuul_event_id)
self._test_handle_review(int(item.change.number), message, submit,
item, change, message, submit, labels, checks_api,
file_comments, phase1, phase2, zuul_event_id)
self._test_handle_review(int(change.number), message, submit,
labels, phase1, phase2)
def _test_get_submitted_together(self, change):
@ -3577,9 +3578,11 @@ class TestingExecutorApi(HoldableExecutorApi):
self._test_build_request_job_map = {}
if build_request.uuid in self._test_build_request_job_map:
return self._test_build_request_job_map[build_request.uuid]
job_name = build_request.job_name
params = self.getParams(build_request)
job_name = params['zuul']['job']
self._test_build_request_job_map[build_request.uuid] = job_name
return build_request.job_name
return job_name
def release(self, what=None):
"""

View File

@ -691,7 +691,7 @@ class FakeGithubSession(object):
if commit is None:
commit = FakeCommit(head_sha)
repo._commits[head_sha] = commit
repo.check_run_counter += 1
repo.check_run_counter += 1
check_run = commit.set_check_run(
str(repo.check_run_counter),
json['name'],

File diff suppressed because it is too large Load Diff

View File

@ -1165,7 +1165,7 @@ class TestExecutorFailure(ZuulTestCase):
self.waitUntilSettled()
job = items[0].current_build_set.job_graph.getJob(
'project-merge', items[0].change.cache_key)
'project-merge', items[0].changes[0].cache_key)
build_retries = items[0].current_build_set.getRetryBuildsForJob(job)
self.assertEqual(len(build_retries), 1)
self.assertIsNotNone(build_retries[0].error_detail)

View File

@ -232,7 +232,7 @@ class TestJob(BaseTestCase):
change = model.Change(self.project)
change.branch = 'master'
change.cache_stat = Dummy(key=Dummy(reference=uuid.uuid4().hex))
item = self.queue.enqueueChange(change, None)
item = self.queue.enqueueChanges([change], None)
self.assertTrue(base.changeMatchesBranch(change))
self.assertTrue(python27.changeMatchesBranch(change))
@ -249,7 +249,7 @@ class TestJob(BaseTestCase):
change.branch = 'stable/diablo'
change.cache_stat = Dummy(key=Dummy(reference=uuid.uuid4().hex))
item = self.queue.enqueueChange(change, None)
item = self.queue.enqueueChanges([change], None)
self.assertTrue(base.changeMatchesBranch(change))
self.assertTrue(python27.changeMatchesBranch(change))
@ -300,7 +300,7 @@ class TestJob(BaseTestCase):
change.branch = 'master'
change.cache_stat = Dummy(key=Dummy(reference=uuid.uuid4().hex))
change.files = ['/COMMIT_MSG', 'ignored-file']
item = self.queue.enqueueChange(change, None)
item = self.queue.enqueueChanges([change], None)
self.assertTrue(base.changeMatchesFiles(change))
self.assertFalse(python27.changeMatchesFiles(change))
@ -375,7 +375,7 @@ class TestJob(BaseTestCase):
# Test master
change.branch = 'master'
change.cache_stat = Dummy(key=Dummy(reference=uuid.uuid4().hex))
item = self.queue.enqueueChange(change, None)
item = self.queue.enqueueChanges([change], None)
with testtools.ExpectedException(
Exception,
"Pre-review pipeline gate does not allow post-review job"):
@ -453,7 +453,7 @@ class TestJob(BaseTestCase):
change = model.Change(self.project)
change.branch = 'master'
change.cache_stat = Dummy(key=Dummy(reference=uuid.uuid4().hex))
item = self.queue.enqueueChange(change, None)
item = self.queue.enqueueChanges([change], None)
self.assertTrue(base.changeMatchesBranch(change))
self.assertTrue(python27.changeMatchesBranch(change))
@ -488,6 +488,7 @@ class FakeFrozenJob(model.Job):
super().__init__(name)
self.uuid = uuid.uuid4().hex
self.ref = 'fake reference'
self.all_refs = [self.ref]
class TestGraph(BaseTestCase):

View File

@ -465,53 +465,6 @@ class TestGithubModelUpgrade(ZuulTestCase):
config_file = 'zuul-github-driver.conf'
scheduler_count = 1
@model_version(3)
@simple_layout('layouts/gate-github.yaml', driver='github')
def test_status_checks_removal(self):
# This tests the old behavior -- that changes are not dequeued
# once their required status checks are removed -- since the
# new behavior requires a flag in ZK.
# Contrast with test_status_checks_removal.
github = self.fake_github.getGithubClient()
repo = github.repo_from_project('org/project')
repo._set_branch_protection(
'master', contexts=['something/check', 'tenant-one/gate'])
A = self.fake_github.openFakePullRequest('org/project', 'master', 'A')
self.fake_github.emitEvent(A.getPullRequestOpenedEvent())
self.waitUntilSettled()
self.executor_server.hold_jobs_in_build = True
# Since the required status 'something/check' is not fulfilled,
# no job is expected
self.assertEqual(0, len(self.history))
# Set the required status 'something/check'
repo.create_status(A.head_sha, 'success', 'example.com', 'description',
'something/check')
self.fake_github.emitEvent(A.getPullRequestOpenedEvent())
self.waitUntilSettled()
# Remove it and verify the change is not dequeued (old behavior).
repo.create_status(A.head_sha, 'failed', 'example.com', 'description',
'something/check')
self.fake_github.emitEvent(A.getCommitStatusEvent('something/check',
state='failed',
user='foo'))
self.waitUntilSettled()
self.executor_server.hold_jobs_in_build = False
self.executor_server.release()
self.waitUntilSettled()
# the change should have entered the gate
self.assertHistory([
dict(name='project-test1', result='SUCCESS'),
dict(name='project-test2', result='SUCCESS'),
], ordered=False)
self.assertTrue(A.is_merged)
@model_version(10)
@simple_layout('layouts/github-merge-mode.yaml', driver='github')
def test_merge_method_syntax_check(self):
@ -703,48 +656,6 @@ class TestDefaultBranchUpgrade(ZuulTestCase):
self.assertEqual('foobar', md.default_branch)
class TestDeduplication(ZuulTestCase):
config_file = "zuul-gerrit-github.conf"
tenant_config_file = "config/circular-dependencies/main.yaml"
scheduler_count = 1
def _test_job_deduplication(self):
A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
# A <-> B
A.data["commitMessage"] = "{}\n\nDepends-On: {}\n".format(
A.subject, B.data["url"]
)
B.data["commitMessage"] = "{}\n\nDepends-On: {}\n".format(
B.subject, A.data["url"]
)
A.addApproval('Code-Review', 2)
B.addApproval('Code-Review', 2)
self.fake_gerrit.addEvent(A.addApproval('Approved', 1))
self.fake_gerrit.addEvent(B.addApproval('Approved', 1))
self.waitUntilSettled()
self.assertEqual(A.data['status'], 'MERGED')
self.assertEqual(B.data['status'], 'MERGED')
@simple_layout('layouts/job-dedup-auto-shared.yaml')
@model_version(7)
def test_job_deduplication_auto_shared(self):
self._test_job_deduplication()
self.assertHistory([
dict(name="project1-job", result="SUCCESS", changes="2,1 1,1"),
dict(name="common-job", result="SUCCESS", changes="2,1 1,1"),
dict(name="project2-job", result="SUCCESS", changes="2,1 1,1"),
# This would be deduplicated
dict(name="common-job", result="SUCCESS", changes="2,1 1,1"),
], ordered=False)
self.assertEqual(len(self.fake_nodepool.history), 4)
class TestDataReturn(AnsibleZuulTestCase):
tenant_config_file = 'config/data-return/main.yaml'

View File

@ -1107,7 +1107,7 @@ class TestScheduler(ZuulTestCase):
self.assertEqual(len(queue), 1)
self.assertEqual(queue[0].zone, None)
params = self.executor_server.executor_api.getParams(queue[0])
self.assertEqual(queue[0].job_name, 'project-merge')
self.assertEqual(params['zuul']['job'], 'project-merge')
self.assertEqual(params['items'][0]['number'], '%d' % A.number)
self.executor_api.release('.*-merge')
@ -1121,12 +1121,14 @@ class TestScheduler(ZuulTestCase):
self.assertEqual(len(self.builds), 0)
self.assertEqual(len(queue), 6)
self.assertEqual(queue[0].job_name, 'project-test1')
self.assertEqual(queue[1].job_name, 'project-test2')
self.assertEqual(queue[2].job_name, 'project-test1')
self.assertEqual(queue[3].job_name, 'project-test2')
self.assertEqual(queue[4].job_name, 'project-test1')
self.assertEqual(queue[5].job_name, 'project-test2')
params = [self.executor_server.executor_api.getParams(x)
for x in queue]
self.assertEqual(params[0]['zuul']['job'], 'project-test1')
self.assertEqual(params[1]['zuul']['job'], 'project-test2')
self.assertEqual(params[2]['zuul']['job'], 'project-test1')
self.assertEqual(params[3]['zuul']['job'], 'project-test2')
self.assertEqual(params[4]['zuul']['job'], 'project-test1')
self.assertEqual(params[5]['zuul']['job'], 'project-test2')
self.executor_api.release(queue[0])
self.waitUntilSettled()
@ -2935,16 +2937,16 @@ class TestScheduler(ZuulTestCase):
items = check_pipeline.getAllItems()
self.assertEqual(len(items), 3)
self.assertEqual(items[0].change.number, '1')
self.assertEqual(items[0].change.patchset, '1')
self.assertEqual(items[0].changes[0].number, '1')
self.assertEqual(items[0].changes[0].patchset, '1')
self.assertFalse(items[0].live)
self.assertEqual(items[1].change.number, '2')
self.assertEqual(items[1].change.patchset, '1')
self.assertEqual(items[1].changes[0].number, '2')
self.assertEqual(items[1].changes[0].patchset, '1')
self.assertTrue(items[1].live)
self.assertEqual(items[2].change.number, '1')
self.assertEqual(items[2].change.patchset, '1')
self.assertEqual(items[2].changes[0].number, '1')
self.assertEqual(items[2].changes[0].patchset, '1')
self.assertTrue(items[2].live)
# Add a new patchset to A
@ -2957,16 +2959,16 @@ class TestScheduler(ZuulTestCase):
items = check_pipeline.getAllItems()
self.assertEqual(len(items), 3)
self.assertEqual(items[0].change.number, '1')
self.assertEqual(items[0].change.patchset, '1')
self.assertEqual(items[0].changes[0].number, '1')
self.assertEqual(items[0].changes[0].patchset, '1')
self.assertFalse(items[0].live)
self.assertEqual(items[1].change.number, '2')
self.assertEqual(items[1].change.patchset, '1')
self.assertEqual(items[1].changes[0].number, '2')
self.assertEqual(items[1].changes[0].patchset, '1')
self.assertTrue(items[1].live)
self.assertEqual(items[2].change.number, '1')
self.assertEqual(items[2].change.patchset, '2')
self.assertEqual(items[2].changes[0].number, '1')
self.assertEqual(items[2].changes[0].patchset, '2')
self.assertTrue(items[2].live)
# Add a new patchset to B
@ -2979,16 +2981,16 @@ class TestScheduler(ZuulTestCase):
items = check_pipeline.getAllItems()
self.assertEqual(len(items), 3)
self.assertEqual(items[0].change.number, '1')
self.assertEqual(items[0].change.patchset, '2')
self.assertEqual(items[0].changes[0].number, '1')
self.assertEqual(items[0].changes[0].patchset, '2')
self.assertTrue(items[0].live)
self.assertEqual(items[1].change.number, '1')
self.assertEqual(items[1].change.patchset, '1')
self.assertEqual(items[1].changes[0].number, '1')
self.assertEqual(items[1].changes[0].patchset, '1')
self.assertFalse(items[1].live)
self.assertEqual(items[2].change.number, '2')
self.assertEqual(items[2].change.patchset, '2')
self.assertEqual(items[2].changes[0].number, '2')
self.assertEqual(items[2].changes[0].patchset, '2')
self.assertTrue(items[2].live)
self.builds[0].release()
@ -3055,13 +3057,13 @@ class TestScheduler(ZuulTestCase):
items = check_pipeline.getAllItems()
self.assertEqual(len(items), 3)
self.assertEqual(items[0].change.number, '1')
self.assertEqual(items[0].changes[0].number, '1')
self.assertFalse(items[0].live)
self.assertEqual(items[1].change.number, '2')
self.assertEqual(items[1].changes[0].number, '2')
self.assertTrue(items[1].live)
self.assertEqual(items[2].change.number, '1')
self.assertEqual(items[2].changes[0].number, '1')
self.assertTrue(items[2].live)
# Abandon A
@ -3073,10 +3075,10 @@ class TestScheduler(ZuulTestCase):
items = check_pipeline.getAllItems()
self.assertEqual(len(items), 2)
self.assertEqual(items[0].change.number, '1')
self.assertEqual(items[0].changes[0].number, '1')
self.assertFalse(items[0].live)
self.assertEqual(items[1].change.number, '2')
self.assertEqual(items[1].changes[0].number, '2')
self.assertTrue(items[1].live)
self.executor_server.hold_jobs_in_build = False
@ -4589,8 +4591,9 @@ class TestScheduler(ZuulTestCase):
first = pipeline_status['change_queues'][0]['heads'][0][0]
second = pipeline_status['change_queues'][1]['heads'][0][0]
self.assertIn(first['ref'], ['refs/heads/master', 'refs/heads/stable'])
self.assertIn(second['ref'],
self.assertIn(first['changes'][0]['ref'],
['refs/heads/master', 'refs/heads/stable'])
self.assertIn(second['changes'][0]['ref'],
['refs/heads/master', 'refs/heads/stable'])
self.executor_server.hold_jobs_in_build = False
@ -5799,7 +5802,6 @@ For CI problems and help debugging, contact ci@example.org"""
build_set = items[0].current_build_set
job = list(filter(lambda j: j.name == 'project-test1',
items[0].getJobs()))[0]
build_set.job_graph.getJobFromName(job)
for x in range(3):
# We should have x+1 retried builds for project-test1
@ -8311,8 +8313,8 @@ class TestSemaphore(ZuulTestCase):
1)
items = check_pipeline.getAllItems()
self.assertEqual(items[0].change.number, '1')
self.assertEqual(items[0].change.patchset, '2')
self.assertEqual(items[0].changes[0].number, '1')
self.assertEqual(items[0].changes[0].patchset, '2')
self.assertTrue(items[0].live)
self.executor_server.hold_jobs_in_build = False
@ -8389,7 +8391,8 @@ class TestSemaphore(ZuulTestCase):
# Save some variables for later use while the job is running
check_pipeline = tenant.layout.pipelines['check']
item = check_pipeline.getAllItems()[0]
job = item.getJob('semaphore-one-test1')
job = list(filter(lambda j: j.name == 'semaphore-one-test1',
item.getJobs()))[0]
tenant.semaphore_handler.cleanupLeaks()

View File

@ -717,7 +717,12 @@ class TestSOSCircularDependencies(ZuulTestCase):
self.assertEqual(len(self.builds), 4)
builds = self.builds[:]
self.executor_server.failJob('job1', A)
# Since it's one queue item for the two changes, all 4
# builds need to complete.
builds[0].release()
builds[1].release()
builds[2].release()
builds[3].release()
app.sched.wake_event.set()
self.waitUntilSettled(matcher=[app])
self.assertEqual(A.reported, 2)

View File

@ -79,7 +79,7 @@ class TestTimerAlwaysDynamicBranches(ZuulTestCase):
self.assertEqual(len(pipeline.queues), 2)
for queue in pipeline.queues:
item = queue.queue[0]
self.assertIn(item.change.branch, ['master', 'stable'])
self.assertIn(item.changes[0].branch, ['master', 'stable'])
self.executor_server.hold_jobs_in_build = False

View File

@ -23,7 +23,11 @@ from opentelemetry import trace
def attributes_to_dict(attrlist):
ret = {}
for attr in attrlist:
ret[attr.key] = attr.value.string_value
if attr.value.string_value:
ret[attr.key] = attr.value.string_value
else:
ret[attr.key] = [v.string_value
for v in attr.value.array_value.values]
return ret
@ -247,8 +251,8 @@ class TestTracing(ZuulTestCase):
jobexec.span_id)
item_attrs = attributes_to_dict(item.attributes)
self.assertTrue(item_attrs['ref_number'] == "1")
self.assertTrue(item_attrs['ref_patchset'] == "1")
self.assertTrue(item_attrs['ref_number'] == ["1"])
self.assertTrue(item_attrs['ref_patchset'] == ["1"])
self.assertTrue('zuul_event_id' in item_attrs)
def getSpan(self, name):

View File

@ -1730,8 +1730,8 @@ class TestInRepoConfig(ZuulTestCase):
self.waitUntilSettled()
items = check_pipeline.getAllItems()
self.assertEqual(items[0].change.number, '1')
self.assertEqual(items[0].change.patchset, '1')
self.assertEqual(items[0].changes[0].number, '1')
self.assertEqual(items[0].changes[0].patchset, '1')
self.assertTrue(items[0].live)
in_repo_conf = textwrap.dedent(
@ -1760,8 +1760,8 @@ class TestInRepoConfig(ZuulTestCase):
self.waitUntilSettled()
items = check_pipeline.getAllItems()
self.assertEqual(items[0].change.number, '1')
self.assertEqual(items[0].change.patchset, '2')
self.assertEqual(items[0].changes[0].number, '1')
self.assertEqual(items[0].changes[0].patchset, '2')
self.assertTrue(items[0].live)
self.executor_server.hold_jobs_in_build = False
@ -3438,9 +3438,9 @@ class TestExtraConfigInDependent(ZuulTestCase):
# Jobs in both changes should be success
self.assertHistory([
dict(name='project2-private-extra-file', result='SUCCESS',
changes='3,1 1,1 2,1'),
changes='3,1 2,1 1,1'),
dict(name='project2-private-extra-dir', result='SUCCESS',
changes='3,1 1,1 2,1'),
changes='3,1 2,1 1,1'),
dict(name='project-test1', result='SUCCESS',
changes='3,1 2,1 1,1'),
dict(name='project3-private-extra-file', result='SUCCESS',
@ -3987,8 +3987,8 @@ class TestInRepoJoin(ZuulTestCase):
self.waitUntilSettled()
items = gate_pipeline.getAllItems()
self.assertEqual(items[0].change.number, '1')
self.assertEqual(items[0].change.patchset, '1')
self.assertEqual(items[0].changes[0].number, '1')
self.assertEqual(items[0].changes[0].patchset, '1')
self.assertTrue(items[0].live)
self.executor_server.hold_jobs_in_build = False

View File

@ -173,13 +173,14 @@ class TestWeb(BaseTestWeb):
# information is missing.
self.assertIsNone(q['branch'])
for head in q['heads']:
for change in head:
for item in head:
self.assertIn(
'review.example.com/org/project',
change['project_canonical'])
self.assertTrue(change['active'])
item['changes'][0]['project_canonical'])
self.assertTrue(item['active'])
change = item['changes'][0]
self.assertIn(change['id'], ('1,1', '2,1', '3,1'))
for job in change['jobs']:
for job in item['jobs']:
status_jobs.append(job)
self.assertEqual('project-merge', status_jobs[0]['name'])
# TODO(mordred) pull uuids from self.builds
@ -334,12 +335,13 @@ class TestWeb(BaseTestWeb):
data = self.get_url("api/tenant/tenant-one/status/change/1,1").json()
self.assertEqual(1, len(data), data)
self.assertEqual("org/project", data[0]['project'])
self.assertEqual("org/project", data[0]['changes'][0]['project'])
data = self.get_url("api/tenant/tenant-one/status/change/2,1").json()
self.assertEqual(1, len(data), data)
self.assertEqual("org/project1", data[0]['project'], data)
self.assertEqual("org/project1", data[0]['changes'][0]['project'],
data)
@simple_layout('layouts/nodeset-alternatives.yaml')
def test_web_find_job_nodeset_alternatives(self):
@ -1966,7 +1968,10 @@ class TestBuildInfo(BaseTestWeb):
buildsets = self.get_url("api/tenant/tenant-one/buildsets").json()
self.assertEqual(2, len(buildsets))
project_bs = [x for x in buildsets if x["project"] == "org/project"][0]
project_bs = [
x for x in buildsets
if x["refs"][0]["project"] == "org/project"
][0]
buildset = self.get_url(
"api/tenant/tenant-one/buildset/%s" % project_bs['uuid']).json()
@ -2070,7 +2075,10 @@ class TestArtifacts(BaseTestWeb, AnsibleZuulTestCase):
self.waitUntilSettled()
buildsets = self.get_url("api/tenant/tenant-one/buildsets").json()
project_bs = [x for x in buildsets if x["project"] == "org/project"][0]
project_bs = [
x for x in buildsets
if x["refs"][0]["project"] == "org/project"
][0]
buildset = self.get_url(
"api/tenant/tenant-one/buildset/%s" % project_bs['uuid']).json()
self.assertEqual(3, len(buildset["builds"]))
@ -2672,7 +2680,7 @@ class TestTenantScopedWebApi(BaseTestWeb):
items = tenant.layout.pipelines['gate'].getAllItems()
enqueue_times = {}
for item in items:
enqueue_times[str(item.change)] = item.enqueue_time
enqueue_times[str(item.changes[0])] = item.enqueue_time
# REST API
args = {'pipeline': 'gate',
@ -2699,7 +2707,7 @@ class TestTenantScopedWebApi(BaseTestWeb):
items = tenant.layout.pipelines['gate'].getAllItems()
for item in items:
self.assertEqual(
enqueue_times[str(item.change)], item.enqueue_time)
enqueue_times[str(item.changes[0])], item.enqueue_time)
self.waitUntilSettled()
self.executor_server.release('.*-merge')
@ -2761,7 +2769,7 @@ class TestTenantScopedWebApi(BaseTestWeb):
items = tenant.layout.pipelines['gate'].getAllItems()
enqueue_times = {}
for item in items:
enqueue_times[str(item.change)] = item.enqueue_time
enqueue_times[str(item.changes[0])] = item.enqueue_time
# REST API
args = {'pipeline': 'gate',
@ -2788,7 +2796,7 @@ class TestTenantScopedWebApi(BaseTestWeb):
items = tenant.layout.pipelines['gate'].getAllItems()
for item in items:
self.assertEqual(
enqueue_times[str(item.change)], item.enqueue_time)
enqueue_times[str(item.changes[0])], item.enqueue_time)
self.waitUntilSettled()
self.executor_server.release('.*-merge')
@ -2853,7 +2861,7 @@ class TestTenantScopedWebApi(BaseTestWeb):
if i.live]
enqueue_times = {}
for item in items:
enqueue_times[str(item.change)] = item.enqueue_time
enqueue_times[str(item.changes[0])] = item.enqueue_time
# REST API
args = {'pipeline': 'check',
@ -2882,12 +2890,12 @@ class TestTenantScopedWebApi(BaseTestWeb):
if i.live]
for item in items:
self.assertEqual(
enqueue_times[str(item.change)], item.enqueue_time)
enqueue_times[str(item.changes[0])], item.enqueue_time)
# We can't reliably test for side effects in the check
# pipeline since the change queues are independent, so we
# directly examine the queues.
queue_items = [(item.change.number, item.live) for item in
queue_items = [(item.changes[0].number, item.live) for item in
tenant.layout.pipelines['check'].getAllItems()]
expected = [('1', False),
('2', True),
@ -3555,7 +3563,7 @@ class TestCLIViaWebApi(BaseTestWeb):
items = tenant.layout.pipelines['gate'].getAllItems()
enqueue_times = {}
for item in items:
enqueue_times[str(item.change)] = item.enqueue_time
enqueue_times[str(item.changes[0])] = item.enqueue_time
# Promote B and C using the cli
authz = {'iss': 'zuul_operator',
@ -3581,7 +3589,7 @@ class TestCLIViaWebApi(BaseTestWeb):
items = tenant.layout.pipelines['gate'].getAllItems()
for item in items:
self.assertEqual(
enqueue_times[str(item.change)], item.enqueue_time)
enqueue_times[str(item.changes[0])], item.enqueue_time)
self.waitUntilSettled()
self.executor_server.release('.*-merge')

View File

@ -356,7 +356,7 @@ class TestZuulClientAdmin(BaseTestWeb):
items = tenant.layout.pipelines['gate'].getAllItems()
enqueue_times = {}
for item in items:
enqueue_times[str(item.change)] = item.enqueue_time
enqueue_times[str(item.changes[0])] = item.enqueue_time
# Promote B and C using the cli
authz = {'iss': 'zuul_operator',
@ -382,7 +382,7 @@ class TestZuulClientAdmin(BaseTestWeb):
items = tenant.layout.pipelines['gate'].getAllItems()
for item in items:
self.assertEqual(
enqueue_times[str(item.change)], item.enqueue_time)
enqueue_times[str(item.changes[0])], item.enqueue_time)
self.waitUntilSettled()
self.executor_server.release('.*-merge')

View File

@ -1,4 +1,5 @@
# Copyright 2019 Red Hat, Inc.
# Copyright 2024 Acme Gating, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
@ -37,20 +38,34 @@ class ElasticsearchReporter(BaseReporter):
docs = []
index = '%s.%s-%s' % (self.index, item.pipeline.tenant.name,
time.strftime("%Y.%m.%d"))
changes = [
{
"project": change.project.name,
"change": getattr(change, 'number', None),
"patchset": getattr(change, 'patchset', None),
"ref": getattr(change, 'ref', ''),
"oldrev": getattr(change, 'oldrev', ''),
"newrev": getattr(change, 'newrev', ''),
"branch": getattr(change, 'branch', ''),
"ref_url": change.url,
}
for change in item.changes
]
buildset_doc = {
"uuid": item.current_build_set.uuid,
"build_type": "buildset",
"tenant": item.pipeline.tenant.name,
"pipeline": item.pipeline.name,
"project": item.change.project.name,
"change": getattr(item.change, 'number', None),
"patchset": getattr(item.change, 'patchset', None),
"ref": getattr(item.change, 'ref', ''),
"oldrev": getattr(item.change, 'oldrev', ''),
"newrev": getattr(item.change, 'newrev', ''),
"branch": getattr(item.change, 'branch', ''),
"changes": changes,
"project": item.changes[0].project.name,
"change": getattr(item.changes[0], 'number', None),
"patchset": getattr(item.changes[0], 'patchset', None),
"ref": getattr(item.changes[0], 'ref', ''),
"oldrev": getattr(item.changes[0], 'oldrev', ''),
"newrev": getattr(item.changes[0], 'newrev', ''),
"branch": getattr(item.changes[0], 'branch', ''),
"zuul_ref": item.current_build_set.ref,
"ref_url": item.change.url,
"ref_url": item.changes[0].url,
"result": item.current_build_set.result,
"message": self._formatItemReport(item, with_jobs=False)
}
@ -80,8 +95,21 @@ class ElasticsearchReporter(BaseReporter):
buildset_doc['duration'] = (
buildset_doc['end_time'] - buildset_doc['start_time'])
change = item.getChangeForJob(build.job)
change_doc = {
"project": change.project.name,
"change": getattr(change, 'number', None),
"patchset": getattr(change, 'patchset', None),
"ref": getattr(change, 'ref', ''),
"oldrev": getattr(change, 'oldrev', ''),
"newrev": getattr(change, 'newrev', ''),
"branch": getattr(change, 'branch', ''),
"ref_url": change.url,
}
build_doc = {
"uuid": build.uuid,
"change": change_doc,
"build_type": "build",
"buildset_uuid": buildset_doc['uuid'],
"job_name": build.job.name,

View File

@ -1,6 +1,6 @@
# Copyright 2011 OpenStack, LLC.
# Copyright 2012 Hewlett-Packard Development Company, L.P.
# Copyright 2023 Acme Gating, LLC
# Copyright 2023-2024 Acme Gating, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
@ -1165,24 +1165,23 @@ class GerritConnection(ZKChangeCacheMixin, ZKBranchCacheMixin, BaseConnection):
}
self.event_queue.put(event)
def review(self, item, message, submit, labels, checks_api,
def review(self, item, change, message, submit, labels, checks_api,
file_comments, phase1, phase2, zuul_event_id=None):
if self.session:
meth = self.review_http
else:
meth = self.review_ssh
return meth(item, message, submit, labels, checks_api,
return meth(item, change, message, submit, labels, checks_api,
file_comments, phase1, phase2,
zuul_event_id=zuul_event_id)
def review_ssh(self, item, message, submit, labels, checks_api,
def review_ssh(self, item, change, message, submit, labels, checks_api,
file_comments, phase1, phase2, zuul_event_id=None):
log = get_annotated_logger(self.log, zuul_event_id)
if checks_api:
log.error("Zuul is configured to report to the checks API, "
"but no HTTP password is present for the connection "
"in the configuration file.")
change = item.change
project = change.project.name
cmd = 'gerrit review --project %s' % project
if phase1:
@ -1208,8 +1207,7 @@ class GerritConnection(ZKChangeCacheMixin, ZKBranchCacheMixin, BaseConnection):
out, err = self._ssh(cmd, zuul_event_id=zuul_event_id)
return err
def report_checks(self, log, item, changeid, checkinfo):
change = item.change
def report_checks(self, log, item, change, changeid, checkinfo):
checkinfo = checkinfo.copy()
uuid = checkinfo.pop('uuid', None)
scheme = checkinfo.pop('scheme', None)
@ -1254,10 +1252,9 @@ class GerritConnection(ZKChangeCacheMixin, ZKBranchCacheMixin, BaseConnection):
"attempt %s: %s", x, e)
time.sleep(x * self.submit_retry_backoff)
def review_http(self, item, message, submit, labels,
def review_http(self, item, change, message, submit, labels,
checks_api, file_comments, phase1, phase2,
zuul_event_id=None):
change = item.change
changeid = "%s~%s~%s" % (
urllib.parse.quote(str(change.project), safe=''),
urllib.parse.quote(str(change.branch), safe=''),
@ -1293,7 +1290,7 @@ class GerritConnection(ZKChangeCacheMixin, ZKBranchCacheMixin, BaseConnection):
if self.version >= (2, 13, 0):
data['tag'] = 'autogenerated:zuul:%s' % (item.pipeline.name)
if checks_api:
self.report_checks(log, item, changeid, checks_api)
self.report_checks(log, item, change, changeid, checks_api)
if (message or data.get('labels') or data.get('comments')
or data.get('robot_comments')):
for x in range(1, 4):
@ -1356,7 +1353,7 @@ class GerritConnection(ZKChangeCacheMixin, ZKBranchCacheMixin, BaseConnection):
def queryChangeHTTP(self, number, event=None):
query = ('changes/%s?o=DETAILED_ACCOUNTS&o=CURRENT_REVISION&'
'o=CURRENT_COMMIT&o=CURRENT_FILES&o=LABELS&'
'o=DETAILED_LABELS' % (number,))
'o=DETAILED_LABELS&o=ALL_REVISIONS' % (number,))
if self.version >= (3, 5, 0):
query += '&o=SUBMIT_REQUIREMENTS'
data = self.get(query)

View File

@ -160,9 +160,12 @@ class GerritChange(Change):
'%s/c/%s/+/%s' % (baseurl, self.project.name, self.number),
]
for rev_commit, revision in data['revisions'].items():
if str(revision['_number']) == self.patchset:
self.ref = revision['ref']
self.commit = rev_commit
if str(current_revision['_number']) == self.patchset:
self.ref = current_revision['ref']
self.commit = data['current_revision']
self.is_current_patchset = True
else:
self.is_current_patchset = False

View File

@ -1,4 +1,5 @@
# Copyright 2013 Rackspace Australia
# Copyright 2024 Acme Gating, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
@ -43,44 +44,44 @@ class GerritReporter(BaseReporter):
"""Send a message to gerrit."""
log = get_annotated_logger(self.log, item.event)
ret = []
for change in item.changes:
err = self._reportChange(item, change, log, phase1, phase2)
if err:
ret.append(err)
return ret
def _reportChange(self, item, change, log, phase1=True, phase2=True):
"""Send a message to gerrit."""
# If the source is no GerritSource we cannot report anything here.
if not isinstance(item.change.project.source, GerritSource):
if not isinstance(change.project.source, GerritSource):
return
# We can only report changes, not plain branches
if not isinstance(item.change, Change):
if not isinstance(change, Change):
return
# For supporting several Gerrit connections we also must filter by
# the canonical hostname.
if item.change.project.source.connection.canonical_hostname != \
if change.project.source.connection.canonical_hostname != \
self.connection.canonical_hostname:
log.debug("Not reporting %s as this Gerrit reporter "
"is for %s and the change is from %s",
item, self.connection.canonical_hostname,
item.change.project.source.connection.canonical_hostname)
return
comments = self.getFileComments(item)
comments = self.getFileComments(item, change)
if self._create_comment:
message = self._formatItemReport(item)
else:
message = ''
log.debug("Report change %s, params %s, message: %s, comments: %s",
item.change, self.config, message, comments)
if phase2 and self._submit and not hasattr(item.change, '_ref_sha'):
change, self.config, message, comments)
if phase2 and self._submit and not hasattr(change, '_ref_sha'):
# If we're starting to submit a bundle, save the current
# ref sha for every item in the bundle.
changes = set([item.change])
if item.bundle:
for i in item.bundle.items:
changes.add(i.change)
# Store a dict of project,branch -> sha so that if we have
# duplicate project/branches, we only query once.
ref_shas = {}
for other_change in changes:
for other_change in item.changes:
if not isinstance(other_change, GerritChange):
continue
key = (other_change.project, other_change.branch)
@ -92,9 +93,10 @@ class GerritReporter(BaseReporter):
ref_shas[key] = ref_sha
other_change._ref_sha = ref_sha
return self.connection.review(item, message, self._submit,
self._labels, self._checks_api,
comments, phase1, phase2,
return self.connection.review(item, change, message,
self._submit, self._labels,
self._checks_api, comments,
phase1, phase2,
zuul_event_id=item.event)
def getSubmitAllowNeeds(self):

View File

@ -78,7 +78,7 @@ class GitConnection(ZKChangeCacheMixin, BaseConnection):
self.projects[project.name] = project
def getChangeFilesUpdated(self, project_name, branch, tosha):
job = self.sched.merger.getFilesChanges(
job = self.sched.merger.getFilesChangesRaw(
self.connection_name, project_name, branch, tosha,
needs_result=True)
self.log.debug("Waiting for fileschanges job %s" % job)
@ -86,8 +86,8 @@ class GitConnection(ZKChangeCacheMixin, BaseConnection):
if not job.updated:
raise Exception("Fileschanges job %s failed" % job)
self.log.debug("Fileschanges job %s got changes on files %s" %
(job, job.files))
return job.files
(job, job.files[0]))
return job.files[0]
def lsRemote(self, project):
refs = {}

View File

@ -1,4 +1,5 @@
# Copyright 2015 Puppet Labs
# Copyright 2024 Acme Gating, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
@ -58,37 +59,48 @@ class GithubReporter(BaseReporter):
self.context = "{}/{}".format(pipeline.tenant.name, pipeline.name)
def report(self, item, phase1=True, phase2=True):
"""Report on an event."""
log = get_annotated_logger(self.log, item.event)
ret = []
for change in item.changes:
err = self._reportChange(item, change, log, phase1, phase2)
if err:
ret.append(err)
return ret
def _reportChange(self, item, change, log, phase1=True, phase2=True):
"""Report on an event."""
# If the source is not GithubSource we cannot report anything here.
if not isinstance(item.change.project.source, GithubSource):
if not isinstance(change.project.source, GithubSource):
return
# For supporting several Github connections we also must filter by
# the canonical hostname.
if item.change.project.source.connection.canonical_hostname != \
if change.project.source.connection.canonical_hostname != \
self.connection.canonical_hostname:
return
# order is important for github branch protection.
# A status should be set before a merge attempt
if phase1 and self._commit_status is not None:
if (hasattr(item.change, 'patchset') and
item.change.patchset is not None):
self.setCommitStatus(item)
elif (hasattr(item.change, 'newrev') and
item.change.newrev is not None):
self.setCommitStatus(item)
if (hasattr(change, 'patchset') and
change.patchset is not None):
self.setCommitStatus(item, change)
elif (hasattr(change, 'newrev') and
change.newrev is not None):
self.setCommitStatus(item, change)
# Comments, labels, and merges can only be performed on pull requests.
# If the change is not a pull request (e.g. a push) skip them.
if hasattr(item.change, 'number'):
if hasattr(change, 'number'):
errors_received = False
if phase1:
if self._labels or self._unlabels:
self.setLabels(item)
self.setLabels(item, change)
if self._review:
self.addReview(item)
self.addReview(item, change)
if self._check:
check_errors = self.updateCheck(item)
check_errors = self.updateCheck(item, change)
# TODO (felix): We could use this mechanism to
# also report back errors from label and review
# actions
@ -98,12 +110,12 @@ class GithubReporter(BaseReporter):
)
errors_received = True
if self._create_comment or errors_received:
self.addPullComment(item)
self.addPullComment(item, change)
if phase2 and self._merge:
try:
self.mergePull(item)
self.mergePull(item, change)
except Exception as e:
self.addPullComment(item, str(e))