Don't cancel Github check-run during re-enqueue

So far, when the scheduler re-enqueued a change that was missing
dependencies, it also reported the Github check-run as cancelled, but it
did not report a new start event because the re-enqueued item was added
as a "quiet" item.

The check-run on Github was still marked as success once the item
finished, but until then it appeared as cancelled even though the change
had been successfully re-enqueued.

To fix this, we no longer call the dequeue reporters when the change
could be re-enqueued. The dequeued item is still reported to the
database, though.

Change-Id: Iea465ca1d9132322b912f7723e3ae41a8c6d3002
This commit is contained in:
Simon Westphahl
2023-07-20 10:52:28 +02:00
parent 4dc0962a49
commit ea5f8fea7c
7 changed files with 136 additions and 14 deletions

View File

@ -20,11 +20,21 @@
Verified: 1
github:
status: success
check: success
failure:
gerrit:
Verified: -1
github:
status: failure
check: failure
start:
github:
check: in_progress
comment: false
dequeue:
github:
check: cancelled
comment: false
- pipeline:
name: gate
@ -53,14 +63,22 @@
submit: true
github:
merge: true
check: success
failure:
gerrit:
Verified: -2
github: {}
github:
check: failure
start:
gerrit:
Verified: 0
github: {}
github:
check: in_progress
comment: false
dequeue:
github:
check: cancelled
comment: false
precedence: high
- job:

View File

@ -0,0 +1,40 @@
[statsd]
# note, use 127.0.0.1 rather than localhost to avoid getting ipv6
# see: https://github.com/jsocol/pystatsd/issues/61
server=127.0.0.1
[scheduler]
tenant_config=main.yaml
relative_priority=true
[merger]
git_dir=/tmp/zuul-test/merger-git
git_user_email=zuul@example.com
git_user_name=zuul
[executor]
git_dir=/tmp/zuul-test/executor-git
load_multiplier=100
[connection gerrit]
driver=gerrit
server=review.example.com
user=jenkins
sshkey=fake_id_rsa_path
password=badpassword
[connection github]
driver=github
webhook_token=0000000000000000000000000000000000000000
app_id=1
app_key=$APP_KEY_FIXTURE$
[connection smtp]
driver=smtp
server=localhost
port=25
default_from=zuul@example.com
default_to=you@example.com
[database]
dburi=$MYSQL_FIXTURE_DBURI$

View File

@ -18,7 +18,12 @@ import textwrap
from zuul.model import PromoteEvent
from tests.base import ZuulTestCase, simple_layout, iterate_timeout
from tests.base import (
iterate_timeout,
simple_layout,
ZuulGithubAppTestCase,
ZuulTestCase,
)
class TestGerritCircularDependencies(ZuulTestCase):
@ -2938,3 +2943,62 @@ class TestGithubCircularDependencies(ZuulTestCase):
dict(name="project-job", result="SUCCESS",
changes=f"{B.number},{B.head_sha} {A.number},{A.head_sha}"),
], ordered=False)
class TestGithubAppCircularDependencies(ZuulGithubAppTestCase):
    """Circular-dependency tests that exercise the Github checks API."""

    config_file = "zuul-gerrit-github-app.conf"
    tenant_config_file = "config/circular-dependencies/main.yaml"
    scheduler_count = 1

    def test_dependency_refresh_checks_api(self):
        # When two changes are linked into a cycle, the dependencies
        # must be refreshed and items already in pipelines updated,
        # while the Github check-run stays in-progress (i.e. it is not
        # reported as cancelled during the re-enqueue).
        self.executor_server.hold_jobs_in_build = True

        # Mirror the typical workflow: the developer only knows one PR
        # id at a time. Open the first change on its own.
        pr_a = self.fake_github.openFakePullRequest(
            "gh/project", "master", "A")
        self.fake_github.emitEvent(pr_a.getPullRequestOpenedEvent())
        self.waitUntilSettled()

        # With the first change uploaded, open the second one and point
        # it at the first: B -> A
        pr_b = self.fake_github.openFakePullRequest(
            "gh/project", "master", "B")
        pr_b.body = f"{pr_b.subject}\n\nDepends-On: {pr_a.url}\n"
        self.fake_github.emitEvent(pr_b.getPullRequestOpenedEvent())
        self.waitUntilSettled()

        # Now that the second change exists, close the cycle by
        # updating the first change: B <-> A
        pr_a.body = f"{pr_a.subject}\n\nDepends-On: {pr_b.url}\n"
        self.fake_github.emitEvent(
            pr_a.getPullRequestEditedEvent(pr_a.subject))
        self.waitUntilSettled()

        # The Github check-run must still be in progress — it must not
        # have been reported as cancelled by the re-enqueue.
        check_runs = self.fake_github.getCommitChecks(
            "gh/project", pr_a.head_sha)
        self.assertEqual(len(check_runs), 1)
        self.assertEqual(check_runs[0]["status"], "in_progress")

        self.executor_server.hold_jobs_in_build = False
        self.executor_server.release()
        self.waitUntilSettled()

        self.assertHistory([
            dict(name="project-job", result="ABORTED",
                 changes=f"{pr_a.number},{pr_a.head_sha}"),
            dict(name="project-job", result="SUCCESS",
                 changes=f"{pr_a.number},{pr_a.head_sha} "
                         f"{pr_b.number},{pr_b.head_sha}"),
            dict(name="project-job", result="SUCCESS",
                 changes=f"{pr_b.number},{pr_b.head_sha} "
                         f"{pr_a.number},{pr_a.head_sha}"),
        ], ordered=False)

View File

@ -351,8 +351,8 @@ class PipelineManager(metaclass=ABCMeta):
self.sql.reportBuildsetEnd(build_set, action,
final, result)
def reportDequeue(self, item):
if not self.pipeline.state.disabled:
def reportDequeue(self, item, quiet=False):
if not (self.pipeline.state.disabled or quiet):
self.log.info(
"Reporting dequeue, action %s item%s",
self.pipeline.dequeue_actions,
@ -787,7 +787,7 @@ class PipelineManager(metaclass=ABCMeta):
for cycle_item in enqueued_cycle_items:
self.dequeueItem(cycle_item)
def dequeueItem(self, item):
def dequeueItem(self, item, quiet=False):
log = get_annotated_logger(self.log, item.event)
log.debug("Removing change %s from queue", item.change)
# In case a item is dequeued that doesn't have a result yet
@ -796,7 +796,7 @@ class PipelineManager(metaclass=ABCMeta):
# twice.
if not item.current_build_set.result and item.live:
item.setReportedResult('DEQUEUED')
self.reportDequeue(item)
self.reportDequeue(item, quiet)
item.queue.dequeueItem(item)
span_attrs = {
@ -1619,7 +1619,7 @@ class PipelineManager(metaclass=ABCMeta):
self.reportItem(item)
except exceptions.MergeFailure:
pass
self.dequeueItem(item)
self.dequeueItem(item, quiet_dequeue)
return (True, nnfi)
actionable = change_queue.isActionable(item)

View File

@ -276,8 +276,8 @@ class DependentPipelineManager(SharedQueuePipelineManager):
return failing_items
return None
def dequeueItem(self, item):
super(DependentPipelineManager, self).dequeueItem(item)
def dequeueItem(self, item, quiet=False):
super(DependentPipelineManager, self).dequeueItem(item, quiet)
# If this was a dynamic queue from a speculative change,
# remove the queue (if empty)
if item.queue.dynamic:

View File

@ -124,8 +124,8 @@ class IndependentPipelineManager(PipelineManager):
# verifying that the dependent change is mergable.
return abort, changes_needed
def dequeueItem(self, item):
super(IndependentPipelineManager, self).dequeueItem(item)
def dequeueItem(self, item, quiet=False):
super(IndependentPipelineManager, self).dequeueItem(item, quiet)
# An independent pipeline manager dynamically removes empty
# queues
if not item.queue.queue:

View File

@ -72,8 +72,8 @@ class SupercedentPipelineManager(PipelineManager):
self._pruneQueues()
return ret
def dequeueItem(self, item):
super(SupercedentPipelineManager, self).dequeueItem(item)
def dequeueItem(self, item, quiet=False):
super(SupercedentPipelineManager, self).dequeueItem(item, quiet)
# A supercedent pipeline manager dynamically removes empty
# queues
if not item.queue.queue: