Merge "Add newrev to timer-triggered items"
commit 4beb4baf8e
@@ -42,6 +42,33 @@ Zuul implements the timer using `apscheduler`_, Please check the
words, it is not guaranteed to vary from one run of the timer
trigger to the next).

.. attr:: dereference
   :default: false

   Whether the branch tip should be dereferenced when enqueued.

   This controls the behavior when the timer trigger activates a
   second (or later) time for a given project-branch while a queue
   item for that project-branch is still in the pipeline.

   If set to the default value of ``false``, then the triggering
   event and queue item will only include the name of the branch;
   this means that Zuul will see an identical queue item in the
   pipeline and will not enqueue a duplicate entry.

   If set to ``true``, then Zuul will look up the current Git sha of
   the tip of each project-branch when enqueueing that project-branch
   and include that information in the triggering event and queue
   item.  If the timer trigger activates a second time while a given
   project-branch is still in the pipeline, the behavior then depends
   on whether the Git commit sha differs.  If the branch has changed
   between the two activations, Zuul will treat the second activation
   as distinct and enqueue a new item for the same project-branch
   (but with a different ``newrev`` value).  If the Git commit sha is
   the same on both activations, Zuul will not enqueue a second
   entry.

.. warning::
   Be aware the day-of-week value differs from cron.
   The first weekday is Monday (0), and the last is Sunday (6).
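To make the effect of ``dereference`` concrete, here is a small illustrative Python sketch (not part of this change; the item fields and the duplicate check are simplified assumptions, not Zuul's actual pipeline logic):

    # Illustrative sketch only: simplified duplicate check for timer items.
    # Zuul's real comparison lives in the pipeline manager; names here are
    # hypothetical.
    def is_duplicate(existing_items, project, branch, newrev=None):
        for item in existing_items:
            if (item['project'], item['branch']) != (project, branch):
                continue
            if newrev is None or item.get('newrev') == newrev:
                return True  # identical item already in the pipeline
        return False

    items = [{'project': 'org/project', 'branch': 'master',
              'newrev': 'abc123'}]
    # dereference: false -> no newrev, so the second activation looks identical
    print(is_duplicate(items, 'org/project', 'master'))           # True
    # dereference: true and the branch has advanced -> enqueue a new item
    print(is_duplicate(items, 'org/project', 'master', 'def456'))  # False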
@@ -439,9 +439,12 @@ described above) are available:
    This field is present for the following item types:

    Branch
-      If the item was enqueued as the result of a change merging
-      or being pushed to the branch, the git sha of the new
-      revision will be included here.
+      If the item was enqueued as the result of a change merging or
+      being pushed to the branch, the git sha of the new revision
+      will be included here.  If the item was enqueued due to a
+      timer and the ``dereference`` flag was set on the timer
+      trigger, it will contain the git sha of the branch at the
+      time it was enqueued.

    Tag
       If the item was enqueued as the result of a tag being
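As a hedged illustration of how a job might consume this field, the sketch below treats the ``zuul`` job variables as a plain Python dict; the values are hypothetical and the fallback-to-branch behavior is only an example, not something Zuul prescribes:

    # Illustrative only: the "zuul" job variables as a plain dict.  A timer
    # item enqueued with dereference: true carries a newrev value; otherwise
    # the key may be unset.
    zuul_vars = {
        'project': {'name': 'org/project'},
        'branch': 'master',
        'newrev': '16ae2a4df107658b52750063ae203f978cf02ff7',  # hypothetical
    }

    checkout_ref = zuul_vars.get('newrev') or zuul_vars['branch']
    print(f"Building {zuul_vars['project']['name']} at {checkout_ref}")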
releasenotes/notes/periodic-head-217573a1cc31279f.yaml (new file, 27 lines)
@@ -0,0 +1,27 @@
---
features:
  - |
    An option has been added to allow multiple timer-triggered queue
    items for the same project-branch to be enqueued if the branch
    head has advanced.

    Previously, timer-triggered items only included information about
    the project and branch, so if a job on such an item ran longer
    than the timer interval, Zuul would not enqueue a second copy of
    the item because it would have been seen as identical to the
    first (even if the underlying repo had changed).

    If the new ``dereference`` attribute of the timer trigger is set,
    Zuul will include the SHA of the branch head in the triggering
    event and queue item, so that if the timer trigger fires while a
    queue item for the same project-branch is already in the queue, a
    second item will be enqueued if the branch has changed since the
    first item was enqueued.  If the SHA of the branch head is the
    same for both items, the second item will not be enqueued.

    When set, ``newrev`` is also available in the job's ``zuul``
    variables for such queue items.

    The previous behavior remains the default: ``dereference`` is not
    set, and ``newrev`` is not available.
@@ -482,9 +482,14 @@ class FakeUser(object):


 class FakeBranch(object):
-    def __init__(self, fake_repo, branch='master', protected=False):
+    def __init__(self, fake_repo, github_data, branch='master',
+                 protected=False):
         self.name = branch
         self._fake_repo = fake_repo
+        upstream_root = github_data.fake_github_connection.upstream_root
+        repo_path = os.path.join(upstream_root, fake_repo.name)
+        repo = git.Repo(repo_path)
+        self.sha = repo.heads[branch].commit.hexsha

     @property
     def protected(self):
@@ -493,7 +498,10 @@ class FakeBranch(object):
     def as_dict(self):
         return {
             'name': self.name,
-            'protected': self.protected
+            'protected': self.protected,
+            'commit': {
+                'sha': self.sha,
+            }
         }

@@ -686,11 +694,11 @@ class FakeCommit:

 class FakeRepository(object):
     def __init__(self, name, data):
-        self._api = FAKE_BASE_URL
-        self._branches = [FakeBranch(self)]
-        self._commits = {}
         self.data = data
         self.name = name
+        self._api = FAKE_BASE_URL
+        self._branches = [FakeBranch(self, data)]
+        self._commits = {}

         # Simple dictionary to store permission values per feature (e.g.
         # checks, Repository contents, Pull requests, Commit statuses, ...).
@@ -755,7 +763,7 @@ class FakeRepository(object):
         return client.session.get(url, headers)

     def _create_branch(self, branch):
-        self._branches.append((FakeBranch(self, branch=branch)))
+        self._branches.append((FakeBranch(self, self.data, branch=branch)))

     def _delete_branch(self, branch_name):
         self._branches = [b for b in self._branches if b.name != branch_name]
@@ -33,7 +33,8 @@ import git
 from git.util import IterableList
 import requests

-FakeGitlabBranch = namedtuple('Branch', ('name', 'protected'))
+FakeGitlabBranch = namedtuple('Branch',
+                              ('name', 'protected', 'upstream_root'))


 class GitlabWebServer(object):
@@ -214,7 +215,17 @@ class GitlabWebServer(object):
         owner, name = project.split('/')
         if branch in fake_repos[(owner, name)]:
             protected = fake_repos[(owner, name)][branch].protected
-            self.send_data({'protected': protected})
+            upstream_root = fake_repos[(owner, name)][
+                branch].upstream_root
+            repo_path = os.path.join(upstream_root, owner, name)
+            repo = git.Repo(repo_path)
+            sha = repo.heads[branch].commit.hexsha
+            self.send_data({
+                'protected': protected,
+                'commit': {
+                    'id': sha,
+                },
+            })
         else:
             return self.send_data({}, code=404)

@@ -325,14 +336,14 @@ class FakeGitlabConnection(gitlabconnection.GitlabConnection):
     def addProjectByName(self, project_name):
         owner, proj = project_name.split('/')
         repo = self._test_web_server.fake_repos[(owner, proj)]
-        branch = FakeGitlabBranch('master', False)
+        branch = FakeGitlabBranch('master', False, self.upstream_root)
         if 'master' not in repo:
             repo.append(branch)

     def protectBranch(self, owner, project, branch, protected=True):
         if branch in self._test_web_server.fake_repos[(owner, project)]:
             del self._test_web_server.fake_repos[(owner, project)][branch]
-        fake_branch = FakeGitlabBranch(branch, protected=protected)
+        fake_branch = FakeGitlabBranch(branch, protected, self.upstream_root)
        self._test_web_server.fake_repos[(owner, project)].append(fake_branch)

     def deleteBranch(self, owner, project, branch):
@@ -280,7 +280,7 @@ class FakePagureAPIClient(pagureconnection.PagureAPIClient):
             return self.gen_error("GET")
         return pr

-    def get(self, url):
+    def get(self, url, params=None):
         self.log.debug("Getting resource %s ..." % url)

         match = re.match(r'.+/api/0/(.+)/pull-request/(\d+)$', url)
@@ -307,7 +307,13 @@ class FakePagureAPIClient(pagureconnection.PagureAPIClient):
         match = re.match('.+/api/0/(.+)/git/branches$', url)
         if match:
             # project = match.groups()[0]
-            return {'branches': ['master']}, 200, "", "GET"
+            if params and params.get('with_commits'):
+                branches = {
+                    'master': '16ae2a4df107658b52750063ae203f978cf02ff7'
+                }
+            else:
+                branches = ['master']
+            return {'branches': branches}, 200, "", "GET"

         match = re.match(r'.+/api/0/(.+)/pull-request/(\d+)/diffstats$', url)
         if match:
tests/fixtures/layouts/idle-dereference.yaml (new file, 26 lines)
@@ -0,0 +1,26 @@
- pipeline:
    name: periodic
    manager: independent
    trigger:
      timer:
        - time: '* * * * * */1'
          dereference: true

- job:
    name: base
    parent: null
    run: playbooks/base.yaml

- job:
    name: project-bitrot
    nodeset:
      nodes:
        - name: static
          label: ubuntu-xenial
    run: playbooks/project-bitrot.yaml

- project:
    name: org/project
    periodic:
      jobs:
        - project-bitrot
@@ -1071,6 +1071,7 @@ class TestConnectionsBranchCache(ZuulTestCase):

         # Ensure that the empty list of branches is valid and is not
         # seen as an error
+        self.init_repo("org/newproject")
         newproject = source.getProject('org/newproject')
         connection.addProject(newproject)
         tpc = zuul.model.TenantProjectConfig(newproject)
@@ -482,6 +482,13 @@ class TestGerritWeb(ZuulTestCase):
         # the test will time-out.
         self.waitUntilSettled()

+    def test_get_project_branch_sha(self):
+        # Exercise this method since it's only called from timer
+        # triggers
+        source = self.fake_gerrit.source
+        project = source.getProject('org/project')
+        self.assertIsNotNone(source.getProjectBranchSha(project, 'master'))
+

 class TestFileComments(AnsibleZuulTestCase):
     config_file = 'zuul-gerrit-web.conf'
@@ -190,3 +190,11 @@ class TestGitDriver(ZuulTestCase):
         self.waitUntilSettled()
         # Make sure no job as run as ignore-delete is True by default
         self.assertEqual(len(self.history), 0)
+
+    @simple_layout('layouts/basic-git.yaml', driver='git')
+    def test_get_project_branch_sha(self):
+        # Exercise this method since it's only called from timer
+        # triggers
+        source = self.scheds.first.sched.connections.getSource('git')
+        project = source.getProject('org/project')
+        self.assertIsNotNone(source.getProjectBranchSha(project, 'master'))
@@ -1625,6 +1625,14 @@ class TestGithubDriver(ZuulTestCase):
         self.assertIsNotNone(other_change.cache_stat)
         self.assertIs(change, other_change)

+    @simple_layout("layouts/basic-github.yaml", driver="github")
+    def test_get_project_branch_sha(self):
+        # Exercise this method since it's only called from timer
+        # triggers
+        source = self.fake_github.source
+        project = source.getProject('org/project')
+        self.assertIsNotNone(source.getProjectBranchSha(project, 'master'))
+

 class TestMultiGithubDriver(ZuulTestCase):
     config_file = 'zuul-multi-github.conf'
@@ -1805,6 +1813,7 @@ class TestGithubUnprotectedBranches(ZuulTestCase):

         # add a spare branch so that the project is not empty after master gets
         # deleted.
+        self.create_branch('org/project2', 'feat-x')
         repo._create_branch('feat-x')
         self.fake_github.emitEvent(
             self.fake_github.getPushEvent(
@@ -1902,6 +1911,7 @@ class TestGithubUnprotectedBranches(ZuulTestCase):
     # took place.
     def test_reconfigure_on_pr_to_new_protected_branch(self):
         self.create_branch('org/project2', 'release')
+        self.create_branch('org/project2', 'feature')

         github = self.fake_github.getGithubClient()
         repo = github.repo_from_project('org/project2')
@@ -1023,6 +1023,14 @@ class TestGitlabDriver(ZuulTestCase):
         change = conn.getChange(change_key)
         self.assertEqual(change.commit_id, A.sha)

+    @simple_layout("layouts/basic-gitlab.yaml", driver="gitlab")
+    def test_get_project_branch_sha(self):
+        # Exercise this method since it's only called from timer
+        # triggers
+        source = self.fake_gitlab.source
+        project = source.getProject('org/project')
+        self.assertIsNotNone(source.getProjectBranchSha(project, 'master'))
+

 class TestGitlabUnprotectedBranches(ZuulTestCase):
     config_file = 'zuul-gitlab-driver.conf'
@@ -687,6 +687,14 @@ class TestPagureDriver(ZuulTestCase):
         # is reverted and not in changed files to trigger project-test2
         self.assertEqual(1, len(self.history))

+    @simple_layout("layouts/basic-pagure.yaml", driver="pagure")
+    def test_get_project_branch_sha(self):
+        # Exercise this method since it's only called from timer
+        # triggers
+        source = self.fake_pagure.source
+        project = source.getProject('org/project')
+        self.assertIsNotNone(source.getProjectBranchSha(project, 'master'))
+

 class TestPagureToGerritCRD(ZuulTestCase):
     config_file = 'zuul-crd-pagure.conf'
@@ -3207,6 +3207,47 @@ class TestScheduler(ZuulTestCase):
         self.executor_server.release()
         self.waitUntilSettled()

+    def test_timer_branch_updated(self):
+        # Test that if a branch is updated while a periodic job is
+        # running, we get a second queue item.
+        # This test can not use simple_layout because it must start
+        # with a configuration which does not include a
+        # timer-triggered job so that we have an opportunity to set
+        # the hold flag before the first job.
+        self.executor_server.hold_jobs_in_build = True
+        # Start timer trigger - also org/project
+        self.commitConfigUpdate('common-config',
+                                'layouts/idle-dereference.yaml')
+        self.scheds.execute(lambda app: app.sched.reconfigure(app.config))
+        # The pipeline triggers every second, so we should have seen
+        # several by now.
+        time.sleep(5)
+        self.waitUntilSettled()
+
+        M = self.fake_gerrit.addFakeChange('org/project', 'master', 'M')
+        M.setMerged()
+
+        time.sleep(5)
+        self.waitUntilSettled()
+
+        # Stop queuing timer triggered jobs so that the assertions
+        # below don't race against more jobs being queued.
+        # Must be in same repo, so overwrite config with another one
+        self.commitConfigUpdate('common-config', 'layouts/no-timer.yaml')
+        self.scheds.execute(lambda app: app.sched.reconfigure(app.config))
+        self.waitUntilSettled()
+        # If APScheduler is in mid-event when we remove the job, we
+        # can end up with one more event firing, so give it an extra
+        # second to settle.
+        time.sleep(1)
+        self.waitUntilSettled()
+        # There should be two periodic jobs, one before and one after
+        # the update.
+        self.assertEqual(2, len(self.builds))
+        self.executor_server.release()
+        self.waitUntilSettled()
+        self.assertEqual(2, len(self.history))
+
     def test_new_patchset_dequeues_old_on_head(self):
         "Test that a new patchset causes the old to be dequeued (at head)"
         # D -> C (depends on B) -> B (depends on A) -> A -> M
@@ -3619,8 +3619,8 @@ class TestGlobalRepoState(AnsibleZuulTestCase):
         # of override-checkout
         repo = github.repo_from_project('org/requiredproject-github')
         repo._set_branch_protection('master', True)
-        repo._create_branch('feat-x')
         self.create_branch('org/requiredproject-github', 'feat-x')
+        repo._create_branch('feat-x')
         self.fake_github.emitEvent(self.fake_github.getPushEvent(
             'org/requiredproject-github', ref='refs/heads/feat-x'))

@@ -96,6 +96,12 @@ class GerritSource(BaseSource):
         return self.connection.getChange(change_key, refresh=refresh,
                                          event=event)

+    def getProjectBranchSha(self, project, branch_name):
+        return (
+            self.connection.getRefSha(project, f'refs/heads/{branch_name}')
+            or None
+        )
+
     def getChangeByURL(self, url, event):
         try:
             parsed = urllib.parse.urlparse(url)
@@ -140,6 +140,10 @@ class GitConnection(ZKChangeCacheMixin, BaseConnection):
                     refs if ref.startswith('refs/heads/')]
         return branches

+    def getRefSha(self, project, ref):
+        refs = self.lsRemote(project.name)
+        return refs.get(ref, '')
+
     def getGitUrl(self, project):
         return os.path.join(self.baseurl, project.name)

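``lsRemote`` itself is not part of this diff; as context, a minimal sketch of an equivalent ref-to-SHA mapping using GitPython's ls-remote support might look like the following (the repository URL is a placeholder):

    # Minimal sketch of an ls-remote style ref->sha mapping with GitPython.
    # The URL is a placeholder; Zuul's GitConnection builds it from its
    # configured baseurl and the project name.
    import git

    def ls_remote(url):
        refs = {}
        for line in git.Git().ls_remote(url).splitlines():
            sha, ref = line.split('\t')
            refs[ref] = sha
        return refs

    refs = ls_remote('https://example.com/org/project.git')
    print(refs.get('refs/heads/master', ''))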
@@ -52,6 +52,10 @@ class GitSource(BaseSource):
         return self.connection.getChange(change_key, refresh=refresh,
                                          event=event)

+    def getProjectBranchSha(self, project, branch_name):
+        return (self.connection.getRefSha(project, f'refs/heads/{branch_name}')
+                or None)
+
     def getChangeByURL(self, url, event):
         return None

@@ -1843,6 +1843,25 @@ class GithubConnection(ZKChangeCacheMixin, ZKBranchCacheMixin, BaseConnection):
         resp = resp.json()
         return resp['default_branch']

+    def getProjectBranchSha(self, project, branch_name):
+        github = self.getGithubClient(project.name)
+        url = github.session.build_url('repos', project.name, 'branches',
+                                       branch_name)
+        resp = github.session.get(url)
+
+        if resp.status_code == 403:
+            self.log.error(str(resp))
+            rate_limit = github.rate_limit()
+            if rate_limit['resources']['core']['remaining'] == 0:
+                self.log.warning("Rate limit exceeded")
+            return None
+        elif resp.status_code == 404:
+            raise Exception("Got status code 404 when fetching "
+                            "project %s branch %s", project.name, branch_name)
+
+        resp = resp.json()
+        return resp['commit']['sha']
+
     def isBranchProtected(self, project_name: str, branch_name: str,
                           zuul_event_id=None) -> Optional[bool]:
         github = self.getGithubClient(
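The lookup above goes through the github3.py session object; the underlying call is GitHub's "get a branch" REST endpoint, whose response carries ``commit.sha``. A standalone sketch with ``requests`` (repository and token are placeholders, and error handling is reduced to ``raise_for_status``):

    # Standalone sketch of the same lookup against the GitHub REST API:
    # GET /repos/{owner}/{repo}/branches/{branch}
    import requests

    def get_branch_sha(owner_repo, branch, token=None):
        headers = {'Accept': 'application/vnd.github+json'}
        if token:
            headers['Authorization'] = f'token {token}'
        url = f'https://api.github.com/repos/{owner_repo}/branches/{branch}'
        resp = requests.get(url, headers=headers)
        resp.raise_for_status()
        return resp.json()['commit']['sha']

    print(get_branch_sha('octocat/Hello-World', 'master'))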
@@ -152,6 +152,9 @@ class GithubSource(BaseSource):
             return super().getProjectDefaultBranch(project, tenant, min_ltime)
         return default_branch

+    def getProjectBranchSha(self, project, branch_name):
+        return self.connection.getProjectBranchSha(project, branch_name)
+
     def getProjectBranchCacheLtime(self):
         return self.connection._branch_cache.ltime

@@ -637,6 +637,12 @@ class GitlabConnection(ZKChangeCacheMixin, ZKBranchCacheMixin, BaseConnection):
                 bi.present = True
         return valid_flags, list(branch_infos.values())

+    def getProjectBranchSha(self, project_name, branch_name,
+                            zuul_event_id=None):
+        branch = self.gl_client.get_project_branch(project_name, branch_name,
+                                                    zuul_event_id)
+        return branch['commit']['id']
+
     def isBranchProtected(self, project_name, branch_name,
                           zuul_event_id=None):
         branch = self.gl_client.get_project_branch(project_name, branch_name,
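``get_project_branch`` wraps GitLab's single-branch endpoint, whose payload includes ``commit.id``. A standalone sketch with ``requests`` (host, project path and token are placeholders):

    # Standalone sketch: GET /projects/:id/repository/branches/:branch
    import urllib.parse
    import requests

    def get_branch_commit_id(gitlab_url, project_path, branch, token=None):
        project_id = urllib.parse.quote(project_path, safe='')
        headers = {'PRIVATE-TOKEN': token} if token else {}
        url = (f'{gitlab_url}/api/v4/projects/{project_id}'
               f'/repository/branches/{branch}')
        resp = requests.get(url, headers=headers)
        resp.raise_for_status()
        return resp.json()['commit']['id']

    print(get_branch_commit_id('https://gitlab.com', 'gitlab-org/gitlab',
                               'master'))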
@@ -121,6 +121,9 @@ class GitlabSource(BaseSource):
     def getProjectBranches(self, project, tenant, min_ltime=-1):
         return self.connection.getProjectBranches(project, tenant, min_ltime)

+    def getProjectBranchSha(self, project, branch_name):
+        return self.connection.getProjectBranchSha(project.name, branch_name)
+
     def getProjectBranchCacheLtime(self):
         return self.connection._branch_cache.ltime

@@ -384,9 +384,9 @@ class PagureAPIClient():
                 verb, url, code, data
             ))

-    def get(self, url):
+    def get(self, url, params=None):
         self.log.debug("Getting resource %s ..." % url)
-        ret = self.session.get(url, headers=self.headers)
+        ret = self.session.get(url, params=params, headers=self.headers)
         self.log.debug("GET returned (code: %s): %s" % (
             ret.status_code, ret.text))
         return ret.json(), ret.status_code, ret.url, 'GET'
@@ -405,9 +405,13 @@ class PagureAPIClient():
         self._manage_error(*resp)
         return resp[0]['username']

-    def get_project_branches(self):
+    def get_project_branches(self, with_commits=False):
         path = '%s/git/branches' % self.project
-        resp = self.get(self.base_url + path)
+        if with_commits:
+            params = dict(with_commits=True)
+        else:
+            params = None
+        resp = self.get(self.base_url + path, params=params)
         self._manage_error(*resp)
         return resp[0].get('branches', [])

@@ -614,6 +618,13 @@ class PagureConnection(ZKChangeCacheMixin, ZKBranchCacheMixin, BaseConnection):
                         for name in branches]
         return BranchFlag.PRESENT, branch_infos

+    def getProjectBranchSha(self, project, branch_name):
+        pagure = self.get_project_api_client(project.name)
+        branches = pagure.get_project_branches(with_commits=True)
+
+        self.log.info("Got branches with commits for %s" % project.name)
+        return branches[branch_name]
+
     def isBranchProtected(self, project_name, branch_name,
                           zuul_event_id=None):
         return True
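For reference, Pagure's branches endpoint returns a name-to-SHA mapping instead of a plain list when ``with_commits=true`` is passed; a minimal standalone sketch (instance URL and project are placeholders):

    # Standalone sketch: Pagure's branches endpoint with with_commits=true
    # returns {'branches': {'<branch>': '<sha>', ...}} rather than a list.
    import requests

    def get_branch_sha(base_url, project, branch):
        url = f'{base_url}/api/0/{project}/git/branches'
        resp = requests.get(url, params={'with_commits': True})
        resp.raise_for_status()
        return resp.json()['branches'][branch]

    print(get_branch_sha('https://pagure.io', 'pagure', 'master'))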
@@ -125,6 +125,9 @@ class PagureSource(BaseSource):
     def getProjectBranches(self, project, tenant, min_ltime=-1):
         return self.connection.getProjectBranches(project, tenant, min_ltime)

+    def getProjectBranchSha(self, project, branch_name):
+        return self.connection.getProjectBranchSha(project, branch_name)
+
     def getProjectBranchCacheLtime(self):
         return self.connection._branch_cache.ltime

@@ -151,12 +151,12 @@ class TimerDriver(Driver, TriggerInterface):

                     self._addJobsInner(tenant, pipeline,
                                        cron_args, jitter, timespec,
-                                       jobs)
+                                       ef.dereference, jobs)
         self._removeJobs(tenant, jobs)
         self.tenant_jobs[tenant.name] = jobs

     def _addJobsInner(self, tenant, pipeline, cron_args, jitter,
-                      timespec, jobs):
+                      timespec, dereference, jobs):
         # jobs is a dict of args->job that we mutate
         existing_jobs = self.tenant_jobs.get(tenant.name, {})
         for project_name, pcs in tenant.layout.project_configs.items():
@@ -171,7 +171,7 @@ class TimerDriver(Driver, TriggerInterface):
             try:
                 for branch in tenant.getProjectBranches(project_name):
                     args = (tenant.name, pipeline.name, project_name,
-                            branch, timespec,)
+                            branch, dereference, timespec,)
                     existing_job = existing_jobs.get(args)
                     if jitter:
                         # Resolve jitter here so that it is the same
|
||||
tenant, pipeline, project_name)
|
||||
|
||||
def _onTrigger(self, tenant_name, pipeline_name, project_name, branch,
|
||||
timespec):
|
||||
dereference, timespec):
|
||||
if not self.election_won:
|
||||
return
|
||||
|
||||
@@ -226,13 +226,13 @@ class TimerDriver(Driver, TriggerInterface):
             with self.tracer.start_as_current_span(
                     "TimerEvent", attributes=attributes):
                 self._dispatchEvent(tenant_name, pipeline_name, project_name,
-                                    branch, timespec)
+                                    branch, dereference, timespec)
         except Exception:
             self.stop_event.set()
             self.log.exception("Error when dispatching timer event")

     def _dispatchEvent(self, tenant_name, pipeline_name, project_name,
-                       branch, timespec):
+                       branch, dereference, timespec):
         self.log.debug('Got trigger for tenant %s and pipeline %s '
                        'project %s branch %s with timespec %s',
                        tenant_name, pipeline_name, project_name,
|
||||
# change cache.
|
||||
change_key = project.source.getChangeKey(event)
|
||||
with self.project_update_locks[project.canonical_name]:
|
||||
if dereference:
|
||||
event.newrev = project.source.getProjectBranchSha(
|
||||
project, branch)
|
||||
else:
|
||||
event.newrev = None
|
||||
project.source.getChange(change_key, refresh=True,
|
||||
event=event)
|
||||
log = get_annotated_logger(self.log, event)
|
||||
|
@@ -17,12 +17,14 @@ from zuul.model import EventFilter, TriggerEvent


 class TimerEventFilter(EventFilter):
-    def __init__(self, connection_name, trigger, types=[], timespecs=[]):
+    def __init__(self, connection_name, trigger, types=[], timespecs=[],
+                 dereference=False):
         EventFilter.__init__(self, connection_name, trigger)

         self._types = [x.pattern for x in types]
         self.types = types
         self.timespecs = timespecs
+        self.dereference = dereference

     def __repr__(self):
         ret = '<TimerEventFilter'
@@ -29,16 +29,21 @@ class TimerTrigger(BaseTrigger):
         efilters = []
         for trigger in to_list(trigger_conf):
             types = [make_regex('timer')]
-            f = TimerEventFilter(connection_name=connection_name,
-                                 trigger=self,
-                                 types=types,
-                                 timespecs=to_list(trigger['time']))
-
+            f = TimerEventFilter(
+                connection_name=connection_name,
+                trigger=self,
+                types=types,
+                timespecs=to_list(trigger['time']),
+                dereference=trigger.get('dereference', False),
+            )
             efilters.append(f)

         return efilters


 def getSchema():
-    timer_trigger = {v.Required('time'): str}
+    timer_trigger = {
+        v.Required('time'): str,
+        'dereference': bool,
+    }
     return timer_trigger
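With this schema, a timer trigger entry may now carry an optional boolean ``dereference`` key alongside the required ``time``. A small voluptuous sketch, outside of Zuul and purely illustrative, of validating such entries:

    # Illustrative validation of a timer trigger entry with voluptuous.
    import voluptuous as v

    timer_trigger = v.Schema({
        v.Required('time'): str,
        'dereference': bool,
    })

    print(timer_trigger({'time': '* * * * * */1', 'dereference': True}))
    print(timer_trigger({'time': '0 0 * * *'}))  # dereference stays optional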
@@ -231,6 +231,10 @@ class BaseSource(object, metaclass=abc.ABCMeta):

         return 'master'

+    @abc.abstractmethod
+    def getProjectBranchSha(self, project, branch_name):
+        """Return the SHA of the specified branch"""
+
     @abc.abstractmethod
     def getProjectBranchCacheLtime(self):
         """Return the current ltime of the project branch cache."""