Merge branch 'dev'

Change-Id: I65fc0ed6fed411ec3c453989b748ee6022081366
James E. Blair 2012-09-27 10:50:01 -07:00
commit bb480fd092
14 changed files with 130 additions and 66 deletions

.mailmap (new file)

@@ -0,0 +1,4 @@
+# Format is:
+# <preferred e-mail> <other e-mail 1>
+# <preferred e-mail> <other e-mail 2>
+Zhongyue Luo <zhongyue.nah@intel.com> <lzyeval@gmail.com>


@@ -1,3 +1,3 @@
James E. Blair <jeblair@hp.com>
Clark Boylan <clark.boylan@gmail.com>
-Zhongyue Luo <lzyeval@gmail.com>
+Zhongyue Luo <zhongyue.nah@intel.com>


@@ -86,7 +86,7 @@ And multiple changes are separated by a carat ("^"). E.g.::
The OpenStack project uses the following script to update the
repository in a workspace and merge appropriate changes:
-https://github.com/openstack/openstack-ci-puppet/blob/master/modules/jenkins_slave/files/slave_scripts/gerrit-git-prep.sh
+https://github.com/openstack/openstack-ci-puppet/blob/master/modules/jenkins/files/slave_scripts/gerrit-git-prep.sh
Gerrit events that do not include a change (e.g., ref-updated events
which are emitted after a git ref is updated (i.e., a commit is merged


@@ -345,7 +345,7 @@ succeeds. In the above example, project-unittest, project-pep8, and
project-pyflakes are only executed if project-merge succeeds. This
can help avoid running unnecessary jobs.
-.. seealso:: The OpenStack Zuul configuration for a comprehensive example: https://github.com/openstack/openstack-ci-puppet/blob/master/modules/openstack-ci-config/files/zuul/layout.yaml
+.. seealso:: The OpenStack Zuul configuration for a comprehensive example: https://github.com/openstack/openstack-ci-puppet/blob/master/modules/openstack_project/files/zuul/layout.yaml
logging.conf


@@ -13,3 +13,4 @@ layout_config=/etc/zuul/layout.yaml
log_config=/etc/zuul/logging.yaml
pidfile=/var/run/zuul/zuul.pid
state_dir=/var/lib/zuul
+git_dir=/var/lib/zuul/git


@@ -206,12 +206,12 @@ class FakeChange(object):
def getPatchsetCreatedEvent(self, patchset):
event = {"type": "patchset-created",
"change": {"project": self.project,
-"branch": self.branch,
-"id": "I5459869c07352a31bfb1e7a8cac379cabfcb25af",
-"number": str(self.number),
-"subject": self.subject,
-"owner": {"name": "User Name"},
-"url": "https://hostname/3"},
+"branch": self.branch,
+"id": "I5459869c07352a31bfb1e7a8cac379cabfcb25af",
+"number": str(self.number),
+"subject": self.subject,
+"owner": {"name": "User Name"},
+"url": "https://hostname/3"},
"patchSet": self.patchsets[patchset - 1],
"uploader": {"name": "User Name"}}
return event
@@ -305,8 +305,8 @@ class FakeChange(object):
return json.loads(json.dumps(self.data))
def setMerged(self):
-if (self.depends_on_change
-and self.depends_on_change.data['status'] != 'MERGED'):
+if (self.depends_on_change and
+self.depends_on_change.data['status'] != 'MERGED'):
return
if self.fail_merge:
return
@@ -362,14 +362,17 @@ class FakeGerrit(object):
class FakeJenkinsEvent(object):
def __init__(self, name, number, parameters, phase, status=None):
-data = {'build':
-{'full_url': 'https://server/job/%s/%s/' % (name, number),
-'number': number,
-'parameters': parameters,
-'phase': phase,
-'url': 'job/%s/%s/' % (name, number)},
-'name': name,
-'url': 'job/%s/' % name}
+data = {
+'build': {
+'full_url': 'https://server/job/%s/%s/' % (name, number),
+'number': number,
+'parameters': parameters,
+'phase': phase,
+'url': 'job/%s/%s/' % (name, number),
+},
+'name': name,
+'url': 'job/%s/' % name,
+}
if status:
data['build']['status'] = status
self.body = json.dumps(data)
@@ -422,29 +425,34 @@ class FakeJenkinsJob(threading.Thread):
if self.canceled:
self.jenkins.all_jobs.remove(self)
return
-self.callback.jenkins_endpoint(FakeJenkinsEvent(
-self.name, self.number, self.parameters,
-'STARTED'))
+self.callback.jenkins_endpoint(FakeJenkinsEvent(self.name,
+self.number,
+self.parameters,
+'STARTED'))
if self.jenkins.hold_jobs_in_build:
self._wait()
self.log.debug("Job %s continuing" % (self.parameters['UUID']))
result = 'SUCCESS'
-if ('ZUUL_REF' in self.parameters) and self.jenkins.fakeShouldFailTest(
-self.name,
-self.parameters['ZUUL_REF']):
+if (('ZUUL_REF' in self.parameters) and
+self.jenkins.fakeShouldFailTest(self.name,
+self.parameters['ZUUL_REF'])):
result = 'FAILURE'
if self.aborted:
result = 'ABORTED'
self.jenkins.fakeAddHistory(name=self.name, number=self.number,
result=result)
-self.callback.jenkins_endpoint(FakeJenkinsEvent(
-self.name, self.number, self.parameters,
-'COMPLETED', result))
-self.callback.jenkins_endpoint(FakeJenkinsEvent(
-self.name, self.number, self.parameters,
-'FINISHED', result))
+self.callback.jenkins_endpoint(FakeJenkinsEvent(self.name,
+self.number,
+self.parameters,
+'COMPLETED',
+result))
+self.callback.jenkins_endpoint(FakeJenkinsEvent(self.name,
+self.number,
+self.parameters,
+'FINISHED',
+result))
self.jenkins.all_jobs.remove(self)
@@ -479,8 +487,8 @@ class FakeJenkins(object):
self.log.debug("releasing job %s" % (job.parameters['UUID']))
job.release()
else:
-self.log.debug("not releasing job %s" % (
-job.parameters['UUID']))
+self.log.debug("not releasing job %s" %
+(job.parameters['UUID']))
self.log.debug("done releasing jobs %s (%s)" % (regex,
len(self.all_jobs)))
@@ -580,11 +588,16 @@ class FakeURLOpener(object):
res = urlparse.urlparse(self.url)
path = res.path
project = '/'.join(path.split('/')[2:-2])
-ret = ''
+ret = '001e# service=git-upload-pack\n'
+ret += ('000000a31270149696713ba7e06f1beb760f20d359c4abed HEAD\x00'
+'multi_ack thin-pack side-band side-band-64k ofs-delta '
+'shallow no-progress include-tag multi_ack_detailed no-done\n')
path = os.path.join(UPSTREAM_ROOT, project)
repo = git.Repo(path)
for ref in repo.refs:
-ret += ref.object.hexsha + '\t' + ref.path + '\n'
+r = ref.object.hexsha + ' ' + ref.path + '\n'
+ret += '%04x%s' % (len(r) + 4, r)
+ret += '0000'
return ret
@@ -810,7 +823,7 @@ class testScheduler(unittest.TestCase):
def test_independent_queues(self):
"Test that changes end up in the right queues"
self.fake_jenkins.hold_jobs_in_build = True
-A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
B = self.fake_gerrit.addFakeChange('org/project1', 'master', 'B')
C = self.fake_gerrit.addFakeChange('org/project2', 'master', 'C')
A.addApproval('CRVW', 2)
@@ -1117,13 +1130,18 @@ class testScheduler(unittest.TestCase):
def test_post(self):
"Test that post jobs run"
-e = {"type": "ref-updated",
-"submitter": {"name": "User Name"},
-"refUpdate": {"oldRev":
-"90f173846e3af9154517b88543ffbd1691f31366",
-"newRev":
-"d479a0bfcb34da57a31adb2a595c0cf687812543",
-"refName": "master", "project": "org/project"}}
+e = {
+"type": "ref-updated",
+"submitter": {
+"name": "User Name",
+},
+"refUpdate": {
+"oldRev": "90f173846e3af9154517b88543ffbd1691f31366",
+"newRev": "d479a0bfcb34da57a31adb2a595c0cf687812543",
+"refName": "master",
+"project": "org/project",
+}
+}
self.fake_gerrit.addEvent(e)
self.waitUntilSettled()


@@ -4,3 +4,5 @@ Paste
webob
paramiko
GitPython>=0.3.2.RC1
+lockfile
+python-daemon


@@ -10,8 +10,8 @@ commands = nosetests {posargs}
downloadcache = ~/cache/pip
[testenv:pep8]
-deps = pep8==1.2
-commands = pep8 --repeat --show-source --exclude=.venv,.tox,dist,doc,build .
+deps = pep8==1.3.3
+commands = pep8 --ignore=E125 --repeat --show-source --exclude=.venv,.tox,dist,doc,build .
[testenv:cover]
setenv = NOSE_WITH_COVERAGE=1


@@ -16,7 +16,15 @@
import argparse
import ConfigParser
import daemon
-import daemon.pidlockfile
+try:
+import daemon.pidlockfile as pid_file_module
+pid_file_module # workaround for pyflakes issue #13
+except:
+# as of python-daemon 1.6 it doesn't bundle pidlockfile anymore
+# instead it depends on lockfile-0.9.1 which uses pidfile.
+import daemon.pidfile as pid_file_module
import logging.config
import os
import signal
@@ -93,8 +101,8 @@ class Server(object):
signal.signal(signal.SIGHUP, self.reconfigure_handler)
signal.signal(signal.SIGUSR1, self.exit_handler)
while True:
-signal.pause()
+try:
+signal.pause()
+except KeyboardInterrupt:
+print "Ctrl + C: asking scheduler to exit nicely...\n"
+self.exit_handler( signal.SIGINT, None )
if __name__ == '__main__':
server = Server()
@@ -120,7 +131,7 @@ if __name__ == '__main__':
pid_fn = os.path.expanduser(server.config.get('zuul', 'pidfile'))
else:
pid_fn = '/var/run/zuul/zuul.pid'
-pid = daemon.pidlockfile.TimeoutPIDLockFile(pid_fn, 10)
+pid = pid_file_module.TimeoutPIDLockFile(pid_fn, 10)
if server.args.nodaemon:
server.setup_logging()


@@ -155,7 +155,7 @@ class ExtendedJenkins(jenkins.Jenkins):
# Jenkins returns a 302 from this URL, unless Referer is not set,
# then you get a 404.
request = urllib2.Request(self.server + CANCEL_QUEUE % locals(),
-headers={'Referer': self.server})
+headers={'Referer': self.server})
self.jenkins_open(request)
def get_build_info(self, name, number):
@@ -227,8 +227,9 @@
params['ZUUL_BRANCH'] = change.branch
params['GERRIT_CHANGES'] = changes_str
params['ZUUL_CHANGES'] = changes_str
-params['ZUUL_REF'] = 'refs/zuul/%s/%s' % (change.branch,
-change.current_build_set.ref)
+params['ZUUL_REF'] = ('refs/zuul/%s/%s' %
+(change.branch,
+change.current_build_set.ref))
zuul_changes = ' '.join(['%s,%s' % (c.number, c.patchset)
for c in dependent_changes + [change]])


@@ -140,8 +140,8 @@ class Gerrit(object):
data = json.loads(lines[0])
if not data:
return False
-self.log.debug("Received data from Gerrit query: \n%s" % (
-pprint.pformat(data)))
+self.log.debug("Received data from Gerrit query: \n%s" %
+(pprint.pformat(data)))
return data
def _open(self):


@@ -486,8 +486,7 @@ class Change(Changeish):
return '<Change 0x%x %s>' % (id(self), self._id())
def equals(self, other):
-if (self.number == other.number and
-self.patchset == other.patchset):
+if self.number == other.number and self.patchset == other.patchset:
return True
return False
@@ -508,8 +507,7 @@ class Ref(Changeish):
return self.newrev
def equals(self, other):
-if (self.ref == other.ref and
-self.newrev == other.newrev):
+if self.ref == other.ref and self.newrev == other.newrev:
return True
return False
@@ -565,7 +563,7 @@ class TriggerEvent(object):
class EventFilter(object):
def __init__(self, types=[], branches=[], refs=[], approvals={},
-comment_filters=[]):
+comment_filters=[]):
self._types = types
self._branches = branches
self._refs = refs


@@ -98,8 +98,8 @@ class Scheduler(threading.Thread):
branches=toList(trigger.get('branch')),
refs=toList(trigger.get('ref')),
approvals=approvals,
-comment_filters=toList(
-trigger.get('comment_filter')))
+comment_filters=
+toList(trigger.get('comment_filter')))
manager.event_filters.append(f)
for config_job in data['jobs']:
@@ -533,8 +533,8 @@ class BasePipelineManager(object):
if hasattr(change, 'refspec') and not ref:
change.current_build_set.setConfiguration()
ref = change.current_build_set.getRef()
-merged = self.sched.merger.mergeChanges([change], ref,
-mode=model.MERGE_IF_NECESSARY)
+mode = model.MERGE_IF_NECESSARY
+merged = self.sched.merger.mergeChanges([change], ref, mode=mode)
if not merged:
self.log.info("Unable to merge change %s" % change)
self.pipeline.setUnableToMerge(change)


@@ -117,12 +117,41 @@ class Gerrit(object):
message, action)
def _getInfoRefs(self, project):
-url = "https://%s/p/%s/info/refs" % (self.server, project)
+url = "https://%s/p/%s/info/refs?service=git-upload-pack" % (
+self.server, project)
data = urllib2.urlopen(url).read()
ret = {}
-for line in data.split('\n'):
-if not line:
+read_headers = False
+read_advertisement = False
+if data[4] != '#':
+raise Exception("Gerrit repository does not support "
+"git-upload-pack")
+i = 0
+while i < len(data):
+if len(data) - i < 4:
+raise Exception("Invalid length in info/refs")
+plen = int(data[i:i + 4], 16)
+i += 4
+# It's the length of the packet, including the 4 bytes of the
+# length itself, unless it's null, in which case the length is
+# not included.
+if plen > 0:
+plen -= 4
+if len(data) - i < plen:
+raise Exception("Invalid data in info/refs")
+line = data[i:i + plen]
+i += plen
+if not read_headers:
+if plen == 0:
+read_headers = True
+continue
+if not read_advertisement:
+read_advertisement = True
+continue
+if plen == 0:
+# The terminating null
+continue
line = line.strip()
revision, ref = line.split()
ret[ref] = revision
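
The new _getInfoRefs code above, like the FakeURLOpener change in the test suite, speaks git's "smart HTTP" ref advertisement, which is framed as pkt-lines: each packet starts with four hex digits giving the packet length including those four length bytes, while a literal 0000 flush packet carries no payload and its length does not count itself. The sketch below is not part of the commit; the helper names pkt_line and parse_pkt_lines are purely illustrative.

# Minimal pkt-line framing sketch (illustrative helpers, not from the commit).
def pkt_line(payload):
    """Frame one payload string: 4 hex length digits, then the payload."""
    return '%04x%s' % (len(payload) + 4, payload)


def parse_pkt_lines(data):
    """Yield each packet's payload; an empty string marks a 0000 flush packet."""
    i = 0
    while i < len(data):
        plen = int(data[i:i + 4], 16)
        i += 4
        if plen == 0:
            # Flush packet: the length does not include itself.
            yield ''
            continue
        yield data[i:i + plen - 4]
        i += plen - 4

Worked example: '# service=git-upload-pack\n' is 26 bytes, so pkt_line() frames it as '001e# service=git-upload-pack\n' (0x001e == 30 == 26 + 4). That is the first packet FakeURLOpener now emits and the header packet the parser above skips before reading the ref advertisement; the trailing '0000' written by the fake is the "terminating null" the parser's comment refers to.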
return ret