Merge branch 'master' into feature/zuulv3

Change-Id: I37a3c5d4f12917b111b7eb624f8b68689687ebc4
changes/31/441731/5
Joshua Hesketh 6 years ago committed by James E. Blair
commit 25695cbb51

1
.gitignore vendored

@ -2,6 +2,7 @@
*.egg
*.egg-info
*.pyc
.idea
.test
.testrepository
.tox

@ -1,2 +1,7 @@
# This is a cross-platform list tracking distribution packages needed by tests;
# see http://docs.openstack.org/infra/bindep/ for additional information.
mysql-client [test]
mysql-server [test]
libjpeg-dev [test]
zookeeperd [platform:dpkg]

@ -38,6 +38,9 @@ Create a connection with gerrit.
Path to SSH key to use when logging into above server.
``sshkey=/home/zuul/.ssh/id_rsa``
**keepalive**
Optional: Keepalive timeout, 0 means no keepalive.
``keepalive=60``
Gerrit Configuration
~~~~~~~~~~~~~~~~~~~~
@ -77,3 +80,15 @@ SMTP
Who the report should be emailed to by default.
This can be overridden by individual pipelines.
``default_to=you@example.com``
SQL
----
Only one connection per database is permitted.
**driver=sql**
**dburi**
Database connection information in the form of a URI understood by
sqlalchemy. eg http://docs.sqlalchemy.org/en/rel_1_0/core/engines.html#database-urls
``dburi=mysql://user:pass@localhost/db``

@ -34,7 +34,7 @@ SMTP
A simple email reporter is also available.
A :ref:`connection` that uses the smtp driver must be supplied to the
trigger.
reporter.
SMTP Configuration
~~~~~~~~~~~~~~~~~~
@ -60,3 +60,42 @@ providing alternatives as arguments to the reporter. For example, ::
to: you@example.com
from: alternative@example.com
subject: Change {change} failed
SQL
---
This reporter is used to store results in a database.
A :ref:`connection` that uses the sql driver must be supplied to the
reporter.
SQL Configuration
~~~~~~~~~~~~~~~~~
zuul.conf contains the database connection and credentials. To store different
reports in different databases you'll need to create a new connection per
database.
The sql reporter is used to store the results from individual builds rather
than the change. As such the sql reporter does nothing on "start" or
"merge-failure".
**score**
A score to store for the result of the build. eg: -1 might indicate a failed
build similar to the vote posted back via the gerrit reporter.
For example ::
pipelines:
- name: post-merge
manager: IndependentPipelineManager
source: my_gerrit
trigger:
my_gerrit:
- event: change-merged
success:
mydb_conn:
score: 1
failure:
mydb_conn:
score: -1

@ -148,11 +148,9 @@
case 'skipped':
$status.addClass('label-info');
break;
case 'in progress':
case 'queued':
case 'lost':
// 'in progress' 'queued' 'lost' 'aborted' ...
default:
$status.addClass('label-default');
break;
}
$status.text(result);
return $status;

@ -37,6 +37,7 @@ server=review.example.com
;baseurl=https://review.example.com/r
user=jenkins
sshkey=/home/jenkins/.ssh/id_rsa
;keepalive=60
[connection smtp]
driver=smtp
@ -44,3 +45,7 @@ server=localhost
port=25
default_from=zuul@example.com
default_to=you@example.com
[connection mydatabase]
driver=sql
dburi=mysql+pymysql://user@localhost/zuul

@ -17,3 +17,5 @@ babel>=1.0
six>=1.6.0
ansible>=2.0.0.1
kazoo
sqlalchemy
alembic

@ -31,3 +31,7 @@ console_scripts =
source-dir = doc/source
build-dir = doc/build
all_files = 1
[extras]
mysql_reporter=
PyMySQL

@ -11,3 +11,4 @@ testrepository>=0.0.17
testtools>=0.9.32
sphinxcontrib-programoutput
mock
PyMySQL

@ -37,12 +37,15 @@ import sys
import tempfile
import threading
import time
import uuid
import git
import gear
import fixtures
import kazoo.client
import kazoo.exceptions
import pymysql
import statsd
import testtools
import testtools.content
@ -51,6 +54,7 @@ from git.exc import NoSuchPathError
import zuul.driver.gerrit.gerritsource as gerritsource
import zuul.driver.gerrit.gerritconnection as gerritconnection
import zuul.connection.sql
import zuul.scheduler
import zuul.webapp
import zuul.rpclistener
@ -273,6 +277,25 @@ class FakeChange(object):
"eventCreatedOn": 1487613810}
return event
def getRefUpdatedEvent(self):
    """Return a fake Gerrit ref-updated event for this change's branch.

    The old revision is read from the upstream repository's branch head;
    the new revision is the latest fake patchset's revision.
    """
    repo_path = os.path.join(self.upstream_root, self.project)
    previous_sha = git.Repo(repo_path).heads[self.branch].commit.hexsha
    ref_update = {
        "oldRev": previous_sha,
        "newRev": self.patchsets[-1]['revision'],
        "refName": self.branch,
        "project": self.project,
    }
    return {
        "type": "ref-updated",
        "submitter": {
            "name": "User Name",
        },
        "refUpdate": ref_update,
    }
def addApproval(self, category, value, username='reviewer_john',
granted_on=None, message=''):
if not granted_on:
@ -1067,6 +1090,43 @@ class ChrootedKazooFixture(fixtures.Fixture):
_tmp_client.stop()
class MySQLSchemaFixture(fixtures.Fixture):
    """Provide a private, throwaway MySQL database for a single test.

    setUp creates a uniquely named database and a same-named user with a
    random password, then exposes the connection string as ``self.dburi``
    (also attached as a testtools detail).  cleanup() drops both again.

    Requires a local MySQL server with the ``openstack_citest`` admin
    account (see the CI setup script).
    """

    def setUp(self):
        super(MySQLSchemaFixture, self).setUp()
        # Random name plus pid so parallel test runs never collide.
        random_bits = ''.join(random.choice(string.ascii_lowercase +
                                            string.ascii_uppercase)
                              for x in range(8))
        self.name = '%s_%s' % (random_bits, os.getpid())
        self.passwd = uuid.uuid4().hex
        db = pymysql.connect(host="localhost",
                             user="openstack_citest",
                             passwd="openstack_citest",
                             db="openstack_citest")
        # Close the admin connection even if schema creation fails,
        # otherwise each test leaks a MySQL connection.
        try:
            cur = db.cursor()
            # Identifiers cannot be bound as SQL parameters; self.name and
            # self.passwd are generated locally above, so interpolation is
            # safe here (no untrusted input).
            cur.execute("create database %s" % self.name)
            cur.execute(
                "grant all on %s.* to '%s'@'localhost' identified by '%s'" %
                (self.name, self.name, self.passwd))
            cur.execute("flush privileges")
        finally:
            db.close()
        self.dburi = 'mysql+pymysql://%s:%s@localhost/%s' % (self.name,
                                                             self.passwd,
                                                             self.name)
        self.addDetail('dburi', testtools.content.text_content(self.dburi))
        self.addCleanup(self.cleanup)

    def cleanup(self):
        """Drop the per-test database and user created in setUp."""
        db = pymysql.connect(host="localhost",
                             user="openstack_citest",
                             passwd="openstack_citest",
                             db="openstack_citest")
        try:
            cur = db.cursor()
            cur.execute("drop database %s" % self.name)
            cur.execute("drop user '%s'@'localhost'" % self.name)
            cur.execute("flush privileges")
        finally:
            db.close()
class BaseTestCase(testtools.TestCase):
log = logging.getLogger("zuul.test")
wait_timeout = 20
@ -1358,6 +1418,9 @@ class ZuulTestCase(BaseTestCase):
getGerritConnection))
# Set up smtp related fakes
# TODO(jhesketh): This should come from lib.connections for better
# coverage
# Register connections from the config
self.smtp_messages = []
def FakeSMTPFactory(*args, **kw):
@ -1868,3 +1931,20 @@ class ZuulTestCase(BaseTestCase):
class AnsibleZuulTestCase(ZuulTestCase):
"""ZuulTestCase but with an actual ansible launcher running"""
run_ansible = True
class ZuulDBTestCase(ZuulTestCase):
    """ZuulTestCase variant that backs 'sql' connections with a real DB.

    For every ``[connection ...]`` section in the config whose driver is
    ``sql``, a throwaway MySQL schema is created via MySQLSchemaFixture,
    and the placeholder dburi ``$MYSQL_FIXTURE_DBURI$`` is replaced with
    the fixture's real connection string.
    """

    def setup_config(self, config_file='zuul-connections-same-gerrit.conf'):
        super(ZuulDBTestCase, self).setup_config(config_file)
        for section_name in self.config.sections():
            # Match sections named: connection <name>, where <name> may be
            # wrapped in matching single or double quotes.
            con_match = re.match(r'^connection ([\'\"]?)(.*)(\1)$',
                                 section_name, re.I)
            if not con_match:
                continue
            # Guard against connection sections with no 'driver' option;
            # config.get() would otherwise raise NoOptionError.
            if not self.config.has_option(section_name, 'driver'):
                continue
            if self.config.get(section_name, 'driver') == 'sql':
                f = MySQLSchemaFixture()
                self.useFixture(f)
                if (self.config.get(section_name, 'dburi') ==
                        '$MYSQL_FIXTURE_DBURI$'):
                    self.config.set(section_name, 'dburi', f.dburi)

@ -1,4 +1,16 @@
pipelines:
- name: check
manager: IndependentPipelineManager
trigger:
gerrit:
- event: patchset-created
success:
gerrit:
verified: 1
failure:
gerrit:
verified: -1
- name: gate
manager: DependentPipelineManager
failure-message: Build failed. For information on how to proceed, see http://wiki.example.org/Test_Failures
@ -18,28 +30,54 @@ pipelines:
gerrit:
verified: -2
- name: post
manager: IndependentPipelineManager
trigger:
gerrit:
- event: ref-updated
ref: ^(?!refs/).*$
projects:
- name: org/project
check:
- integration
gate:
- integration
- name: org/project1
check:
- integration
gate:
- integration
- integration
post:
- postjob
- name: org/project2
check:
- integration
gate:
- integration
- integration
- name: org/project3
check:
- integration
gate:
- integration
- integration
- name: org/project4
check:
- integration
gate:
- integration
- integration
- name: org/project5
check:
- integration
gate:
- integration
- integration
- name: org/project6
check:
- integration
gate:
- integration
- integration

@ -0,0 +1,23 @@
pipelines:
- name: check
manager: IndependentPipelineManager
trigger:
gerrit:
- event: patchset-created
success:
gerrit:
verified: 1
failure:
gerrit:
verified: -1
jobs:
- name: mutex-one
mutex: test-mutex
- name: mutex-two
mutex: test-mutex
projects:
- name: org/project
check:
- project-test1

@ -0,0 +1,27 @@
pipelines:
- name: check
manager: IndependentPipelineManager
source:
review_gerrit
trigger:
review_gerrit:
- event: patchset-created
success:
review_gerrit:
verified: 1
resultsdb:
score: 1
failure:
review_gerrit:
verified: -1
resultsdb:
score: -1
resultsdb_failures:
score: -1
projects:
- name: org/project
check:
- project-merge:
- project-test1
- project-test2

@ -0,0 +1,50 @@
[gearman]
server=127.0.0.1
[zuul]
layout_config=layout-connections-multiple-voters.yaml
url_pattern=http://logs.example.com/{change.number}/{change.patchset}/{pipeline.name}/{job.name}/{build.number}
job_name_in_report=true
[merger]
git_dir=/tmp/zuul-test/git
git_user_email=zuul@example.com
git_user_name=zuul
zuul_url=http://zuul.example.com/p
[swift]
authurl=https://identity.api.example.org/v2.0/
user=username
key=password
tenant_name=" "
default_container=logs
region_name=EXP
logserver_prefix=http://logs.example.org/server.app/
[connection review_gerrit]
driver=gerrit
server=review.example.com
user=jenkins
sshkey=none
[connection alt_voting_gerrit]
driver=gerrit
server=alt_review.example.com
user=civoter
sshkey=none
[connection outgoing_smtp]
driver=smtp
server=localhost
port=25
default_from=zuul@example.com
default_to=you@example.com
[connection resultsdb]
driver=sql
dburi=mysql+pymysql://bad:creds@host/db
[connection resultsdb_failures]
driver=sql
dburi=mysql+pymysql://bad:creds@host/db

@ -29,13 +29,13 @@ logserver_prefix=http://logs.example.org/server.app/
driver=gerrit
server=review.example.com
user=jenkins
sshkey=none
sshkey=fake_id_rsa1
[connection alt_voting_gerrit]
driver=gerrit
server=review.example.com
user=civoter
sshkey=none
sshkey=fake_id_rsa2
[connection outgoing_smtp]
driver=smtp
@ -43,3 +43,12 @@ server=localhost
port=25
default_from=zuul@example.com
default_to=you@example.com
# TODOv3(jeblair): commented out until sqlalchemy connection ported to
# v3 driver syntax
#[connection resultsdb] driver=sql
#dburi=$MYSQL_FIXTURE_DBURI$
#[connection resultsdb_failures]
#driver=sql
#dburi=$MYSQL_FIXTURE_DBURI$

@ -29,7 +29,7 @@ logserver_prefix=http://logs.example.org/server.app/
driver=gerrit
server=review.example.com
user=jenkins
sshkey=none
sshkey=fake_id_rsa_path
[connection smtp]
driver=smtp

@ -89,6 +89,7 @@ class TestCloner(ZuulTestCase):
git_base_url=self.upstream_root,
projects=projects,
workspace=self.workspace_root,
zuul_project=build.parameters.get('ZUUL_PROJECT', None),
zuul_branch=build.parameters['ZUUL_BRANCH'],
zuul_ref=build.parameters['ZUUL_REF'],
zuul_url=self.src_root,
@ -105,11 +106,34 @@ class TestCloner(ZuulTestCase):
'be correct' % (project, number))
work = self.getWorkspaceRepos(projects)
upstream_repo_path = os.path.join(self.upstream_root, 'org/project1')
self.assertEquals(
# project1 is the zuul_project so the origin should be set to the
# zuul_url since that is the most up to date.
cache_repo_path = os.path.join(cache_root, 'org/project1')
self.assertNotEqual(
work['org/project1'].remotes.origin.url,
cache_repo_path,
'workspace repo origin should not be the cache'
)
zuul_url_repo_path = os.path.join(self.git_root, 'org/project1')
self.assertEqual(
work['org/project1'].remotes.origin.url,
zuul_url_repo_path,
'workspace repo origin should be the zuul url'
)
# project2 is not the zuul_project so the origin should be set
# to upstream since that is the best we can do
cache_repo_path = os.path.join(cache_root, 'org/project2')
self.assertNotEqual(
work['org/project2'].remotes.origin.url,
cache_repo_path,
'workspace repo origin should not be the cache'
)
upstream_repo_path = os.path.join(self.upstream_root, 'org/project2')
self.assertEqual(
work['org/project2'].remotes.origin.url,
upstream_repo_path,
'workspace repo origin should be upstream, not cache'
'workspace repo origin should be the upstream url'
)
self.worker.hold_jobs_in_build = False
@ -147,6 +171,7 @@ class TestCloner(ZuulTestCase):
git_base_url=self.upstream_root,
projects=projects,
workspace=self.workspace_root,
zuul_project=build.parameters.get('ZUUL_PROJECT', None),
zuul_branch=build.parameters['ZUUL_BRANCH'],
zuul_ref=build.parameters['ZUUL_REF'],
zuul_url=self.src_root,
@ -217,6 +242,7 @@ class TestCloner(ZuulTestCase):
git_base_url=self.upstream_root,
projects=projects,
workspace=self.workspace_root,
zuul_project=build.parameters.get('ZUUL_PROJECT', None),
zuul_branch=build.parameters['ZUUL_BRANCH'],
zuul_ref=build.parameters['ZUUL_REF'],
zuul_url=self.src_root,
@ -331,6 +357,7 @@ class TestCloner(ZuulTestCase):
git_base_url=self.upstream_root,
projects=projects,
workspace=self.workspace_root,
zuul_project=build.parameters.get('ZUUL_PROJECT', None),
zuul_branch=build.parameters['ZUUL_BRANCH'],
zuul_ref=build.parameters['ZUUL_REF'],
zuul_url=self.src_root,
@ -393,6 +420,7 @@ class TestCloner(ZuulTestCase):
git_base_url=self.upstream_root,
projects=projects,
workspace=self.workspace_root,
zuul_project=build.parameters.get('ZUUL_PROJECT', None),
zuul_branch=build.parameters['ZUUL_BRANCH'],
zuul_ref=build.parameters['ZUUL_REF'],
zuul_url=self.src_root,
@ -479,6 +507,7 @@ class TestCloner(ZuulTestCase):
git_base_url=self.upstream_root,
projects=projects,
workspace=self.workspace_root,
zuul_project=build.parameters.get('ZUUL_PROJECT', None),
zuul_branch=build.parameters['ZUUL_BRANCH'],
zuul_ref=build.parameters['ZUUL_REF'],
zuul_url=self.src_root,
@ -544,6 +573,7 @@ class TestCloner(ZuulTestCase):
git_base_url=self.upstream_root,
projects=projects,
workspace=self.workspace_root,
zuul_project=build.parameters.get('ZUUL_PROJECT', None),
zuul_branch=build.parameters.get('ZUUL_BRANCH', None),
zuul_ref=build.parameters.get('ZUUL_REF', None),
zuul_url=self.src_root,
@ -565,56 +595,158 @@ class TestCloner(ZuulTestCase):
self.worker.release()
self.waitUntilSettled()
def test_periodic_update(self):
# Test that the merger correctly updates its local repository
# before running a periodic job.
# Prime the merger with the current state
A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
self.waitUntilSettled()
# Merge a different change
B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B')
B.setMerged()
# Start a periodic job
self.worker.hold_jobs_in_build = True
self.launcher.negative_function_cache_ttl = 0
self.config.set('zuul', 'layout_config',
'tests/fixtures/layout-timer.yaml')
self.sched.reconfigure(self.config)
self.registerJobs()
# The pipeline triggers every second, so we should have seen
# several by now.
time.sleep(5)
self.waitUntilSettled()
builds = self.builds[:]
self.worker.hold_jobs_in_build = False
# Stop queuing timer triggered jobs so that the assertions
# below don't race against more jobs being queued.
self.config.set('zuul', 'layout_config',
'tests/fixtures/layout-no-timer.yaml')
self.sched.reconfigure(self.config)
self.registerJobs()
self.worker.release()
self.waitUntilSettled()
projects = ['org/project']
self.assertEquals(2, len(builds), "Two builds are running")
upstream = self.getUpstreamRepos(projects)
self.assertEqual(upstream['org/project'].commit('master').hexsha,
B.patchsets[0]['revision'])
states = [
{'org/project':
str(upstream['org/project'].commit('master')),
},
{'org/project':
str(upstream['org/project'].commit('master')),
},
]
for number, build in enumerate(builds):
self.log.debug("Build parameters: %s", build.parameters)
cloner = zuul.lib.cloner.Cloner(
git_base_url=self.upstream_root,
projects=projects,
workspace=self.workspace_root,
zuul_project=build.parameters.get('ZUUL_PROJECT', None),
zuul_branch=build.parameters.get('ZUUL_BRANCH', None),
zuul_ref=build.parameters.get('ZUUL_REF', None),
zuul_url=self.git_root,
)
cloner.execute()
work = self.getWorkspaceRepos(projects)
state = states[number]
for project in projects:
self.assertEquals(state[project],
str(work[project].commit('HEAD')),
'Project %s commit for build %s should '
'be correct' % (project, number))
shutil.rmtree(self.workspace_root)
self.worker.hold_jobs_in_build = False
self.worker.release()
self.waitUntilSettled()
def test_post_checkout(self):
project = "org/project"
path = os.path.join(self.upstream_root, project)
repo = git.Repo(path)
repo.head.reference = repo.heads['master']
commits = []
for i in range(0, 3):
commits.append(self.create_commit(project))
newRev = commits[1]
self.worker.hold_jobs_in_build = True
project = "org/project1"
A = self.fake_gerrit.addFakeChange(project, 'master', 'A')
event = A.getRefUpdatedEvent()
A.setMerged()
self.fake_gerrit.addEvent(event)
self.waitUntilSettled()
build = self.builds[0]
state = {'org/project1': build.parameters['ZUUL_COMMIT']}
build.release()
self.waitUntilSettled()
cloner = zuul.lib.cloner.Cloner(
git_base_url=self.upstream_root,
projects=[project],
workspace=self.workspace_root,
zuul_branch=None,
zuul_ref='master',
zuul_url=self.src_root,
zuul_project=project,
zuul_newrev=newRev,
zuul_project=build.parameters.get('ZUUL_PROJECT', None),
zuul_branch=build.parameters.get('ZUUL_BRANCH', None),
zuul_ref=build.parameters.get('ZUUL_REF', None),
zuul_newrev=build.parameters.get('ZUUL_NEWREV', None),
zuul_url=self.git_root,
)
cloner.execute()
repos = self.getWorkspaceRepos([project])
cloned_sha = repos[project].rev_parse('HEAD').hexsha
self.assertEqual(newRev, cloned_sha)
work = self.getWorkspaceRepos([project])
self.assertEquals(state[project],
str(work[project].commit('HEAD')),
'Project %s commit for build %s should '
'be correct' % (project, 0))
shutil.rmtree(self.workspace_root)
def test_post_and_master_checkout(self):
project = "org/project1"
master_project = "org/project2"
path = os.path.join(self.upstream_root, project)
repo = git.Repo(path)
repo.head.reference = repo.heads['master']
commits = []
for i in range(0, 3):
commits.append(self.create_commit(project))
newRev = commits[1]
self.worker.hold_jobs_in_build = True
projects = ["org/project1", "org/project2"]
A = self.fake_gerrit.addFakeChange(projects[0], 'master', 'A')
event = A.getRefUpdatedEvent()
A.setMerged()
self.fake_gerrit.addEvent(event)
self.waitUntilSettled()
build = self.builds[0]
upstream = self.getUpstreamRepos(projects)
state = {'org/project1':
build.parameters['ZUUL_COMMIT'],
'org/project2':
str(upstream['org/project2'].commit('master')),
}
build.release()
self.waitUntilSettled()
cloner = zuul.lib.cloner.Cloner(
git_base_url=self.upstream_root,
projects=[project, master_project],
projects=projects,
workspace=self.workspace_root,
zuul_branch=None,
zuul_ref='master',
zuul_url=self.src_root,
zuul_project=project,
zuul_newrev=newRev
zuul_project=build.parameters.get('ZUUL_PROJECT', None),
zuul_branch=build.parameters.get('ZUUL_BRANCH', None),
zuul_ref=build.parameters.get('ZUUL_REF', None),
zuul_newrev=build.parameters.get('ZUUL_NEWREV', None),
zuul_url=self.git_root,
)
cloner.execute()
repos = self.getWorkspaceRepos([project, master_project])
cloned_sha = repos[project].rev_parse('HEAD').hexsha
self.assertEqual(newRev, cloned_sha)
self.assertEqual(
repos[master_project].rev_parse('HEAD').hexsha,
repos[master_project].rev_parse('master').hexsha)
work = self.getWorkspaceRepos(projects)
for project in projects:
self.assertEquals(state[project],
str(work[project].commit('HEAD')),
'Project %s commit for build %s should '
'be correct' % (project, 0))
shutil.rmtree(self.workspace_root)

@ -12,14 +12,26 @@
# License for the specific language governing permissions and limitations
# under the License.
from tests.base import ZuulTestCase
import sqlalchemy as sa
from unittest import skip
from tests.base import ZuulTestCase, ZuulDBTestCase
def _get_reporter_from_connection_name(reporters, connection_name):
# Reporters are placed into lists for each action they may exist in.
# Search through the given list for the correct reporter by its conncetion
# name
for r in reporters:
if r.connection.connection_name == connection_name:
return r
class TestConnections(ZuulTestCase):
config_file = 'zuul-connections-same-gerrit.conf'
tenant_config_file = 'config/zuul-connections-same-gerrit/main.yaml'
def test_multiple_connections(self):
def test_multiple_gerrit_connections(self):
"Test multiple connections to the one gerrit"
A = self.fake_review_gerrit.addFakeChange('org/project', 'master', 'A')
@ -45,9 +57,184 @@ class TestConnections(ZuulTestCase):
self.assertEqual(B.patchsets[-1]['approvals'][0]['by']['username'],
'civoter')
def _test_sql_tables_created(self, metadata_table=None):
"Test the tables for storing results are created properly"
buildset_table = 'zuul_buildset'
build_table = 'zuul_build'
class TestMultipleGerrits(ZuulTestCase):
insp = sa.engine.reflection.Inspector(
self.connections['resultsdb'].engine)
self.assertEqual(9, len(insp.get_columns(buildset_table)))
self.assertEqual(10, len(insp.get_columns(build_table)))
@skip("Disabled for early v3 development")
def test_sql_tables_created(self):
"Test the default table is created"
self.config.set('zuul', 'layout_config',
'tests/fixtures/layout-sql-reporter.yaml')
self.sched.reconfigure(self.config)
self._test_sql_tables_created()
def _test_sql_results(self):
"Test results are entered into an sql table"
# Grab the sa tables
reporter = _get_reporter_from_connection_name(
self.sched.layout.pipelines['check'].success_actions,
'resultsdb'
)
# Add a success result
A = self.fake_review_gerrit.addFakeChange('org/project', 'master', 'A')
self.fake_review_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
self.waitUntilSettled()
# Add a failed result for a negative score
B = self.fake_review_gerrit.addFakeChange('org/project', 'master', 'B')
self.worker.addFailTest('project-test1', B)
self.fake_review_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
self.waitUntilSettled()
conn = self.connections['resultsdb'].engine.connect()
result = conn.execute(
sa.sql.select([reporter.connection.zuul_buildset_table]))
buildsets = result.fetchall()
self.assertEqual(2, len(buildsets))
buildset0 = buildsets[0]
buildset1 = buildsets[1]
self.assertEqual('check', buildset0['pipeline'])
self.assertEqual('org/project', buildset0['project'])
self.assertEqual(1, buildset0['change'])
self.assertEqual(1, buildset0['patchset'])
self.assertEqual(1, buildset0['score'])
self.assertEqual('Build succeeded.', buildset0['message'])
buildset0_builds = conn.execute(
sa.sql.select([reporter.connection.zuul_build_table]).
where(
reporter.connection.zuul_build_table.c.buildset_id ==
buildset0['id']
)
).fetchall()
# Check the first result, which should be the project-merge job
self.assertEqual('project-merge', buildset0_builds[0]['job_name'])
self.assertEqual("SUCCESS", buildset0_builds[0]['result'])
self.assertEqual('http://logs.example.com/1/1/check/project-merge/0',
buildset0_builds[0]['log_url'])
self.assertEqual('check', buildset1['pipeline'])
self.assertEqual('org/project', buildset1['project'])
self.assertEqual(2, buildset1['change'])
self.assertEqual(1, buildset1['patchset'])
self.assertEqual(-1, buildset1['score'])
self.assertEqual('Build failed.', buildset1['message'])
buildset1_builds = conn.execute(
sa.sql.select([reporter.connection.zuul_build_table]).
where(
reporter.connection.zuul_build_table.c.buildset_id ==
buildset1['id']
)
).fetchall()
# Check the second last result, which should be the project-test1 job
# which failed
self.assertEqual('project-test1', buildset1_builds[-2]['job_name'])
self.assertEqual("FAILURE", buildset1_builds[-2]['result'])
self.assertEqual('http://logs.example.com/2/1/check/project-test1/4',
buildset1_builds[-2]['log_url'])
@skip("Disabled for early v3 development")
def test_sql_results(self):
"Test results are entered into the default sql table"
self.config.set('zuul', 'layout_config',
'tests/fixtures/layout-sql-reporter.yaml')
self.sched.reconfigure(self.config)
self._test_sql_results()
@skip("Disabled for early v3 development")
def test_multiple_sql_connections(self):
"Test putting results in different databases"
self.config.set('zuul', 'layout_config',
'tests/fixtures/layout-sql-reporter.yaml')
self.sched.reconfigure(self.config)
# Add a successful result
A = self.fake_review_gerrit.addFakeChange('org/project', 'master', 'A')
self.fake_review_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
self.waitUntilSettled()
# Add a failed result
B = self.fake_review_gerrit.addFakeChange('org/project', 'master', 'B')
self.worker.addFailTest('project-test1', B)
self.fake_review_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
self.waitUntilSettled()
# Grab the sa tables for resultsdb
reporter1 = _get_reporter_from_connection_name(
self.sched.layout.pipelines['check'].success_actions,
'resultsdb'
)
conn = self.connections['resultsdb'].engine.connect()
buildsets_resultsdb = conn.execute(sa.sql.select(
[reporter1.connection.zuul_buildset_table])).fetchall()
# Should have been 2 buildset reported to the resultsdb (both success
# and failure report)
self.assertEqual(2, len(buildsets_resultsdb))
# The first one should have passed
self.assertEqual('check', buildsets_resultsdb[0]['pipeline'])
self.assertEqual('org/project', buildsets_resultsdb[0]['project'])
self.assertEqual(1, buildsets_resultsdb[0]['change'])
self.assertEqual(1, buildsets_resultsdb[0]['patchset'])
self.assertEqual(1, buildsets_resultsdb[0]['score'])
self.assertEqual('Build succeeded.', buildsets_resultsdb[0]['message'])
# Grab the sa tables for resultsdb_failures
reporter2 = _get_reporter_from_connection_name(
self.sched.layout.pipelines['check'].failure_actions,
'resultsdb_failures'
)
conn = self.connections['resultsdb_failures'].engine.connect()
buildsets_resultsdb_failures = conn.execute(sa.sql.select(
[reporter2.connection.zuul_buildset_table])).fetchall()
# The failure db should only have 1 buildset failed
self.assertEqual(1, len(buildsets_resultsdb_failures))
self.assertEqual('check', buildsets_resultsdb_failures[0]['pipeline'])
self.assertEqual(
'org/project', buildsets_resultsdb_failures[0]['project'])
self.assertEqual(2, buildsets_resultsdb_failures[0]['change'])
self.assertEqual(1, buildsets_resultsdb_failures[0]['patchset'])
self.assertEqual(-1, buildsets_resultsdb_failures[0]['score'])
self.assertEqual(
'Build failed.', buildsets_resultsdb_failures[0]['message'])
class TestConnectionsBadSQL(ZuulDBTestCase):
def setup_config(self, config_file='zuul-connections-bad-sql.conf'):
super(TestConnectionsBadSQL, self).setup_config(config_file)
@skip("Disabled for early v3 development")
def test_unable_to_connect(self):
"Test the SQL reporter fails gracefully when unable to connect"
self.config.set('zuul', 'layout_config',
'tests/fixtures/layout-sql-reporter.yaml')
self.sched.reconfigure(self.config)
# Trigger a reporter. If no errors are raised, the reporter has been
# disabled correctly
A = self.fake_review_gerrit.addFakeChange('org/project', 'master', 'A')
self.fake_review_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
self.waitUntilSettled()
class TestMultipleGerrits(ZuulTestCase):
config_file = 'zuul-connections-multiple-gerrits.conf'
tenant_config_file = 'config/zuul-connections-multiple-gerrits/main.yaml'

@ -2920,6 +2920,50 @@ class TestScheduler(ZuulTestCase):
self.launch_server.release('.*')
self.waitUntilSettled()
@skip("Disabled for early v3 development")
def test_timer_sshkey(self):
"Test that a periodic job can setup SSH key authentication"
self.worker.hold_jobs_in_build = True
self.config.set('zuul', 'layout_config',
'tests/fixtures/layout-timer.yaml')
self.sched.reconfigure(self.config)
self.registerJobs()
# The pipeline triggers every second, so we should have seen
# several by now.
time.sleep(5)
self.waitUntilSettled()
self.assertEqual(len(self.builds), 2)
ssh_wrapper = os.path.join(self.git_root, ".ssh_wrapper_gerrit")
self.assertTrue(os.path.isfile(ssh_wrapper))
with open(ssh_wrapper) as f:
ssh_wrapper_content = f.read()
self.assertIn("fake_id_rsa", ssh_wrapper_content)
# In the unit tests Merger runs in the same process,
# so we see its environment variables
self.assertEqual(os.environ['GIT_SSH'], ssh_wrapper)
self.worker.release('.*')
self.waitUntilSettled()
self.assertEqual(len(self.history), 2)
self.assertEqual(self.getJobFromHistory(
'project-bitrot-stable-old').result, 'SUCCESS')
self.assertEqual(self.getJobFromHistory(
'project-bitrot-stable-older').result, 'SUCCESS')
# Stop queuing timer triggered jobs and let any that may have
# queued through so that end of test assertions pass.
self.config.set('zuul', 'layout_config',
'tests/fixtures/layout-no-timer.yaml')
self.sched.reconfigure(self.config)
self.registerJobs()
self.waitUntilSettled()
self.worker.release('.*')
self.waitUntilSettled()
def test_client_enqueue_change(self):
"Test that the RPC client can enqueue a change"
A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')

@ -0,0 +1,33 @@
#!/bin/bash -xe

# This script will be run by OpenStack CI before unit tests are run;
# it sets up the test system as needed.
# Developers should set up their test systems in a similar way.

# This setup needs to be run as a user that can run sudo.

# The root password for the MySQL database; pass it in via
# MYSQL_ROOT_PW.
DB_ROOT_PW=${MYSQL_ROOT_PW:-insecure_slave}

# This user and its password are used by the tests; if you change them,
# your tests might fail.
DB_USER=openstack_citest
DB_PW=openstack_citest

sudo -H mysqladmin -u root password $DB_ROOT_PW

# It's best practice to remove anonymous users from the database. If
# an anonymous user exists, then it matches first for connections and
# other connections from that host will not work.
sudo -H mysql -u root -p$DB_ROOT_PW -h localhost -e "
DELETE FROM mysql.user WHERE User='';
FLUSH PRIVILEGES;
GRANT ALL PRIVILEGES ON *.*
TO '$DB_USER'@'%' identified by '$DB_PW' WITH GRANT OPTION;"

# Now create our database.
mysql -u $DB_USER -p$DB_PW -h 127.0.0.1 -e "
SET default_storage_engine=MYISAM;
DROP DATABASE IF EXISTS openstack_citest;
CREATE DATABASE openstack_citest CHARACTER SET utf8;"

@ -0,0 +1 @@
Generic single-database configuration.

@ -0,0 +1,70 @@
from __future__ import with_statement
from alembic import context
from sqlalchemy import engine_from_config, pool
# from logging.config import fileConfig
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
# fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL and not an Engine,
    though an Engine is acceptable here as well.  By skipping the Engine
    creation we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine and associate a
    connection with the context.
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix='sqlalchemy.',
        poolclass=pool.NullPool,
    )
    with engine.connect() as connection:
        context.configure(connection=connection,
                          target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()
# Alembic invokes this module with an active migration context; dispatch
# to the appropriate mode.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

@ -0,0 +1,24 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}

@ -0,0 +1,53 @@
"""Set up initial reporter tables
Revision ID: 4d3ebd7f06b9
Revises:
Create Date: 2015-12-06 15:27:38.080020
"""
# revision identifiers, used by Alembic.
revision = '4d3ebd7f06b9'
down_revision = None
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
BUILDSET_TABLE = 'zuul_buildset'
BUILD_TABLE = 'zuul_build'
def upgrade():
    """Create the initial zuul_buildset and zuul_build tables."""
    buildset_columns = (
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('zuul_ref', sa.String(255)),
        sa.Column('pipeline', sa.String(255)),
        sa.Column('project', sa.String(255)),
        sa.Column('change', sa.Integer, nullable=True),
        sa.Column('patchset', sa.Integer, nullable=True),
        sa.Column('ref', sa.String(255)),
        sa.Column('score', sa.Integer),
        sa.Column('message', sa.TEXT()),
    )
    op.create_table(BUILDSET_TABLE, *buildset_columns)

    build_columns = (
        sa.Column('id', sa.Integer, primary_key=True),
        # Each build row points back at the buildset it belongs to.
        sa.Column('buildset_id', sa.Integer,
                  sa.ForeignKey(BUILDSET_TABLE + ".id")),
        sa.Column('uuid', sa.String(36)),
        sa.Column('job_name', sa.String(255)),
        sa.Column('result', sa.String(255)),
        sa.Column('start_time', sa.DateTime()),
        sa.Column('end_time', sa.DateTime()),
        sa.Column('voting', sa.Boolean),
        sa.Column('log_url', sa.String(255)),
        sa.Column('node_name', sa.String(255)),
    )
    op.create_table(BUILD_TABLE, *build_columns)
def downgrade():
    """Refuse to run: schema downgrades are intentionally unsupported."""
    message = "Downgrades not supported"
    raise Exception(message)

@ -0,0 +1,69 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
# NOTE(jhesketh): We may use alembic for other db components of zuul in the
# future. Use a sub-folder for the reporter's own versions.
script_location = alembic/sql_reporter
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# max length of characters to apply to the
# "slug" field
#truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; this defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat alembic/versions
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = mysql+pymysql://user@localhost/database
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

@ -0,0 +1,104 @@
# Copyright 2014 Rackspace Australia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging

import alembic
import alembic.command
import alembic.config
import alembic.migration
import sqlalchemy as sa
import voluptuous as v

from zuul.connection import BaseConnection
BUILDSET_TABLE = 'zuul_buildset'
BUILD_TABLE = 'zuul_build'
class SQLConnection(BaseConnection):
driver_name = 'sql'
log = logging.getLogger("connection.sql")
    def __init__(self, connection_name, connection_config):
        """Set up the SQL connection and ensure the schema exists.

        :param connection_name: name of this connection from zuul.conf.
        :param connection_config: mapping of connection options; must
            contain 'dburi', a URI understood by sqlalchemy
            create_engine().
        """
        super(SQLConnection, self).__init__(connection_name, connection_config)

        # Setup state; left at these defaults if initialization below
        # fails, so the object still constructs with the connection
        # effectively disabled.
        self.dburi = None
        self.engine = None
        self.connection = None
        # Set True only after migration and table setup both succeed —
        # presumably checked by consumers before writing; confirm against
        # callers outside this file.
        self.tables_established = False
        try:
            self.dburi = self.connection_config.get('dburi')
            self.engine = sa.create_engine(self.dburi)
            self._migrate()
            self._setup_tables()
            self.tables_established = True
        except sa.exc.NoSuchModuleError:
            # The dialect in dburi names a DBAPI module that isn't
            # installed (e.g. mysql+pymysql without pymysql present).
            self.log.exception(
                "The required module for the dburi dialect isn't available. "
                "SQL connection %s will be unavailable." % connection_name)
        except sa.exc.OperationalError:
            # Database unreachable or schema setup failed; log and carry
            # on with the connection disabled rather than crashing zuul.
            self.log.exception(
                "Unable to connect to the database or establish the required "
                "tables. Reporter %s is disabled" % self)
def _migrate(self):
"""Perform the alembic migrations for this connection"""
with self.engine.begin() as conn:
context = alembic.migration.MigrationContext.configure(conn)
current_rev = context.get_current_revision()
self.log.debug('Current migration revision: %s' % current_rev)
config = alembic.config.Config()
config.set_main_option("script_location",
"zuul:alembic/sql_reporter")
config.set_main_option("sqlalchemy.url",
self.connection_config.get('dburi'))
alembic.command.upgrade(config, 'head')
def _setup_tables(self):
metadata = sa.MetaData()
self.zuul_buildset_table = sa.Table(
BUILDSET_TABLE, metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column