Merge "Add support for sqlalchemy reporter"
commit 4da3416c3e
@@ -1 +1,6 @@
# This is a cross-platform list tracking distribution packages needed by tests;
# see http://docs.openstack.org/infra/bindep/ for additional information.

mysql-client [test]
mysql-server [test]
libjpeg-dev [test]
@@ -80,3 +80,15 @@ SMTP
  Who the report should be emailed to by default.
  This can be overridden by individual pipelines.
  ``default_to=you@example.com``

SQL
----

  Only one connection per database is permitted.

  **driver=sql**

  **dburi**
    Database connection information in the form of a URI understood by
    SQLAlchemy, e.g. http://docs.sqlalchemy.org/en/rel_1_0/core/engines.html#database-urls
    ``dburi=mysql://user:pass@localhost/db``
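For a rough sense of what the driver does with this setting: the URI is handed straight to SQLAlchemy's create_engine, so whatever dialect/driver it names must be importable. A minimal standalone sketch (the URI below is a placeholder, not part of this change):

    import sqlalchemy as sa

    # Placeholder dburi; the mysql+pymysql dialect requires the PyMySQL package.
    engine = sa.create_engine('mysql+pymysql://user:pass@localhost/zuul')
    with engine.connect() as conn:
        print(conn.execute(sa.text('SELECT 1')).scalar())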
@@ -34,7 +34,7 @@ SMTP
A simple email reporter is also available.

A :ref:`connection` that uses the smtp driver must be supplied to the
trigger.
reporter.

SMTP Configuration
~~~~~~~~~~~~~~~~~~
@@ -60,3 +60,42 @@ providing alternatives as arguments to the reporter. For example, ::
  to: you@example.com
  from: alternative@example.com
  subject: Change {change} failed

SQL
---

This reporter is used to store results in a database.

A :ref:`connection` that uses the sql driver must be supplied to the
reporter.

SQL Configuration
~~~~~~~~~~~~~~~~~

zuul.conf contains the database connection and credentials. To store different
reports in different databases you'll need to create a new connection per
database.

The sql reporter is used to store the results from individual builds rather
than the change. As such the sql reporter does nothing on "start" or
"merge-failure".

**score**
  A score to store for the result of the build. e.g. -1 might indicate a failed
  build, similar to the vote posted back via the gerrit reporter.

For example ::

  pipelines:
    - name: post-merge
      manager: IndependentPipelineManager
      source: my_gerrit
      trigger:
        my_gerrit:
          - event: change-merged
      success:
        mydb_conn:
          score: 1
      failure:
        mydb_conn:
          score: -1
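The ``mydb_conn`` name above has to correspond to a ``[connection]`` section in
zuul.conf; a minimal sketch (connection name and credentials here are
placeholders, not part of this change) ::

  [connection mydb_conn]
  driver=sql
  dburi=mysql+pymysql://user:pass@localhost/zuul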
@@ -44,3 +44,7 @@ server=localhost
port=25
default_from=zuul@example.com
default_to=you@example.com

[connection mydatabase]
driver=sql
dburi=mysql+pymysql://user@localhost/zuul
@@ -15,3 +15,5 @@ apscheduler>=3.0
PrettyTable>=0.6,<0.8
babel>=1.0
six>=1.6.0
sqlalchemy
alembic
@@ -31,3 +31,7 @@ console_scripts =
source-dir = doc/source
build-dir = doc/build
all_files = 1

[extras]
mysql_reporter=
  PyMySQL
@@ -11,3 +11,4 @@ testrepository>=0.0.17
testtools>=0.9.32
sphinxcontrib-programoutput
mock
PyMySQL
@@ -34,16 +34,20 @@ import subprocess
import swiftclient
import threading
import time
import uuid


import git
import gear
import fixtures
import pymysql
import statsd
import testtools
from git import GitCommandError

import zuul.connection.gerrit
import zuul.connection.smtp
import zuul.connection.sql
import zuul.scheduler
import zuul.webapp
import zuul.rpclistener
@@ -855,6 +859,43 @@ class FakeSwiftClientConnection(swiftclient.client.Connection):
        return endpoint, ''


class MySQLSchemaFixture(fixtures.Fixture):
    def setUp(self):
        super(MySQLSchemaFixture, self).setUp()

        random_bits = ''.join(random.choice(string.ascii_lowercase +
                                            string.ascii_uppercase)
                              for x in range(8))
        self.name = '%s_%s' % (random_bits, os.getpid())
        self.passwd = uuid.uuid4().hex
        db = pymysql.connect(host="localhost",
                             user="openstack_citest",
                             passwd="openstack_citest",
                             db="openstack_citest")
        cur = db.cursor()
        cur.execute("create database %s" % self.name)
        cur.execute(
            "grant all on %s.* to '%s'@'localhost' identified by '%s'" %
            (self.name, self.name, self.passwd))
        cur.execute("flush privileges")

        self.dburi = 'mysql+pymysql://%s:%s@localhost/%s' % (self.name,
                                                             self.passwd,
                                                             self.name)
        self.addDetail('dburi', testtools.content.text_content(self.dburi))
        self.addCleanup(self.cleanup)

    def cleanup(self):
        db = pymysql.connect(host="localhost",
                             user="openstack_citest",
                             passwd="openstack_citest",
                             db="openstack_citest")
        cur = db.cursor()
        cur.execute("drop database %s" % self.name)
        cur.execute("drop user '%s'@'localhost'" % self.name)
        cur.execute("flush privileges")


class BaseTestCase(testtools.TestCase):
    log = logging.getLogger("zuul.test")
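A hypothetical standalone use of this fixture, to show what it provides (not part of the change; assumes the local openstack_citest account that tools/test-setup.sh provisions):

    import sqlalchemy as sa
    import testtools

    from tests.base import MySQLSchemaFixture


    class ExampleSchemaFixtureTest(testtools.TestCase):
        def test_throwaway_database(self):
            # The fixture creates a randomly named database and user, and
            # drops both again during cleanup.
            f = self.useFixture(MySQLSchemaFixture())
            engine = sa.create_engine(f.dburi)
            with engine.connect() as conn:
                self.assertEqual(1, conn.execute(sa.text('SELECT 1')).scalar())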
@@ -1039,6 +1080,8 @@ class ZuulTestCase(BaseTestCase):
        self.addCleanup(self.shutdown)

    def configure_connections(self):
        # TODO(jhesketh): This should come from lib.connections for better
        # coverage
        # Register connections from the config
        self.smtp_messages = []
@@ -1087,6 +1130,9 @@ class ZuulTestCase(BaseTestCase):
            elif con_driver == 'smtp':
                self.connections[con_name] = \
                    zuul.connection.smtp.SMTPConnection(con_name, con_config)
            elif con_driver == 'sql':
                self.connections[con_name] = \
                    zuul.connection.sql.SQLConnection(con_name, con_config)
            else:
                raise Exception("Unknown driver, %s, for connection %s"
                                % (con_config['driver'], con_name))
@@ -1429,3 +1475,20 @@ class ZuulTestCase(BaseTestCase):

        pprint.pprint(self.statsd.stats)
        raise Exception("Key %s not found in reported stats" % key)


class ZuulDBTestCase(ZuulTestCase):
    def setup_config(self, config_file='zuul-connections-same-gerrit.conf'):
        super(ZuulDBTestCase, self).setup_config(config_file)
        for section_name in self.config.sections():
            con_match = re.match(r'^connection ([\'\"]?)(.*)(\1)$',
                                 section_name, re.I)
            if not con_match:
                continue

            if self.config.get(section_name, 'driver') == 'sql':
                f = MySQLSchemaFixture()
                self.useFixture(f)
                if (self.config.get(section_name, 'dburi') ==
                        '$MYSQL_FIXTURE_DBURI$'):
                    self.config.set(section_name, 'dburi', f.dburi)
tests/fixtures/layout-sql-reporter.yaml (new file, 27 lines)
@@ -0,0 +1,27 @@
pipelines:
  - name: check
    manager: IndependentPipelineManager
    source:
      review_gerrit
    trigger:
      review_gerrit:
        - event: patchset-created
    success:
      review_gerrit:
        verified: 1
      resultsdb:
        score: 1
    failure:
      review_gerrit:
        verified: -1
      resultsdb:
        score: -1
      resultsdb_failures:
        score: -1

projects:
  - name: org/project
    check:
      - project-merge:
        - project-test1
        - project-test2
tests/fixtures/zuul-connections-bad-sql.conf (new file, 50 lines)
@@ -0,0 +1,50 @@
[gearman]
server=127.0.0.1

[zuul]
layout_config=layout-connections-multiple-voters.yaml
url_pattern=http://logs.example.com/{change.number}/{change.patchset}/{pipeline.name}/{job.name}/{build.number}
job_name_in_report=true

[merger]
git_dir=/tmp/zuul-test/git
git_user_email=zuul@example.com
git_user_name=zuul
zuul_url=http://zuul.example.com/p

[swift]
authurl=https://identity.api.example.org/v2.0/
user=username
key=password
tenant_name=" "

default_container=logs
region_name=EXP
logserver_prefix=http://logs.example.org/server.app/

[connection review_gerrit]
driver=gerrit
server=review.example.com
user=jenkins
sshkey=none

[connection alt_voting_gerrit]
driver=gerrit
server=alt_review.example.com
user=civoter
sshkey=none

[connection outgoing_smtp]
driver=smtp
server=localhost
port=25
default_from=zuul@example.com
default_to=you@example.com

[connection resultsdb]
driver=sql
dburi=mysql+pymysql://bad:creds@host/db

[connection resultsdb_failures]
driver=sql
dburi=mysql+pymysql://bad:creds@host/db
@@ -40,3 +40,11 @@ server=localhost
port=25
default_from=zuul@example.com
default_to=you@example.com

[connection resultsdb]
driver=sql
dburi=$MYSQL_FIXTURE_DBURI$

[connection resultsdb_failures]
driver=sql
dburi=$MYSQL_FIXTURE_DBURI$
@@ -15,9 +15,21 @@
import logging
import testtools

import zuul.connection.gerrit
import sqlalchemy as sa

from tests.base import ZuulTestCase
import zuul.connection.gerrit
import zuul.connection.sql

from tests.base import ZuulTestCase, ZuulDBTestCase


def _get_reporter_from_connection_name(reporters, connection_name):
    # Reporters are placed into lists for each action they may exist in.
    # Search through the given list for the correct reporter by its connection
    # name
    for r in reporters:
        if r.connection.connection_name == connection_name:
            return r


class TestGerritConnection(testtools.TestCase):
@@ -28,11 +40,18 @@ class TestGerritConnection(testtools.TestCase):
            zuul.connection.gerrit.GerritConnection.driver_name)


class TestConnections(ZuulTestCase):
    def setup_config(self, config_file='zuul-connections-same-gerrit.conf'):
        super(TestConnections, self).setup_config(config_file)
class TestSQLConnection(testtools.TestCase):
    log = logging.getLogger("zuul.test_connection")

    def test_multiple_connections(self):
    def test_driver_name(self):
        self.assertEqual(
            'sql',
            zuul.connection.sql.SQLConnection.driver_name
        )


class TestConnections(ZuulDBTestCase):
    def test_multiple_gerrit_connections(self):
        "Test multiple connections to the one gerrit"

        A = self.fake_review_gerrit.addFakeChange('org/project', 'master', 'A')
@@ -58,6 +77,178 @@ class TestConnections(ZuulTestCase):
        self.assertEqual(B.patchsets[-1]['approvals'][0]['by']['username'],
                         'civoter')

    def _test_sql_tables_created(self, metadata_table=None):
        "Test the tables for storing results are created properly"
        buildset_table = 'zuul_buildset'
        build_table = 'zuul_build'

        insp = sa.engine.reflection.Inspector(
            self.connections['resultsdb'].engine)

        self.assertEqual(9, len(insp.get_columns(buildset_table)))
        self.assertEqual(10, len(insp.get_columns(build_table)))

    def test_sql_tables_created(self):
        "Test the default table is created"
        self.config.set('zuul', 'layout_config',
                        'tests/fixtures/layout-sql-reporter.yaml')
        self.sched.reconfigure(self.config)
        self._test_sql_tables_created()

    def _test_sql_results(self):
        "Test results are entered into an sql table"
        # Grab the sa tables
        reporter = _get_reporter_from_connection_name(
            self.sched.layout.pipelines['check'].success_actions,
            'resultsdb'
        )

        # Add a success result
        A = self.fake_review_gerrit.addFakeChange('org/project', 'master', 'A')
        self.fake_review_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
        self.waitUntilSettled()

        # Add a failed result for a negative score
        B = self.fake_review_gerrit.addFakeChange('org/project', 'master', 'B')
        self.worker.addFailTest('project-test1', B)
        self.fake_review_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
        self.waitUntilSettled()

        conn = self.connections['resultsdb'].engine.connect()
        result = conn.execute(
            sa.sql.select([reporter.connection.zuul_buildset_table]))

        buildsets = result.fetchall()
        self.assertEqual(2, len(buildsets))
        buildset0 = buildsets[0]
        buildset1 = buildsets[1]

        self.assertEqual('check', buildset0['pipeline'])
        self.assertEqual('org/project', buildset0['project'])
        self.assertEqual(1, buildset0['change'])
        self.assertEqual(1, buildset0['patchset'])
        self.assertEqual(1, buildset0['score'])
        self.assertEqual('Build succeeded.', buildset0['message'])

        buildset0_builds = conn.execute(
            sa.sql.select([reporter.connection.zuul_build_table]).
            where(
                reporter.connection.zuul_build_table.c.buildset_id ==
                buildset0['id']
            )
        ).fetchall()

        # Check the first result, which should be the project-merge job
        self.assertEqual('project-merge', buildset0_builds[0]['job_name'])
        self.assertEqual("SUCCESS", buildset0_builds[0]['result'])
        self.assertEqual('http://logs.example.com/1/1/check/project-merge/0',
                         buildset0_builds[0]['log_url'])

        self.assertEqual('check', buildset1['pipeline'])
        self.assertEqual('org/project', buildset1['project'])
        self.assertEqual(2, buildset1['change'])
        self.assertEqual(1, buildset1['patchset'])
        self.assertEqual(-1, buildset1['score'])
        self.assertEqual('Build failed.', buildset1['message'])

        buildset1_builds = conn.execute(
            sa.sql.select([reporter.connection.zuul_build_table]).
            where(
                reporter.connection.zuul_build_table.c.buildset_id ==
                buildset1['id']
            )
        ).fetchall()

        # Check the second last result, which should be the project-test1 job
        # which failed
        self.assertEqual('project-test1', buildset1_builds[-2]['job_name'])
        self.assertEqual("FAILURE", buildset1_builds[-2]['result'])
        self.assertEqual('http://logs.example.com/2/1/check/project-test1/4',
                         buildset1_builds[-2]['log_url'])

    def test_sql_results(self):
        "Test results are entered into the default sql table"
        self.config.set('zuul', 'layout_config',
                        'tests/fixtures/layout-sql-reporter.yaml')
        self.sched.reconfigure(self.config)
        self._test_sql_results()

    def test_multiple_sql_connections(self):
        "Test putting results in different databases"
        self.config.set('zuul', 'layout_config',
                        'tests/fixtures/layout-sql-reporter.yaml')
        self.sched.reconfigure(self.config)

        # Add a successful result
        A = self.fake_review_gerrit.addFakeChange('org/project', 'master', 'A')
        self.fake_review_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
        self.waitUntilSettled()

        # Add a failed result
        B = self.fake_review_gerrit.addFakeChange('org/project', 'master', 'B')
        self.worker.addFailTest('project-test1', B)
        self.fake_review_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
        self.waitUntilSettled()

        # Grab the sa tables for resultsdb
        reporter1 = _get_reporter_from_connection_name(
            self.sched.layout.pipelines['check'].success_actions,
            'resultsdb'
        )

        conn = self.connections['resultsdb'].engine.connect()
        buildsets_resultsdb = conn.execute(sa.sql.select(
            [reporter1.connection.zuul_buildset_table])).fetchall()
        # Should have been 2 buildsets reported to the resultsdb (both success
        # and failure report)
        self.assertEqual(2, len(buildsets_resultsdb))

        # The first one should have passed
        self.assertEqual('check', buildsets_resultsdb[0]['pipeline'])
        self.assertEqual('org/project', buildsets_resultsdb[0]['project'])
        self.assertEqual(1, buildsets_resultsdb[0]['change'])
        self.assertEqual(1, buildsets_resultsdb[0]['patchset'])
        self.assertEqual(1, buildsets_resultsdb[0]['score'])
        self.assertEqual('Build succeeded.', buildsets_resultsdb[0]['message'])

        # Grab the sa tables for resultsdb_failures
        reporter2 = _get_reporter_from_connection_name(
            self.sched.layout.pipelines['check'].failure_actions,
            'resultsdb_failures'
        )

        conn = self.connections['resultsdb_failures'].engine.connect()
        buildsets_resultsdb_failures = conn.execute(sa.sql.select(
            [reporter2.connection.zuul_buildset_table])).fetchall()
        # The failure db should only have 1 buildset failed
        self.assertEqual(1, len(buildsets_resultsdb_failures))

        self.assertEqual('check', buildsets_resultsdb_failures[0]['pipeline'])
        self.assertEqual(
            'org/project', buildsets_resultsdb_failures[0]['project'])
        self.assertEqual(2, buildsets_resultsdb_failures[0]['change'])
        self.assertEqual(1, buildsets_resultsdb_failures[0]['patchset'])
        self.assertEqual(-1, buildsets_resultsdb_failures[0]['score'])
        self.assertEqual(
            'Build failed.', buildsets_resultsdb_failures[0]['message'])


class TestConnectionsBadSQL(ZuulDBTestCase):
    def setup_config(self, config_file='zuul-connections-bad-sql.conf'):
        super(TestConnectionsBadSQL, self).setup_config(config_file)

    def test_unable_to_connect(self):
        "Test the SQL reporter fails gracefully when unable to connect"
        self.config.set('zuul', 'layout_config',
                        'tests/fixtures/layout-sql-reporter.yaml')
        self.sched.reconfigure(self.config)

        # Trigger a reporter. If no errors are raised, the reporter has been
        # disabled correctly
        A = self.fake_review_gerrit.addFakeChange('org/project', 'master', 'A')
        self.fake_review_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
        self.waitUntilSettled()


class TestMultipleGerrits(ZuulTestCase):
    def setup_config(self,
@@ -12,18 +12,18 @@
# License for the specific language governing permissions and limitations
# under the License.

import fixtures
import logging
import testtools

import zuul.reporter
import zuul.reporter.gerrit
import zuul.reporter.smtp
import zuul.reporter.sql


class TestSMTPReporter(testtools.TestCase):
    log = logging.getLogger("zuul.test_reporter")

    def setUp(self):
        super(TestSMTPReporter, self).setUp()

    def test_reporter_abc(self):
        # We only need to instantiate a class for this
        reporter = zuul.reporter.smtp.SMTPReporter({})  # noqa
@@ -35,12 +35,30 @@ class TestSMTPReporter(testtools.TestCase):
class TestGerritReporter(testtools.TestCase):
    log = logging.getLogger("zuul.test_reporter")

    def setUp(self):
        super(TestGerritReporter, self).setUp()

    def test_reporter_abc(self):
        # We only need to instantiate a class for this
        reporter = zuul.reporter.gerrit.GerritReporter(None)  # noqa

    def test_reporter_name(self):
        self.assertEqual('gerrit', zuul.reporter.gerrit.GerritReporter.name)


class TestSQLReporter(testtools.TestCase):
    log = logging.getLogger("zuul.test_reporter")

    def test_reporter_abc(self):
        # We only need to instantiate a class for this
        # First mock out _setup_tables
        def _fake_setup_tables(self):
            pass

        self.useFixture(fixtures.MonkeyPatch(
            'zuul.reporter.sql.SQLReporter._setup_tables',
            _fake_setup_tables
        ))

        reporter = zuul.reporter.sql.SQLReporter()  # noqa

    def test_reporter_name(self):
        self.assertEqual(
            'sql', zuul.reporter.sql.SQLReporter.name)
tools/test-setup.sh (new executable file, 33 lines)
@@ -0,0 +1,33 @@
#!/bin/bash -xe

# This script will be run by OpenStack CI before unit tests are run;
# it sets up the test system as needed.
# Developers should set up their test systems in a similar way.

# This setup needs to be run as a user that can run sudo.

# The root password for the MySQL database; pass it in via
# MYSQL_ROOT_PW.
DB_ROOT_PW=${MYSQL_ROOT_PW:-insecure_slave}

# This user and its password are used by the tests; if you change it,
# your tests might fail.
DB_USER=openstack_citest
DB_PW=openstack_citest

sudo -H mysqladmin -u root password $DB_ROOT_PW

# It's best practice to remove anonymous users from the database. If
# an anonymous user exists, then it matches first for connections and
# other connections from that host will not work.
sudo -H mysql -u root -p$DB_ROOT_PW -h localhost -e "
    DELETE FROM mysql.user WHERE User='';
    FLUSH PRIVILEGES;
    GRANT ALL PRIVILEGES ON *.*
        TO '$DB_USER'@'%' identified by '$DB_PW' WITH GRANT OPTION;"

# Now create our database.
mysql -u $DB_USER -p$DB_PW -h 127.0.0.1 -e "
    SET default_storage_engine=MYISAM;
    DROP DATABASE IF EXISTS openstack_citest;
    CREATE DATABASE openstack_citest CHARACTER SET utf8;"
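A quick way to confirm that the account this script provisions is reachable from the test environment is to open a connection the same way the MySQLSchemaFixture in tests/base.py does (a sketch, assuming a local MySQL server):

    import pymysql

    # These are the credentials test-setup.sh creates; the test fixtures
    # assume they exist on localhost.
    db = pymysql.connect(host="localhost",
                         user="openstack_citest",
                         passwd="openstack_citest",
                         db="openstack_citest")
    cur = db.cursor()
    cur.execute("SELECT VERSION()")
    print(cur.fetchone())
    db.close()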
zuul/alembic/sql_reporter/README (new file, 1 line)
@@ -0,0 +1 @@
Generic single-database configuration.
zuul/alembic/sql_reporter/env.py (new file, 70 lines)
@@ -0,0 +1,70 @@
from __future__ import with_statement
from alembic import context
from sqlalchemy import engine_from_config, pool
# from logging.config import fileConfig

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
# fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url, target_metadata=target_metadata, literal_binds=True)

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix='sqlalchemy.',
        poolclass=pool.NullPool)

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()

if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
zuul/alembic/sql_reporter/script.py.mako (new file, 24 lines)
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}
@@ -0,0 +1,53 @@
"""Set up initial reporter tables

Revision ID: 4d3ebd7f06b9
Revises:
Create Date: 2015-12-06 15:27:38.080020

"""

# revision identifiers, used by Alembic.
revision = '4d3ebd7f06b9'
down_revision = None
branch_labels = None
depends_on = None

from alembic import op
import sqlalchemy as sa

BUILDSET_TABLE = 'zuul_buildset'
BUILD_TABLE = 'zuul_build'


def upgrade():
    op.create_table(
        BUILDSET_TABLE,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('zuul_ref', sa.String(255)),
        sa.Column('pipeline', sa.String(255)),
        sa.Column('project', sa.String(255)),
        sa.Column('change', sa.Integer, nullable=True),
        sa.Column('patchset', sa.Integer, nullable=True),
        sa.Column('ref', sa.String(255)),
        sa.Column('score', sa.Integer),
        sa.Column('message', sa.TEXT()),
    )

    op.create_table(
        BUILD_TABLE,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('buildset_id', sa.Integer,
                  sa.ForeignKey(BUILDSET_TABLE + ".id")),
        sa.Column('uuid', sa.String(36)),
        sa.Column('job_name', sa.String(255)),
        sa.Column('result', sa.String(255)),
        sa.Column('start_time', sa.DateTime()),
        sa.Column('end_time', sa.DateTime()),
        sa.Column('voting', sa.Boolean),
        sa.Column('log_url', sa.String(255)),
        sa.Column('node_name', sa.String(255)),
    )


def downgrade():
    raise Exception("Downgrades not supported")
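This revision is applied programmatically by the connection's _migrate() method in zuul/connection/sql.py further down. A standalone sketch of the same call, with a placeholder database URL:

    import alembic.command
    import alembic.config

    config = alembic.config.Config()
    # 'zuul:alembic/sql_reporter' resolves the migration scripts inside the
    # installed zuul package; the URL below is only an example.
    config.set_main_option("script_location", "zuul:alembic/sql_reporter")
    config.set_main_option("sqlalchemy.url",
                           "mysql+pymysql://user:pass@localhost/zuul")
    alembic.command.upgrade(config, 'head')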
zuul/alembic_reporter.ini (new file, 69 lines)
@@ -0,0 +1,69 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
# NOTE(jhesketh): We may use alembic for other db components of zuul in the
# future. Use a sub-folder for the reporter's own versions.
script_location = alembic/sql_reporter

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# max length of characters to apply to the
# "slug" field
#truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; this defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat alembic/versions

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = mysql+pymysql://user@localhost/database

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
zuul/connection/sql.py (new file, 104 lines)
@@ -0,0 +1,104 @@
# Copyright 2014 Rackspace Australia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import logging

import alembic
import alembic.config
import sqlalchemy as sa
import voluptuous as v

from zuul.connection import BaseConnection

BUILDSET_TABLE = 'zuul_buildset'
BUILD_TABLE = 'zuul_build'


class SQLConnection(BaseConnection):
    driver_name = 'sql'
    log = logging.getLogger("connection.sql")

    def __init__(self, connection_name, connection_config):

        super(SQLConnection, self).__init__(connection_name, connection_config)

        self.dburi = None
        self.engine = None
        self.connection = None
        self.tables_established = False
        try:
            self.dburi = self.connection_config.get('dburi')
            self.engine = sa.create_engine(self.dburi)
            self._migrate()
            self._setup_tables()
            self.tables_established = True
        except sa.exc.NoSuchModuleError:
            self.log.exception(
                "The required module for the dburi dialect isn't available. "
                "SQL connection %s will be unavailable." % connection_name)
        except sa.exc.OperationalError:
            self.log.exception(
                "Unable to connect to the database or establish the required "
                "tables. Reporter %s is disabled" % self)

    def _migrate(self):
        """Perform the alembic migrations for this connection"""
        with self.engine.begin() as conn:
            context = alembic.migration.MigrationContext.configure(conn)
            current_rev = context.get_current_revision()
            self.log.debug('Current migration revision: %s' % current_rev)

            config = alembic.config.Config()
            config.set_main_option("script_location",
                                   "zuul:alembic/sql_reporter")
            config.set_main_option("sqlalchemy.url",
                                   self.connection_config.get('dburi'))

            alembic.command.upgrade(config, 'head')

    def _setup_tables(self):
        metadata = sa.MetaData()

        self.zuul_buildset_table = sa.Table(
            BUILDSET_TABLE, metadata,
            sa.Column('id', sa.Integer, primary_key=True),
            sa.Column('zuul_ref', sa.String(255)),
            sa.Column('pipeline', sa.String(255)),
            sa.Column('project', sa.String(255)),
            sa.Column('change', sa.Integer, nullable=True),
            sa.Column('patchset', sa.Integer, nullable=True),
            sa.Column('ref', sa.String(255)),
            sa.Column('score', sa.Integer),
            sa.Column('message', sa.TEXT()),
        )

        self.zuul_build_table = sa.Table(
            BUILD_TABLE, metadata,
            sa.Column('id', sa.Integer, primary_key=True),
            sa.Column('buildset_id', sa.Integer,
                      sa.ForeignKey(BUILDSET_TABLE + ".id")),
            sa.Column('uuid', sa.String(36)),
            sa.Column('job_name', sa.String(255)),
            sa.Column('result', sa.String(255)),
            sa.Column('start_time', sa.DateTime()),
            sa.Column('end_time', sa.DateTime()),
            sa.Column('voting', sa.Boolean),
            sa.Column('log_url', sa.String(255)),
            sa.Column('node_name', sa.String(255)),
        )


def getSchema():
    sql_connection = v.Any(str, v.Schema({}, extra=True))
    return sql_connection
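For orientation, a small sketch of how the resulting connection object can be inspected once Zuul has loaded it (mirrors the checks in tests/test_connection.py; the 'connections' dict and the 'resultsdb' name are assumptions, not part of this file):

    import sqlalchemy as sa

    # 'connections' is the dict built by zuul.lib.connections.configure_connections();
    # 'resultsdb' is whatever [connection ...] name zuul.conf defines.
    conn = connections['resultsdb']
    if conn.tables_established:
        insp = sa.engine.reflection.Inspector(conn.engine)
        print(insp.get_table_names())  # expect zuul_buildset and zuul_build
        print([c['name'] for c in insp.get_columns('zuul_buildset')])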
@@ -167,6 +167,7 @@ class LayoutSchema(object):
        'reporter': {
            'gerrit': 'zuul.reporter.gerrit',
            'smtp': 'zuul.reporter.smtp',
            'sql': 'zuul.reporter.sql',
        },
    }
    standard_drivers = {
@@ -17,6 +17,7 @@ import re

import zuul.connection.gerrit
import zuul.connection.smtp
import zuul.connection.sql


def configure_connections(config):
@@ -48,6 +49,9 @@ def configure_connections(config):
        elif con_driver == 'smtp':
            connections[con_name] = \
                zuul.connection.smtp.SMTPConnection(con_name, con_config)
        elif con_driver == 'sql':
            connections[con_name] = \
                zuul.connection.sql.SQLConnection(con_name, con_config)
        else:
            raise Exception("Unknown driver, %s, for connection %s"
                            % (con_config['driver'], con_name))
@@ -64,40 +64,43 @@ class BaseReporter(object):
        }
        return format_methods[self._action]

    def _formatItemReport(self, pipeline, item):
    def _formatItemReport(self, pipeline, item, with_jobs=True):
        """Format a report from the given items. Usually to provide results to
        a reporter taking free-form text."""
        ret = self._getFormatter()(pipeline, item)
        ret = self._getFormatter()(pipeline, item, with_jobs)

        if pipeline.footer_message:
            ret += '\n' + pipeline.footer_message

        return ret

    def _formatItemReportStart(self, pipeline, item):
    def _formatItemReportStart(self, pipeline, item, with_jobs=True):
        msg = "Starting %s jobs." % pipeline.name
        if self.sched.config.has_option('zuul', 'status_url'):
            msg += "\n" + self.sched.config.get('zuul', 'status_url')
        return msg

    def _formatItemReportSuccess(self, pipeline, item):
        return (pipeline.success_message + '\n\n' +
                self._formatItemReportJobs(pipeline, item))
    def _formatItemReportSuccess(self, pipeline, item, with_jobs=True):
        msg = pipeline.success_message
        if with_jobs:
            msg += '\n\n' + self._formatItemReportJobs(pipeline, item)
        return msg

    def _formatItemReportFailure(self, pipeline, item):
    def _formatItemReportFailure(self, pipeline, item, with_jobs=True):
        if item.dequeued_needing_change:
            msg = 'This change depends on a change that failed to merge.\n'
        elif not pipeline.didMergerSucceed(item):
            msg = pipeline.merge_failure_message
        else:
            msg = (pipeline.failure_message + '\n\n' +
                   self._formatItemReportJobs(pipeline, item))
            msg = pipeline.failure_message
        if with_jobs:
            msg += '\n\n' + self._formatItemReportJobs(pipeline, item)
        return msg

    def _formatItemReportMergeFailure(self, pipeline, item):
    def _formatItemReportMergeFailure(self, pipeline, item, with_jobs=True):
        return pipeline.merge_failure_message

    def _formatItemReportDisabled(self, pipeline, item):
    def _formatItemReportDisabled(self, pipeline, item, with_jobs=True):
        if item.current_build_set.result == 'SUCCESS':
            return self._formatItemReportSuccess(pipeline, item)
        elif item.current_build_set.result == 'FAILURE':
zuul/reporter/sql.py (new file, 94 lines)
@@ -0,0 +1,94 @@
# Copyright 2015 Rackspace Australia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import datetime
import logging
import voluptuous as v

from zuul.reporter import BaseReporter


class SQLReporter(BaseReporter):
    """Sends off reports to a database."""

    name = 'sql'
    log = logging.getLogger("zuul.reporter.mysql.SQLReporter")

    def __init__(self, reporter_config={}, sched=None, connection=None):
        super(SQLReporter, self).__init__(
            reporter_config, sched, connection)
        self.result_score = reporter_config.get('score', None)

    def report(self, source, pipeline, item):
        """Create an entry into a database."""

        if not self.connection.tables_established:
            self.log.warn("SQL reporter (%s) is disabled" % self)
            return

        if self.sched.config.has_option('zuul', 'url_pattern'):
            url_pattern = self.sched.config.get('zuul', 'url_pattern')
        else:
            url_pattern = None

        score = self.reporter_config['score']\
            if 'score' in self.reporter_config else 0

        with self.connection.engine.begin() as conn:
            buildset_ins = self.connection.zuul_buildset_table.insert().values(
                zuul_ref=item.current_build_set.ref,
                pipeline=item.pipeline.name,
                project=item.change.project.name,
                change=item.change.number,
                patchset=item.change.patchset,
                ref=item.change.refspec,
                score=score,
                message=self._formatItemReport(
                    pipeline, item, with_jobs=False),
            )
            buildset_ins_result = conn.execute(buildset_ins)
            build_inserts = []

            for job in pipeline.getJobs(item):
                build = item.current_build_set.getBuild(job.name)
                if not build:
                    # build hasn't begun. The sql reporter can only send back
                    # stats about builds. It doesn't understand how to store
                    # information about the change.
                    continue

                (result, url) = item.formatJobResult(job, url_pattern)

                build_inserts.append({
                    'buildset_id': buildset_ins_result.inserted_primary_key,
                    'uuid': build.uuid,
                    'job_name': build.job.name,
                    'result': result,
                    'start_time': datetime.datetime.fromtimestamp(
                        build.start_time),
                    'end_time': datetime.datetime.fromtimestamp(
                        build.end_time),
                    'voting': build.job.voting,
                    'log_url': url,
                    'node_name': build.node_name,
                })
            conn.execute(self.connection.zuul_build_table.insert(),
                         build_inserts)


def getSchema():
    sql_reporter = v.Schema({
        'score': int,
    })
    return sql_reporter
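To give a sense of what ends up in the two tables, a hypothetical query sketch against the connection's table objects (mirrors the assertions in tests/test_connection.py; 'conn_obj' stands in for whatever SQLConnection instance Zuul built from zuul.conf):

    import sqlalchemy as sa

    # 'conn_obj' would be the SQLConnection instance backing this reporter.
    with conn_obj.engine.connect() as conn:
        buildsets = conn.execute(
            sa.sql.select([conn_obj.zuul_buildset_table])).fetchall()
        for bs in buildsets:
            builds = conn.execute(
                sa.sql.select([conn_obj.zuul_build_table]).where(
                    conn_obj.zuul_build_table.c.buildset_id == bs['id']
                )).fetchall()
            print(bs['pipeline'], bs['change'], bs['score'], len(builds))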
@@ -359,6 +359,7 @@ class Scheduler(threading.Thread):
            'reporter': {
                'gerrit': 'zuul.reporter.gerrit:GerritReporter',
                'smtp': 'zuul.reporter.smtp:SMTPReporter',
                'sql': 'zuul.reporter.sql:SQLReporter',
            },
        }