Merge "Change uuid columns to bigint"

This commit is contained in:
Jenkins 2015-11-19 14:07:01 +00:00 committed by Gerrit Code Review
commit ee1c388a7f
4 changed files with 533 additions and 28 deletions

View File

@ -13,7 +13,6 @@
# under the License.
import datetime
import uuid
from oslo_db.sqlalchemy import models # noqa
import sqlalchemy as sa
@ -48,10 +47,8 @@ class SubunitBase(models.ModelBase):
class Test(BASE, SubunitBase):
__tablename__ = 'tests'
__table_args__ = (sa.Index('ix_id', 'id'),
sa.Index('ix_test_id', 'test_id'))
id = sa.Column(sa.String(36), primary_key=True,
default=lambda: str(uuid.uuid4()))
__table_args__ = (sa.Index('ix_test_id', 'test_id'),)
id = sa.Column(sa.BigInteger, primary_key=True)
test_id = sa.Column(sa.String(256))
run_count = sa.Column(sa.Integer())
success = sa.Column(sa.Integer())
@ -61,9 +58,7 @@ class Test(BASE, SubunitBase):
class Run(BASE, SubunitBase):
__tablename__ = 'runs'
__table_args__ = (sa.Index('ix_run_id', 'id'), )
id = sa.Column(sa.String(36), primary_key=True,
default=lambda: str(uuid.uuid4()))
id = sa.Column(sa.BigInteger, primary_key=True)
skips = sa.Column(sa.Integer())
fails = sa.Column(sa.Integer())
passes = sa.Column(sa.Integer())
@ -80,11 +75,11 @@ class TestRun(BASE, SubunitBase):
sa.UniqueConstraint('test_id', 'run_id',
name='ix_test_run_test_id_run_id'))
id = sa.Column(sa.String(36), primary_key=True,
default=lambda: str(uuid.uuid4()))
test_id = sa.Column(sa.String(36), sa.ForeignKey('tests.id'),
id = sa.Column(sa.BigInteger, primary_key=True)
test_id = sa.Column(sa.BigInteger, sa.ForeignKey('tests.id'),
nullable=False)
run_id = sa.Column(sa.String(36), sa.ForeignKey('runs.id'), nullable=False)
run_id = sa.Column(sa.BigInteger, sa.ForeignKey('runs.id'),
nullable=False)
status = sa.Column(sa.String(256))
start_time = sa.Column(sa.DateTime())
start_time_microsecond = sa.Column(sa.Integer(), default=0)
@ -96,11 +91,10 @@ class RunMetadata(BASE, SubunitBase):
__tablename__ = 'run_metadata'
__table_args__ = (sa.Index('ix_run_metadata_run_id', 'run_id'),)
id = sa.Column(sa.String(36), primary_key=True,
default=lambda: str(uuid.uuid4()))
id = sa.Column(sa.BigInteger, primary_key=True)
key = sa.Column(sa.String(255))
value = sa.Column(sa.String(255))
run_id = sa.Column(sa.String(36), sa.ForeignKey('runs.id'))
run_id = sa.Column(sa.BigInteger, sa.ForeignKey('runs.id'))
class TestRunMetadata(BASE, SubunitBase):
@ -108,11 +102,10 @@ class TestRunMetadata(BASE, SubunitBase):
__table_args__ = (sa.Index('ix_test_run_metadata_test_run_id',
'test_run_id'),)
id = sa.Column(sa.String(36), primary_key=True,
default=lambda: str(uuid.uuid4()))
id = sa.Column(sa.BigInteger, primary_key=True)
key = sa.Column(sa.String(255))
value = sa.Column(sa.String(255))
test_run_id = sa.Column(sa.String(36), sa.ForeignKey('test_runs.id'))
test_run_id = sa.Column(sa.BigInteger, sa.ForeignKey('test_runs.id'))
class TestMetadata(BASE, SubunitBase):
@ -120,19 +113,17 @@ class TestMetadata(BASE, SubunitBase):
__table_args__ = (sa.Index('ix_test_metadata_test_id',
'test_id'),)
id = sa.Column(sa.String(36), primary_key=True,
default=lambda: str(uuid.uuid4()))
id = sa.Column(sa.BigInteger, primary_key=True)
key = sa.Column(sa.String(255))
value = sa.Column(sa.String(255))
test_id = sa.Column(sa.String(36), sa.ForeignKey('tests.id'))
test_id = sa.Column(sa.BigInteger, sa.ForeignKey('tests.id'))
class Attachments(BASE, SubunitBase):
__tablename__ = 'attachments'
__table_args__ = (sa.Index('ix_attachemnts_id',
'test_run_id'),)
id = sa.Column(sa.String(36), primary_key=True,
default=lambda: str(uuid.uuid4()))
test_run_id = sa.Column(sa.String(36))
id = sa.Column(sa.BigInteger, primary_key=True)
test_run_id = sa.Column(sa.BigInteger)
label = sa.Column(sa.String(255))
attachment = sa.Column(sa.LargeBinary())

View File

@ -0,0 +1,342 @@
# Copyright 2015 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""uuid to integer ids
Revision ID: 2822a408bdd0
Revises: b96122f780
Create Date: 2015-10-14 14:18:18.820521
"""
# revision identifiers, used by Alembic.
revision = '2822a408bdd0'
down_revision = 'b96122f780'
from alembic import context
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Convert every table's String(36) UUID primary key to an integer id.

    Strategy: build a parallel ``<table>_new`` copy of each table carrying
    both the old string id and a new auto-increment integer id, copy the
    rows across (children join their parents' ``_new`` tables to translate
    foreign keys to the fresh integer ids), drop the old string columns,
    verify row counts, then swap the new tables into place and rebuild
    the indexes.
    """
    dialect = context.get_context().dialect.name
    # SQLite autoincrement only works on INTEGER primary keys, so fall
    # back from BigInteger there.
    new_id_type = sa.Integer if dialect == 'sqlite' else sa.BigInteger

    # --- Create the parallel tables -------------------------------------
    # Each one keeps the old string ``id`` (and any string foreign keys)
    # next to the new integer columns so the copy step can translate refs.
    op.create_table('tests_new',
                    sa.Column('id', sa.String(36)),
                    sa.Column('new_id', new_id_type, primary_key=True,
                              autoincrement=True, nullable=False),
                    sa.Column('test_id', sa.String(256), nullable=False),
                    sa.Column('run_count', sa.Integer()),
                    sa.Column('success', sa.Integer()),
                    sa.Column('failure', sa.Integer()),
                    sa.Column('run_time', sa.Float(), nullable=True),
                    mysql_engine='InnoDB')
    op.create_table('test_runs_new',
                    sa.Column('id', sa.String(36)),
                    sa.Column('new_id', new_id_type, primary_key=True,
                              nullable=False),
                    sa.Column('test_id', sa.String(36), nullable=False),
                    sa.Column('new_test_id', new_id_type),
                    sa.Column('run_id', sa.String(36), nullable=False),
                    sa.Column('new_run_id', new_id_type),
                    sa.Column('status', sa.String(256)),
                    sa.Column('start_time', sa.DateTime()),
                    sa.Column('stop_time', sa.DateTime()),
                    sa.Column('start_time_microsecond', sa.Integer(),
                              default=0),
                    sa.Column('stop_time_microsecond', sa.Integer(),
                              default=0),
                    mysql_engine='InnoDB')
    op.create_table('runs_new',
                    sa.Column('id', sa.String(36)),
                    sa.Column('new_id', new_id_type(), primary_key=True,
                              nullable=False, autoincrement=True),
                    sa.Column('skips', sa.Integer()),
                    sa.Column('fails', sa.Integer()),
                    sa.Column('passes', sa.Integer()),
                    sa.Column('run_time', sa.Float()),
                    sa.Column('artifacts', sa.Text()),
                    sa.Column('run_at', sa.DateTime()),
                    mysql_engine='InnoDB')
    op.create_table('run_metadata_new',
                    sa.Column('id', sa.String(36), nullable=False),
                    sa.Column('new_id', new_id_type(), nullable=False,
                              primary_key=True, autoincrement=True),
                    sa.Column('key', sa.String(255)),
                    sa.Column('value', sa.String(255)),
                    sa.Column('run_id', sa.String(36), nullable=False),
                    sa.Column('new_run_id', new_id_type),
                    mysql_engine='InnoDB')
    op.create_table('test_run_metadata_new',
                    sa.Column('id', sa.String(36), nullable=False),
                    sa.Column('new_id', new_id_type, primary_key=True,
                              nullable=False, autoincrement=True),
                    sa.Column('key', sa.String(255)),
                    sa.Column('value', sa.String(255)),
                    sa.Column('test_run_id', sa.String(36),
                              nullable=False),
                    sa.Column('new_test_run_id', new_id_type),
                    mysql_engine='InnoDB')
    op.create_table('test_metadata_new',
                    sa.Column('id', sa.String(36), nullable=False),
                    sa.Column('new_id', new_id_type, primary_key=True,
                              nullable=False, autoincrement=True),
                    sa.Column('key', sa.String(255)),
                    sa.Column('value', sa.String(255)),
                    sa.Column('test_id', sa.String(36), nullable=False),
                    sa.Column('new_test_id', new_id_type),
                    mysql_engine='InnoDB')
    op.create_table('attachments_new',
                    sa.Column('id', sa.String(36), nullable=False),
                    sa.Column('new_id', new_id_type, primary_key=True,
                              nullable=False, autoincrement=True),
                    sa.Column('test_run_id', sa.String(36), nullable=False),
                    sa.Column('new_test_run_id', new_id_type),
                    sa.Column('label', sa.String(255)),
                    sa.Column('attachment', sa.LargeBinary()),
                    mysql_engine='InnoDB')

    # --- Copy the data --------------------------------------------------
    # ``key`` is a reserved word, quoted with backticks on MySQL/SQLite;
    # PostgreSQL uses the bare name.
    key_word = 'key' if dialect == 'postgresql' else '`key`'
    # Parents first; children join the new parent tables to pick up the
    # freshly generated integer ids for their foreign keys.
    op.execute('INSERT INTO tests_new (id, test_id, run_count, success, '
               'failure, run_time) SELECT id, test_id, run_count, success, '
               'failure, run_time FROM tests')
    op.execute('INSERT INTO runs_new (id, skips, fails, passes, run_time, '
               'artifacts, run_at) SELECT id, skips, fails, passes, run_time, '
               'artifacts, run_at FROM runs')
    op.execute('INSERT INTO test_runs_new (id, test_id, new_test_id, run_id, '
               'new_run_id, status, start_time, stop_time, '
               'start_time_microsecond, stop_time_microsecond) SELECT tr.id, '
               'tr.test_id, tn.new_id, tr.run_id, rn.new_id, status, '
               'start_time, stop_time, start_time_microsecond, '
               'stop_time_microsecond FROM test_runs tr INNER JOIN runs_new '
               'rn ON rn.id = tr.run_id INNER JOIN tests_new tn '
               'ON tn.id=tr.test_id')
    op.execute('INSERT INTO test_metadata_new (id, {}, value, test_id, '
               'new_test_id) SELECT tm.id, tm.key, tm.value, tm.test_id, '
               'tn.new_id FROM test_metadata tm INNER JOIN tests_new tn '
               'ON tn.id = tm.test_id'.format(key_word))
    op.execute('INSERT INTO test_run_metadata_new (id, {}, value, '
               'test_run_id, new_test_run_id) SELECT trm.id, trm.key, '
               'trm.value, trm.test_run_id, trn.new_id FROM test_run_metadata '
               'trm INNER JOIN test_runs_new trn ON trm.test_run_id = '
               'trn.id'.format(key_word))
    op.execute('INSERT INTO attachments_new (id, test_run_id, '
               'new_test_run_id, label, attachment) SELECT a.id, '
               'a.test_run_id, trn.new_id, a.label, a.attachment FROM '
               'attachments a INNER JOIN test_runs_new trn '
               'ON a.test_run_id = trn.id')

    # --- Drop the old string columns, promote the integer ones ----------
    if dialect == 'postgresql':
        # PostgreSQL can ALTER in place; no batch mode needed.
        op.drop_column('attachments_new', 'id')
        op.alter_column('attachments_new', 'new_id', new_column_name='id',
                        existing_type=new_id_type,
                        autoincrement=True)
        op.drop_column('attachments_new', 'test_run_id')
        op.alter_column('attachments_new', 'new_test_run_id',
                        new_column_name='test_run_id')
        op.drop_column('test_run_metadata_new', 'id')
        op.alter_column('test_run_metadata_new', 'new_id',
                        new_column_name='id',
                        existing_type=new_id_type,
                        autoincrement=True)
        op.drop_column('test_run_metadata_new', 'test_run_id')
        op.alter_column('test_run_metadata_new', 'new_test_run_id',
                        new_column_name='test_run_id',
                        existing_type=new_id_type)
        op.drop_column('run_metadata_new', 'id')
        op.alter_column('run_metadata_new', 'new_id',
                        new_column_name='id')
        op.drop_column('run_metadata_new', 'run_id')
        op.alter_column('run_metadata_new', 'new_run_id',
                        new_column_name='run_id',
                        existing_type=new_id_type)
        op.drop_column('test_metadata_new', 'id')
        op.alter_column('test_metadata_new', 'new_id',
                        new_column_name='id')
        op.drop_column('test_metadata_new', 'test_id')
        op.alter_column('test_metadata_new', 'new_test_id',
                        new_column_name='test_id',
                        existing_type=new_id_type)
        op.drop_column('test_runs_new', 'id')
        op.alter_column('test_runs_new', 'new_id',
                        new_column_name='id')
        op.drop_column('test_runs_new', 'test_id')
        op.alter_column('test_runs_new', 'new_test_id',
                        new_column_name='test_id',
                        existing_type=new_id_type)
        op.drop_column('test_runs_new', 'run_id')
        op.alter_column('test_runs_new', 'new_run_id',
                        new_column_name='run_id',
                        existing_type=new_id_type)
        op.drop_column('tests_new', 'id')
        op.alter_column('tests_new', 'new_id',
                        new_column_name='id')
        op.drop_column('runs_new', 'id')
        op.alter_column('runs_new', 'new_id',
                        new_column_name='id')
    else:
        # MySQL/SQLite go through batch mode (table copy on SQLite).  The
        # per-table work is uniform: swap ``new_id`` -> ``id`` plus any
        # integer foreign-key columns.
        fk_swaps = {
            'attachments_new': ['test_run_id'],
            'test_run_metadata_new': ['test_run_id'],
            'run_metadata_new': ['run_id'],
            'test_metadata_new': ['test_id'],
            'test_runs_new': ['test_id', 'run_id'],
            'tests_new': [],
            'runs_new': [],
        }
        for table, fk_cols in fk_swaps.items():
            with op.batch_alter_table(table) as batch_op:
                batch_op.drop_column('id')
                batch_op.alter_column('new_id', new_column_name='id',
                                      primary_key=True,
                                      existing_type=new_id_type,
                                      autoincrement=True)
                for col in fk_cols:
                    batch_op.drop_column(col)
                    batch_op.alter_column('new_' + col,
                                          new_column_name=col,
                                          existing_type=new_id_type)

    # --- Sanity check: every row must have survived the copy ------------
    tables = ('tests', 'runs', 'test_runs', 'test_metadata',
              'test_run_metadata', 'run_metadata', 'attachments')
    errors = []
    for table in tables:
        old_count = op.get_bind().execute(
            "SELECT COUNT(id) FROM {}".format(table)).first()[0]
        new_count = op.get_bind().execute(
            "SELECT COUNT(id) FROM {}_new".format(table)).first()[0]
        if old_count != new_count:
            errors.append("{} has {} rows and {}_new has {} rows".format(
                table, old_count, table, new_count))
    if errors:
        raise RuntimeError("Failed count checks: {}".format(','.join(errors)))

    # --- Swap the new tables into place ---------------------------------
    for table in tables:
        op.rename_table(table, table + '_old')
    for table in tables:
        op.rename_table(table + '_new', table)
    # Drop children before the tables they referenced.
    for table in ('test_run_metadata_old', 'attachments_old',
                  'test_metadata_old', 'run_metadata_old',
                  'test_runs_old', 'runs_old', 'tests_old'):
        op.drop_table(table)

    # --- Recreate indexes -- sqlite keeps the old ones around -----------
    if dialect != 'sqlite':
        op.create_index('ix_test_ids', 'tests', ['id', 'test_id'],
                        mysql_length={'test_id': 30})
        op.create_index('ix_test_key_value', 'test_metadata',
                        ['key', 'value'])
        op.create_index('ix_test_run_key_value', 'test_run_metadata',
                        ['key', 'value'])
        op.create_index('ix_run_key_value', 'run_metadata',
                        ['key', 'value'])
        op.create_index('ix_test_id_status', 'test_runs',
                        ['test_id', 'status'], mysql_length={'status': 30})
        op.create_index('ix_test_id_start_time', 'test_runs',
                        ['test_id', 'start_time'])
        op.create_unique_constraint('uq_test_runs', 'test_runs',
                                    ['test_id', 'run_id'])
        op.create_index('ix_tests_test_id', 'tests', ['test_id'],
                        mysql_length=30)
        op.create_index('ix_test_runs_test_id', 'test_runs', ['test_id'])
        op.create_index('ix_test_runs_run_id', 'test_runs', ['run_id'])
        op.create_index('ix_test_runs_start_time', 'test_runs',
                        ['start_time'])
        op.create_index('ix_test_runs_stop_time', 'test_runs',
                        ['stop_time'])
def downgrade():
    """Downgrade is unsupported: the old string UUIDs are discarded."""
    raise NotImplementedError()

View File

@ -14,7 +14,6 @@
import datetime
import six
from six import moves
import testscenarios
@ -272,7 +271,7 @@ class TestDatabaseAPI(base.TestCase):
'skip': run_num,
'fail': run_num + 1,
'pass': run_num + 2,
'id': six.text_type(runs[run_num].id),
'id': runs[run_num].id,
'run_time': 3.0,
'metadata': {
u'test_key': u'fun',
@ -285,7 +284,7 @@ class TestDatabaseAPI(base.TestCase):
'skip': run_num,
'fail': run_num + 1,
'pass': run_num + 2,
'id': six.text_type(runs[run_num].id),
'id': runs[run_num].id,
'run_time': 3.0,
'metadata': {
u'test_key': u'fun',

View File

@ -16,10 +16,11 @@
import datetime
import os
import uuid
from alembic import config
from alembic import script
import six
from six.moves import configparser as ConfigParser
import sqlalchemy
from sqlalchemy.engine import reflection
@ -398,3 +399,175 @@ class TestWalkMigrations(base.TestCase):
runs = [indx for indx in indxs if indx['column_names'] == ['run_id']]
self.assertEqual(1, len(tests))
self.assertEqual(1, len(runs))
def _pre_upgrade_2822a408bdd0(self, engine):
    """Seed one linked row in every table before the uuid->int migration.

    Returns a dict of the inserted rows so the matching _check method can
    verify they survive the migration with their relationships intact.
    """
    def _new_uuid():
        # All pre-migration primary keys are stringified UUID4s.
        return six.text_type(uuid.uuid4())

    seeded = {}
    # A run...
    runs = get_table(engine, 'runs')
    run = {'id': _new_uuid(),
           'skips': 0,
           'fails': 0,
           'passes': 1,
           'run_time': 1.0,
           'artifacts': 'https://am_i_really_a_fake_url',
           'run_at': datetime.datetime.utcnow()}
    runs.insert().values(run).execute()
    seeded['run'] = run
    # ...with a piece of run metadata.
    run_metadatas = get_table(engine, 'run_metadata')
    run_metadata = {'id': _new_uuid(),
                    'run_id': run['id'],
                    'key': 'attrs',
                    'value': 'an_attr'}
    run_metadatas.insert().values(run_metadata).execute()
    seeded['run_metadata'] = run_metadata
    # A test...
    tests = get_table(engine, 'tests')
    test = {'id': _new_uuid(),
            'test_id': 'I_am_a_real_test!',
            'success': 1,
            'failure': 0}
    tests.insert().values(test).execute()
    seeded['test'] = test
    # ...with a piece of test metadata.
    test_metadatas = get_table(engine, 'test_metadata')
    test_metadata = {'id': _new_uuid(),
                     'test_id': test['id'],
                     'key': 'a_real_key',
                     'value': 'an_attr'}
    test_metadatas.insert().values(test_metadata).execute()
    seeded['test_metadata'] = test_metadata
    # A test_run joining the test to the run.
    started_at = datetime.datetime.now()
    stopped_at = started_at + datetime.timedelta(0, 4)
    test_runs = get_table(engine, 'test_runs')
    test_run = {'id': _new_uuid(),
                'test_id': test['id'],
                'run_id': run['id'],
                'start_time': started_at,
                'status': 'success',
                'stop_time': stopped_at}
    test_runs.insert().values(test_run).execute()
    seeded['test_run'] = test_run
    # Metadata hanging off the test_run.
    test_run_metadatas = get_table(engine, 'test_run_metadata')
    test_run_metadata = {'id': _new_uuid(),
                         'test_run_id': test_run['id'],
                         'key': 'attrs',
                         'value': 'an_attr'}
    test_run_metadatas.insert().values(test_run_metadata).execute()
    seeded['test_run_metadata'] = test_run_metadata
    # And an attachment on the test_run.
    attachments = get_table(engine, 'attachments')
    attachment = {'id': _new_uuid(),
                  'test_run_id': test_run['id'],
                  'label': 'an_attachment',
                  'attachment': b'something'}
    attachments.insert().values(attachment).execute()
    seeded['attachment'] = attachment
    return seeded
def _check_2822a408bdd0(self, engine, data):
    """Verify the uuid-to-integer-id migration.

    Asserts that every table's primary key is now the single integer
    ``id`` column, that the id columns have the expected integer type,
    and that the rows seeded by _pre_upgrade_2822a408bdd0 survived the
    copy with their foreign keys re-pointed at the new integer ids.
    """
    tables = ('runs', 'run_metadata', 'tests', 'test_metadata',
              'test_runs', 'test_run_metadata', 'attachments')
    insp = reflection.Inspector(engine)
    # Check primary keys.  NOTE(review): the original checked 'runs' and
    # 'tests' twice and never inspected 'test_runs' or
    # 'test_run_metadata' -- a copy/paste bug; this covers each table
    # exactly once.
    for table in tables:
        pk = insp.get_pk_constraint(table)
        self.assertEqual(['id'], pk['constrained_columns'])
    # SQLite ends up with plain INTEGER ids; everything else gets BIGINT
    # (mirrors the dialect switch in the migration itself).
    if engine.dialect.name == 'sqlite':
        new_id_type = sqlalchemy.Integer
    else:
        new_id_type = sqlalchemy.BigInteger
    # Check the id column type on every table.
    for table in tables:
        id_col = [x for x in insp.get_columns(table)
                  if x['name'] == 'id'][0]
        self.assertIsInstance(id_col['type'], new_id_type)
    # Check all the new ids match up across tables.
    runs_t = get_table(engine, 'runs')
    run_ids = [x[0] for x in runs_t.select().execute()]
    run_metadatas_t = get_table(engine, 'run_metadata')
    tests_t = get_table(engine, 'tests')
    test_metadatas_t = get_table(engine, 'test_metadata')
    test_runs_t = get_table(engine, 'test_runs')
    test_runs_raw = list(test_runs_t.select().execute())
    # Positional indexing assumes column order (id, test_id, run_id,
    # status, start_time, stop_time, ...) -- confirm against the
    # migration's test_runs_new definition if columns change.
    test_run_test_ids = [x[1] for x in test_runs_raw]
    test_run_metadatas_t = get_table(engine, 'test_run_metadata')
    attachments_t = get_table(engine, 'attachments')
    # The test inserted before the migration must be referenced by a
    # test_run via its *new* integer id.
    test_row = list(tests_t.select().where(
        tests_t.c.test_id == data['test']['test_id']).execute())[0]
    self.assertIn(test_row[0], test_run_test_ids)
    # Check the test_run row contents.
    test_run_row = list(test_runs_t.select().where(
        test_runs_t.c.test_id == test_row[0]).execute())[0]
    self.assertEqual(test_run_row[3], data['test_run']['status'])
    # Microseconds are stored in separate columns, so compare without.
    self.assertEqual(test_run_row[4].replace(microsecond=0),
                     data['test_run']['start_time'].replace(microsecond=0))
    self.assertEqual(test_run_row[5].replace(microsecond=0),
                     data['test_run']['stop_time'].replace(microsecond=0))
    self.assertIn(test_run_row[2], run_ids)
    # Check the run row the test_run points at.
    run_row = list(runs_t.select().where(
        runs_t.c.id == test_run_row[2]).execute())[0]
    self.assertEqual(data['run']['artifacts'], run_row[5])
    # Run metadata must have followed the run's new id.
    run_metadata_row = list(run_metadatas_t.select().where(
        run_metadatas_t.c.run_id == run_row[0]).execute())[0]
    self.assertEqual(data['run_metadata']['key'], run_metadata_row[1])
    self.assertEqual(data['run_metadata']['value'], run_metadata_row[2])
    # Test metadata must have followed the test's new id.
    test_metadata_row = list(test_metadatas_t.select().where(
        test_metadatas_t.c.test_id == test_row[0]).execute())[0]
    self.assertEqual(data['test_metadata']['key'], test_metadata_row[1])
    self.assertEqual(data['test_metadata']['value'], test_metadata_row[2])
    # Test-run metadata must have followed the test_run's new id.
    test_run_metadata_row = list(test_run_metadatas_t.select().where(
        test_run_metadatas_t.c.test_run_id ==
        test_run_row[0]).execute())[0]
    self.assertEqual(data['test_run_metadata']['key'],
                     test_run_metadata_row[1])
    self.assertEqual(data['test_run_metadata']['value'],
                     test_run_metadata_row[2])
    # The attachment must have followed the test_run's new id.
    attachment_row = list(attachments_t.select().where(
        attachments_t.c.test_run_id == test_run_row[0]).execute())[0]
    self.assertEqual(data['attachment']['label'], attachment_row[2])
    self.assertEqual(data['attachment']['attachment'], attachment_row[3])