Fix column migration for PostgreSQL

The generated column in the event table and the t_datetime column
in the trait table should not be of type double precision.

Change-Id: Ib9c5c8de034b5d1ccaa070b6de7c0d682160200c
Closes-Bug: #1337851
Artur Svechnikov 2014-07-14 19:22:25 +04:00 committed by gordon chung
parent c1323cc47c
commit afc9834a34
2 changed files with 69 additions and 2 deletions
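
For context: models.PreciseTimestamp, used in the migration below, is a custom SQLAlchemy type that stores timestamps with sub-second precision. The following is a minimal sketch of how such a type decorator can be built; it illustrates the general technique only, and the DECIMAL-on-MySQL fallback and epoch encoding are assumptions, not the exact Ceilometer implementation.

import datetime

from sqlalchemy import types


class PreciseTimestampSketch(types.TypeDecorator):
    """Illustrative timestamp type with microsecond precision.

    On backends whose DATETIME truncates microseconds (older MySQL),
    values are kept as a DECIMAL epoch offset; everywhere else a native
    DateTime column is used, which is why a PostgreSQL column of this
    type should never end up as double precision.
    """
    impl = types.DateTime

    _EPOCH = datetime.datetime(1970, 1, 1)

    def load_dialect_impl(self, dialect):
        if dialect.name == 'mysql':
            return dialect.type_descriptor(
                types.DECIMAL(precision=20, scale=6, asdecimal=True))
        return dialect.type_descriptor(types.DateTime())

    def process_bind_param(self, value, dialect):
        if value is None or dialect.name != 'mysql':
            return value
        # Encode as fractional seconds since the epoch for DECIMAL storage.
        return (value - self._EPOCH).total_seconds()

    def process_result_value(self, value, dialect):
        if value is None or dialect.name != 'mysql':
            return value
        return self._EPOCH + datetime.timedelta(seconds=float(value))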


@@ -0,0 +1,67 @@
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# NOTE (gordc): this is a copy of 024 migration script which missed pgsql

import sqlalchemy as sa

from ceilometer.storage.sqlalchemy import migration
from ceilometer.storage.sqlalchemy import models


def _convert_data_type(table, col, from_t, to_t, pk_attr='id', index=False):
temp_col_n = 'convert_data_type_temp_col'
# Override column we're going to convert with from_t, since the type we're
# replacing could be custom and we need to tell SQLALchemy how to perform
# CRUD operations with it.
table = sa.Table(table.name, table.metadata, sa.Column(col, from_t),
extend_existing=True)
sa.Column(temp_col_n, to_t).create(table)
key_attr = getattr(table.c, pk_attr)
orig_col = getattr(table.c, col)
new_col = getattr(table.c, temp_col_n)
query = sa.select([key_attr, orig_col])
for key, value in migration.paged(query):
(table.update().where(key_attr == key).values({temp_col_n: value}).
execute())
orig_col.drop()
new_col.alter(name=col)
if index:
sa.Index('ix_%s_%s' % (table.name, col), new_col).create()


def upgrade(migrate_engine):
if migrate_engine.name == 'postgresql':
meta = sa.MetaData(bind=migrate_engine)
event = sa.Table('event', meta, autoload=True)
_convert_data_type(event, 'generated', sa.Float(),
models.PreciseTimestamp(),
pk_attr='id', index=True)
trait = sa.Table('trait', meta, autoload=True)
_convert_data_type(trait, 't_datetime', sa.Float(),
models.PreciseTimestamp(),
pk_attr='id', index=True)


def downgrade(migrate_engine):
if migrate_engine.name == 'postgresql':
meta = sa.MetaData(bind=migrate_engine)
event = sa.Table('event', meta, autoload=True)
_convert_data_type(event, 'generated', models.PreciseTimestamp(),
sa.Float(), pk_attr='id', index=True)
trait = sa.Table('trait', meta, autoload=True)
_convert_data_type(trait, 't_datetime', models.PreciseTimestamp(),
sa.Float(), pk_attr='id', index=True)
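
The update loop in _convert_data_type relies on migration.paged to walk the select in batches rather than loading every row at once. A rough sketch of such a paging helper follows, assuming the legacy bound-metadata execution style these migration scripts use; the batch size and structure are illustrative, not the actual ceilometer helper.

def paged(query, size=1000):
    """Yield rows from a bound select in fixed-size batches."""
    offset = 0
    while True:
        # Fetch the next page. This relies on the select being bound to
        # an engine via its metadata; a production helper would also add
        # an ORDER BY on the key column to keep pages stable.
        page = query.offset(offset).limit(size).execute().fetchall()
        if not page:
            return
        for row in page:
            yield row
        offset += size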


@@ -55,11 +55,11 @@ def make_test_data(conn, start, end, interval, event_types):
while timestamp <= end:
data = []
for i in range(event_types):
-            traits = [models.Trait('id1_%d' % i, 1, uuid.uuid4()),
+            traits = [models.Trait('id1_%d' % i, 1, str(uuid.uuid4())),
models.Trait('id2_%d' % i, 2, random.randint(1,10)),
models.Trait('id3_%d' % i, 3, random.random()),
models.Trait('id4_%d' % i, 4, timestamp)]
-            data.append(models.Event(uuid.uuid4(),
+            data.append(models.Event(str(uuid.uuid4()),
'event_type%d' % i,
timestamp,
traits))
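
The test-data change wraps uuid.uuid4() in str() because uuid4() returns a UUID object, while a trait with dtype 1 (the text type in the Ceilometer models) and the event message_id are persisted as strings. A quick illustration:

import uuid

raw = uuid.uuid4()
print(isinstance(raw, str))       # False: a uuid.UUID instance
print(isinstance(str(raw), str))  # True: safe to store as a text value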