Handle engine creation inside of Connection object
We now store the SQLAlchemy engine object inside our Connection object. This allows us to open multiple concurrent connections.

Change-Id: I57b6d89f30c065643dbfab958e032f2adac59cd6
committed by Gerrit Code Review
parent 693204a37e
commit f178612bef
@@ -176,17 +176,26 @@ class Connection(base.Connection):
             conf.database.connection = \
                 os.environ.get('CEILOMETER_TEST_SQL_URL', url)
 
-    @staticmethod
-    def _get_db_session():
-        return sqlalchemy_session.get_session()
+        # NOTE(Alexei_987) Related to bug #1271103
+        #                  we steal objects from sqlalchemy_session
+        #                  to manage their lifetime on our own.
+        #                  This is needed to open several db connections
+        self._engine = sqlalchemy_session.get_engine()
+        self._maker = sqlalchemy_session.get_maker(self._engine)
+        sqlalchemy_session._ENGINE = None
+        sqlalchemy_session._MAKER = None
+
+    def _get_db_session(self):
+        return self._maker()
 
     def upgrade(self):
-        migration.db_sync(self._get_db_session().get_bind())
+        migration.db_sync(self._engine)
 
     def clear(self):
-        engine = self._get_db_session().get_bind()
         for table in reversed(models.Base.metadata.sorted_tables):
-            engine.execute(table.delete())
+            self._engine.execute(table.delete())
+        self._maker.close_all()
+        self._engine.dispose()
 
     @staticmethod
     def _create_or_update(session, model_class, _id, source=None, **kwargs):
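In effect, each Connection instance now owns a private engine and session factory instead of sharing the module-level globals in sqlalchemy_session, which is what makes several concurrent connections possible. A minimal sketch of the same ownership pattern, written against plain SQLAlchemy rather than the oslo session module (the MeteringConnection class name and sqlite URLs below are illustrative, not part of the patch):

# Sketch only: per-instance engine/session-factory ownership with plain
# SQLAlchemy. Names and URLs here are illustrative, not from the patch.
import sqlalchemy
from sqlalchemy.orm import sessionmaker


class MeteringConnection(object):
    def __init__(self, url):
        # Every instance builds its own engine and sessionmaker, so two
        # instances keep independent connection pools.
        self._engine = sqlalchemy.create_engine(url)
        self._maker = sessionmaker(bind=self._engine)

    def _get_db_session(self):
        # A fresh Session bound to this instance's engine.
        return self._maker()

    def close(self):
        # Release this instance's pool; other instances are unaffected.
        # (The patch also calls self._maker.close_all(), which newer
        # SQLAlchemy replaces with sqlalchemy.orm.close_all_sessions().)
        self._engine.dispose()


if __name__ == '__main__':
    conn_a = MeteringConnection('sqlite:///a.db')
    conn_b = MeteringConnection('sqlite:///b.db')  # concurrent, independent
    conn_a.close()
    conn_b.close()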
@@ -15,7 +15,6 @@ from logging import config as log_config
 
 from alembic import context
 
-import ceilometer.openstack.common.db.sqlalchemy.session as sqlalchemy_session
 from ceilometer.storage.sqlalchemy import models
 
 # this is the Alembic Config object, which provides
@@ -56,23 +55,20 @@ def run_migrations_offline():
         context.run_migrations()
 
 
-def run_migrations_online():
+def run_migrations_online(engine):
     """Run migrations in 'online' mode.
 
     In this scenario we need to create an Engine
     and associate a connection with the context.
 
     """
-    engine = sqlalchemy_session.get_session().get_bind()
-
     connection = engine.connect()
     context.configure(connection=connection, target_metadata=target_metadata)
 
     with context.begin_transaction():
         context.run_migrations()
-    connection.close()
 
 if context.is_offline_mode():
     run_migrations_offline()
 else:
     run_migrations_online(config._engine)
@@ -32,7 +32,9 @@ def db_sync(engine):
     db_version(engine)  # This is needed to create a version stamp in empty DB
     repository = _find_migrate_repo()
     versioning_api.upgrade(engine, repository)
-    alembic.command.upgrade(_alembic_config(), "head")
+    config = _alembic_config()
+    config._engine = engine
+    alembic.command.upgrade(config, "head")
 
 
 def _alembic_config():
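Together with the env.py hunk above, this sets up a handoff: db_sync() stashes the caller's engine on the Alembic Config object, and run_migrations_online() consumes that engine instead of pulling one from the global session. A rough sketch of the caller side of that handoff (the _engine attribute name comes from the patch; the helper name and ini path are illustrative, and newer Alembic also exposes config.attributes for this kind of hand-off):

# Sketch only: passing an already-created engine into an Alembic upgrade by
# stashing it on the Config object. Paths and names here are illustrative.
import alembic.command
import alembic.config


def upgrade_to_head(engine, ini_path='alembic.ini'):
    config = alembic.config.Config(ini_path)
    # env.py's run_migrations_online() can now read config._engine and call
    # engine.connect() itself, instead of creating a second engine.
    config._engine = engine
    alembic.command.upgrade(config, 'head')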
@@ -26,6 +26,7 @@ import warnings
 import six
 
 from ceilometer.openstack.common.fixture import config
+import ceilometer.openstack.common.fixture.mockpatch as oslo_mock
 from ceilometer import storage
 from ceilometer.tests import base as test_base
 
@@ -51,6 +52,9 @@ class TestBase(test_base.BaseTestCase):
                 self.skipTest(str(e))
         self.conn.upgrade()
 
+        self.useFixture(oslo_mock.Patch('ceilometer.storage.get_connection',
+                                        return_value=self.conn))
+
         self.CONF([], project='ceilometer')
 
         # Set a default location for the pipeline config file so the
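The new fixture ensures that any code calling ceilometer.storage.get_connection() during a test receives the connection the test base already created and upgraded. A hedged sketch of the same idea using the fixtures library's MockPatch directly (the oslo mockpatch module imported above is a thin wrapper around it; ExampleTest and FakeConnection are illustrative, and the snippet assumes ceilometer is importable):

# Sketch only: patching a module-level factory for the duration of one test,
# mirroring the oslo_mock.Patch usage in the hunk above. Assumes the
# ceilometer package is importable; FakeConnection is illustrative.
import fixtures
import testtools


class FakeConnection(object):
    """Stand-in for the storage connection a real test would build."""


class ExampleTest(testtools.TestCase):
    def setUp(self):
        super(ExampleTest, self).setUp()
        self.conn = FakeConnection()
        # While this test runs, ceilometer.storage.get_connection() returns
        # self.conn; the patch is undone automatically at teardown.
        self.useFixture(fixtures.MockPatch('ceilometer.storage.get_connection',
                                           return_value=self.conn))

    def test_patched_factory(self):
        from ceilometer import storage
        self.assertIs(self.conn, storage.get_connection())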
@@ -28,10 +28,11 @@ import repr
 
 from mock import patch
 
-import ceilometer.openstack.common.db.sqlalchemy.session as sqlalchemy_session
+from ceilometer.openstack.common.fixture import config
 from ceilometer.openstack.common import timeutils
 from ceilometer.storage import models
 from ceilometer.storage.sqlalchemy import models as sql_models
+from ceilometer.tests import base as tests_base
 from ceilometer.tests import db as tests_db
 from ceilometer.tests.storage import test_storage_scenarios as scenarios
 
@@ -172,21 +173,26 @@ class EventTest(EventTestBase):
         self.assertTrue(repr.repr(ev))
 
 
-class ModelTest(tests_db.TestBase):
+class ModelTest(tests_base.BaseTestCase):
     database_connection = 'mysql://localhost'
 
     def test_model_table_args(self):
+        self.CONF = self.useFixture(config.Config()).conf
+        self.CONF.set_override('connection', self.database_connection,
+                               group='database')
         self.assertIsNotNone(sql_models.table_args())
 
 
 class RelationshipTest(scenarios.DBTestBase):
-    database_connection = 'mysql://localhost'
+    # Note: Do not derive from SQLAlchemyEngineTestBase, since we
+    # don't want to automatically inherit all the Meter setup.
+    database_connection = 'sqlite://'
 
     def test_clear_metering_data_meta_tables(self):
         timeutils.utcnow.override_time = datetime.datetime(2012, 7, 2, 10, 45)
         self.conn.clear_expired_metering_data(3 * 60)
 
-        session = sqlalchemy_session.get_session()
+        session = self.conn._get_db_session()
         meta_tables = [sql_models.MetaText, sql_models.MetaFloat,
                        sql_models.MetaBigInt, sql_models.MetaBool]
         for table in meta_tables:
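ModelTest now only needs a configured connection URL, not a live database, so it drops tests_db.TestBase in favour of the plain test base plus the oslo config fixture. A minimal sketch of that override pattern using the released oslo.config fixture (the patch uses the oslo-incubator copy of the same fixture; the option registration is spelled out here only so the snippet stands alone):

# Sketch only: overriding a config option for a single test via the oslo
# config fixture. Uses the released oslo.config; the patch uses the
# oslo-incubator copy of the same fixture.
from oslo_config import cfg
from oslo_config import fixture as config_fixture
import testtools


class TableArgsTest(testtools.TestCase):
    database_connection = 'mysql://localhost'

    def setUp(self):
        super(TableArgsTest, self).setUp()
        conf = cfg.ConfigOpts()
        # Ceilometer registers the [database] group itself; done here only
        # so the sketch is self-contained.
        conf.register_opts([cfg.StrOpt('connection')], group='database')
        self.CONF = self.useFixture(config_fixture.Config(conf)).conf
        self.CONF.set_override('connection', self.database_connection,
                               group='database')

    def test_override_applies(self):
        self.assertEqual('mysql://localhost', self.CONF.database.connection)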
@@ -200,7 +206,7 @@ class RelationshipTest(scenarios.DBTestBase):
         timeutils.utcnow.override_time = datetime.datetime(2012, 7, 2, 10, 45)
         self.conn.clear_expired_metering_data(3 * 60)
 
-        session = sqlalchemy_session.get_session()
+        session = self.conn._get_db_session()
         self.assertEqual(session.query(sql_models.sourceassoc)
             .filter(~sql_models.sourceassoc.c.meter_id.in_(
                 session.query(sql_models.Meter.id)