diff --git a/doc/source/userdoc/installation.guide.rst b/doc/source/userdoc/installation.guide.rst
index 1cb3e4a9..8b60920a 100644
--- a/doc/source/userdoc/installation.guide.rst
+++ b/doc/source/userdoc/installation.guide.rst
@@ -125,14 +125,28 @@ To install into a virtual environment
    Make the necessary changes in ``sahara-venv/etc/sahara.conf``.
    For details see :doc:`Sahara Configuration Guide <configuration.guide>`
 
-5. Create database schema:
+5. If you use Sahara with a MySQL database, then for storing big Job Binaries
+   in the Sahara Internal Database you must increase the maximum allowed
+   packet size. Edit ``my.cnf`` and change the parameter:
+
+.. sourcecode:: ini
+
+    ...
+    [mysqld]
+    ...
+    max_allowed_packet = 256M
+..
+
+   and restart the MySQL server.
+
+6. Create database schema:
 
 .. sourcecode:: console
 
     $ sahara-venv/bin/sahara-db-manage --config-file sahara-venv/etc/sahara.conf upgrade head
 ..
 
-6. To start Sahara call:
+7. To start Sahara call:
 
 .. sourcecode:: console
 
diff --git a/sahara/db/base.py b/sahara/db/base.py
index 83b026ea..fcf43072 100644
--- a/sahara/db/base.py
+++ b/sahara/db/base.py
@@ -33,3 +33,7 @@ class Base(object):
 
     def __init__(self):
         self.db = importutils.import_module(CONF.db_driver)
+
+
+def is_mysql_avail():
+    return CONF.database.connection.startswith('mysql')
diff --git a/sahara/db/migration/alembic_migrations/versions/001_icehouse.py b/sahara/db/migration/alembic_migrations/versions/001_icehouse.py
index 043e5177..ac1a9102 100644
--- a/sahara/db/migration/alembic_migrations/versions/001_icehouse.py
+++ b/sahara/db/migration/alembic_migrations/versions/001_icehouse.py
@@ -111,7 +111,7 @@ def upgrade():
                     sa.Column('tenant_id', sa.String(length=36),
                               nullable=True),
                     sa.Column('name', sa.String(length=80), nullable=False),
-                    sa.Column('data', sa.LargeBinary(), nullable=True),
+                    sa.Column('data', st.LargeBinary(), nullable=True),
                     sa.Column('datasize', sa.BIGINT(), nullable=True),
                     sa.PrimaryKeyConstraint('id'),
                     sa.UniqueConstraint('name', 'tenant_id'),
diff --git a/sahara/db/sqlalchemy/models.py b/sahara/db/sqlalchemy/models.py
index b95a99c6..6c104e5a 100644
--- a/sahara/db/sqlalchemy/models.py
+++ b/sahara/db/sqlalchemy/models.py
@@ -331,8 +331,7 @@ class JobBinaryInternal(mb.SaharaBase):
 
     id = _id_column()
     tenant_id = sa.Column(sa.String(36))
     name = sa.Column(sa.String(80), nullable=False)
-
-    data = sa.orm.deferred(sa.Column(sa.LargeBinary))
+    data = sa.orm.deferred(sa.Column(st.LargeBinary()))
     datasize = sa.Column(sa.BIGINT)
 
diff --git a/sahara/db/sqlalchemy/types.py b/sahara/db/sqlalchemy/types.py
index 5771b0e7..c37f2a4a 100644
--- a/sahara/db/sqlalchemy/types.py
+++ b/sahara/db/sqlalchemy/types.py
@@ -14,8 +14,10 @@
 # limitations under the License.
 
 import sqlalchemy as sa
+from sqlalchemy.dialects import mysql
 from sqlalchemy.ext import mutable
 
+from sahara.db import base
 from sahara.openstack.common import jsonutils
 
 
@@ -108,3 +110,9 @@ def JsonDictType():
 def JsonListType():
     """Returns an SQLAlchemy Column Type suitable to store a Json array."""
     return MutableList.as_mutable(JsonEncoded)
+
+
+def LargeBinary():
+    if base.is_mysql_avail():
+        return mysql.LONGBLOB
+    return sa.LargeBinary
diff --git a/sahara/tests/unit/db/migration/test_migrations.py b/sahara/tests/unit/db/migration/test_migrations.py
index 031442ab..be9494fc 100644
--- a/sahara/tests/unit/db/migration/test_migrations.py
+++ b/sahara/tests/unit/db/migration/test_migrations.py
@@ -37,6 +37,8 @@ postgres=# create database openstack_citest with owner openstack_citest;
 
 """
 
+import os
+
 from oslo.config import cfg
 
 from sahara.openstack.common.db.sqlalchemy import utils as db_utils
@@ -323,6 +325,17 @@ class TestMigrations(base.BaseWalkMigrationTestCase, base.CommonTestsMixIn):
 
         self.assertColumnsExists(engine, 'instances', instances_columns)
         self.assertColumnCount(engine, 'instances', instances_columns)
+        self._data_001(engine, data)
+
+    def _data_001(self, engine, data):
+        datasize = 512 * 1024  # 512kB
+        data = os.urandom(datasize)
+        t = db_utils.get_table(engine, 'job_binary_internal')
+        engine.execute(t.insert(), data=data, id='123', name='name')
+        new_data = engine.execute(t.select()).fetchone().data
+        self.assertEqual(data, new_data)
+        engine.execute(t.delete())
+
     def _check_002(self, engine, data):
         # currently, 002 is just a placeholder
         self._check_001(engine, data)
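
Reviewer note, not part of the patch: a minimal standalone sketch of the type selection this change introduces. The function name large_binary_for and the example connection URLs below are illustrative only; the real code reads the URL from sahara's CONF via is_mysql_avail() and lives in sahara/db/sqlalchemy/types.py. The point is that SQLAlchemy's generic LargeBinary maps to BLOB on MySQL (64 KB limit), while the dialect-specific LONGBLOB allows up to 4 GB.

    # Standalone sketch (not from the Sahara tree): mirrors the new
    # st.LargeBinary()/is_mysql_avail() pair, but takes the connection URL
    # as an argument instead of reading it from sahara's CONF.
    import sqlalchemy as sa
    from sqlalchemy.dialects import mysql


    def large_binary_for(connection_url):
        """Pick a binary column type based on the database backend."""
        if connection_url.startswith('mysql'):
            # MySQL renders the generic LargeBinary as BLOB (64 KB limit),
            # so big job binaries need the dialect-specific LONGBLOB.
            return mysql.LONGBLOB
        return sa.LargeBinary


    print(large_binary_for('mysql://sahara:secret@localhost/sahara').__name__)
    # LONGBLOB
    print(large_binary_for('sqlite:////tmp/sahara.db').__name__)
    # LargeBinary

Even with LONGBLOB, MySQL still rejects any single INSERT larger than max_allowed_packet, which is why the installation guide hunk above raises that setting to 256M.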