Merge "Fix big job binary objects in mysql"
This commit is contained in:
commit
bac66c407d
@ -125,14 +125,28 @@ To install into a virtual environment
|
||||
Make the necessary changes in ``sahara-venv/etc/sahara.conf``.
|
||||
For details see :doc:`Sahara Configuration Guide <configuration.guide>`
|
||||
|
||||
5. Create database schema:
|
||||
5. If you use Sahara with a MySQL database, then to store big job binaries
   in the Sahara internal database you must configure the maximum allowed
   packet size. Edit ``my.cnf`` and change the parameter:
|
||||
|
||||
.. sourcecode:: ini
|
||||
|
||||
...
|
||||
[mysqld]
|
||||
...
|
||||
max_allowed_packet = 256M
|
||||
..
|
||||
|
||||
and restart the MySQL server.
|
||||
|
||||
6. Create database schema:
|
||||
|
||||
.. sourcecode:: console
|
||||
|
||||
$ sahara-venv/bin/sahara-db-manage --config-file sahara-venv/etc/sahara.conf upgrade head
|
||||
..
|
||||
|
||||
6. To start Sahara call:
|
||||
7. To start Sahara call:
|
||||
|
||||
.. sourcecode:: console
|
||||
|
||||
|
@ -33,3 +33,7 @@ class Base(object):
|
||||
|
||||
def __init__(self):
    """Load the configured database driver module onto the instance."""
    driver_path = CONF.db_driver
    self.db = importutils.import_module(driver_path)
|
||||
|
||||
|
||||
def is_mysql_avail():
    """Return True when the configured DB connection string targets MySQL.

    Matches any mysql-backed SQLAlchemy URL (e.g. ``mysql://`` or
    ``mysql+pymysql://``), since all of them begin with ``mysql``.
    """
    connection_url = CONF.database.connection
    return connection_url.startswith('mysql')
|
||||
|
@ -111,7 +111,7 @@ def upgrade():
|
||||
sa.Column('tenant_id', sa.String(length=36),
|
||||
nullable=True),
|
||||
sa.Column('name', sa.String(length=80), nullable=False),
|
||||
sa.Column('data', sa.LargeBinary(), nullable=True),
|
||||
sa.Column('data', st.LargeBinary(), nullable=True),
|
||||
sa.Column('datasize', sa.BIGINT(), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('name', 'tenant_id'),
|
||||
|
@ -331,8 +331,7 @@ class JobBinaryInternal(mb.SaharaBase):
|
||||
id = _id_column()
|
||||
tenant_id = sa.Column(sa.String(36))
|
||||
name = sa.Column(sa.String(80), nullable=False)
|
||||
|
||||
data = sa.orm.deferred(sa.Column(sa.LargeBinary))
|
||||
data = sa.orm.deferred(sa.Column(st.LargeBinary()))
|
||||
datasize = sa.Column(sa.BIGINT)
|
||||
|
||||
|
||||
|
@ -14,8 +14,10 @@
|
||||
# limitations under the License.
|
||||
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
from sqlalchemy.ext import mutable
|
||||
|
||||
from sahara.db import base
|
||||
from sahara.openstack.common import jsonutils
|
||||
|
||||
|
||||
@ -108,3 +110,9 @@ def JsonDictType():
|
||||
def JsonListType():
|
||||
"""Returns an SQLAlchemy Column Type suitable to store a Json array."""
|
||||
return MutableList.as_mutable(JsonEncoded)
|
||||
|
||||
|
||||
def LargeBinary():
    """Return a binary column type sized appropriately for the backend.

    MySQL's generic BLOB is too small for big job binaries, so use
    LONGBLOB there; every other backend gets SQLAlchemy's default
    LargeBinary type.
    """
    return mysql.LONGBLOB if base.is_mysql_avail() else sa.LargeBinary
|
||||
|
@ -37,6 +37,8 @@ postgres=# create database openstack_citest with owner openstack_citest;
|
||||
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
from oslo.config import cfg
|
||||
|
||||
from sahara.openstack.common.db.sqlalchemy import utils as db_utils
|
||||
@ -323,6 +325,17 @@ class TestMigrations(base.BaseWalkMigrationTestCase, base.CommonTestsMixIn):
|
||||
self.assertColumnsExists(engine, 'instances', instances_columns)
|
||||
self.assertColumnCount(engine, 'instances', instances_columns)
|
||||
|
||||
self._data_001(engine, data)
|
||||
|
||||
def _data_001(self, engine, data):
    """Round-trip a 512kB random blob through job_binary_internal.

    Verifies that migration 001's binary column can store and return a
    payload larger than MySQL's default BLOB limit without truncation.
    NOTE(review): the ``data`` argument is immediately shadowed by the
    generated random payload — presumably kept only for the common
    check-method signature; confirm against the migration test harness.
    """
    size_bytes = 512 * 1024
    data = os.urandom(size_bytes)

    table = db_utils.get_table(engine, 'job_binary_internal')
    engine.execute(table.insert(), data=data, id='123', name='name')

    row = engine.execute(table.select()).fetchone()
    self.assertEqual(data, row.data)

    # Clean up so later walk steps start from an empty table.
    engine.execute(table.delete())
|
||||
|
||||
def _check_002(self, engine, data):
    """Re-run the 001 checks; migration 002 is currently a placeholder."""
    self._check_001(engine, data)
|
||||
|
Loading…
Reference in New Issue
Block a user