Add support for SQLAlchemy 2.0
In addition:

* remove usage of LegacyEngineFacade
* extend migration tests to actually check something rather than relying only on a 'for' loop
* fix `_pickle.PicklingError: Can't pickle ...` error that occurs only on Mac M1
* update the upper-constraints (u-c) file

Change-Id: I6030dcc93439ad5f4786cc94707d3f0d49cf3adb
Parent: 96345af0cb · Commit: e5d9fc8e63
@@ -90,6 +90,18 @@
     vars:
      tox_env: py311
 
+- job:
+    name: rally-tox-py311-sqlalchemy14
+    parent: rally-tox-py311
+    vars:
+      tox_env: py311-sa14
+
+- job:
+    name: rally-tox-py311-sqlalchemy2
+    parent: rally-tox-py311
+    vars:
+      tox_env: py311-sa2
+
 - job:
     name: rally-tox-samples
     parent: rally-tox-base
@@ -12,7 +12,8 @@
         - rally-tox-py38
         - rally-tox-py39
         - rally-tox-py310
         - rally-tox-py311
+        - rally-tox-py311-sqlalchemy14
+        - rally-tox-py311-sqlalchemy2
         - rally-tox-samples
         - rally-tox-functional
         - rally-tox-self
@@ -29,7 +30,8 @@
         - rally-tox-py38
         - rally-tox-py39
         - rally-tox-py310
         - rally-tox-py311
+        - rally-tox-py311-sqlalchemy14
+        - rally-tox-py311-sqlalchemy2
         - rally-tox-functional
         - rally-tox-self
         - rally-install-ubuntu-focal
@@ -20,6 +20,11 @@ Changelog
 unreleased
 ----------
 
+Changed
+~~~~~~~
+
+* Add support for SQLAlchemy 2.0
+
 Fixed
 ~~~~~
 
@@ -43,11 +43,12 @@ these objects be simple dictionaries.
 import datetime as dt
 import functools
 import tempfile
+import threading
 import time
 
 from oslo_db import exception as db_exc
 from oslo_db import options as db_options
-from oslo_db.sqlalchemy import session as db_session
+from oslo_db.sqlalchemy import enginefacade
 import sqlalchemy as sa
 import sqlalchemy.orm  # noqa
 
@@ -65,36 +66,32 @@ db_options.set_defaults(
 
 _FACADE = None
-_SESSION_MAKER = None
+_CONTEXT = None
 
 
-def _create_facade_lazily():
+def _get_facade():
     global _FACADE
 
     if _FACADE is None:
-        _FACADE = db_session.EngineFacade.from_config(CONF)
+        ctx = enginefacade.transaction_context()
+        ctx.configure(
+            sqlite_fk=False,
+            expire_on_commit=False
+        )
+        _FACADE = ctx.writer
 
     return _FACADE
 
 
 def get_engine():
-    facade = _create_facade_lazily()
-    return facade.get_engine()
-
-
-def get_session():
-    global _SESSION_MAKER
-
-    if not _SESSION_MAKER:
-        _SESSION_MAKER = sa.orm.sessionmaker()
-        _SESSION_MAKER.configure(bind=get_engine())
-
-    return _SESSION_MAKER()
+    return _get_facade().get_engine()
 
 
 def engine_reset():
-    global _FACADE, _SESSION_MAKER
+    global _FACADE, _CONTEXT
 
     _FACADE = None
-    _SESSION_MAKER = None
+    _CONTEXT = None
 
 
 def serialize(data):
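For context on the hunk above: `enginefacade.transaction_context()` returns a factory that lazily creates the engine and session maker on first use, which is why the hand-rolled `get_session()` machinery disappears. A minimal sketch of the pattern (not Rally code; the in-memory `sqlite://` URL is an assumption to keep the demo self-contained):

```python
import threading

from oslo_db.sqlalchemy import enginefacade
import sqlalchemy as sa

ctx = enginefacade.transaction_context()
# configure() takes the options the legacy facade read from oslo.config;
# connection="sqlite://" is only here to make the sketch runnable as-is.
ctx.configure(connection="sqlite://", sqlite_fk=False, expire_on_commit=False)

scope = threading.local()  # any object may serve as the transaction scope

# writer.using(scope) yields a Session bound to the writer engine; it is
# committed on clean exit and rolled back if the block raises.
with ctx.writer.using(scope) as session:
    session.execute(sa.text("SELECT 1"))
```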
@@ -123,20 +120,19 @@ def serialize(data):
     raise ValueError("Failed to serialize %r data type." % type(data).__name__)
 
 
+def _get_context():
+    global _CONTEXT
+    if _CONTEXT is None:
+        _CONTEXT = threading.local()
+    return _CONTEXT
+
+
 def with_session(f):
 
     @functools.wraps(f)
     def wrapper(*args, **kwargs):
-        session = get_session()
-        session.expire_on_commit = False
-        try:
+        with _get_facade().using(_get_context()) as session:
             result = f(session, *args, **kwargs)
-            session.commit()
-        except Exception:
-            session.rollback()
-            raise
-        finally:
-            session.close()
 
         return serialize(result)
 
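The net effect of this rewrite: commit, rollback and session cleanup move out of hand-written try/except/finally into the facade's context manager. A hypothetical caller, just to show the injected-session contract (`task_count` is illustrative, not from the diff):

```python
# DB-API functions take the session as their first argument and never
# manage transactions themselves; the wrapper supplies the session,
# commits on success and serializes the return value.
@with_session
def task_count(session):
    return session.query(models.Task).count()

n = task_count()  # callers never pass a session
```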
@@ -202,7 +198,7 @@ def task_get(session, uuid=None, detailed=False):
 @with_session
 def task_get_status(session, uuid=None):
     task = (session.query(models.Task)
-            .options(sa.orm.load_only("status"))
+            .options(sa.orm.load_only(models.Task.status))
             .filter_by(uuid=uuid).first())
     if not task:
         raise exceptions.DBRecordNotFound(
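String arguments to `load_only()` were deprecated in SQLAlchemy 1.4 and rejected in 2.0; passing the mapped attribute works on both lines, which is what this change (and the two like it below) relies on:

```python
import sqlalchemy.orm as orm

# orm.load_only("status")                # deprecated in 1.4, an error in 2.x
opt = orm.load_only(models.Task.status)  # accepted by 1.4 and 2.x alike
```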
@@ -304,7 +300,7 @@ def task_list(session, status=None, env=None, tags=None, uuids_only=False):
         query = query.filter(models.Task.uuid.in_(uuids))
 
     if uuids_only:
-        query = query.options(sa.orm.load_only("uuid"))
+        query = query.options(sa.orm.load_only(models.Task.uuid))
 
     for task in query.all():
         task = task.as_dict()
@@ -524,7 +520,7 @@ def env_get(session, uuid_or_name):
 def env_get_status(session, uuid):
     resp = (session.query(models.Env)
             .filter_by(uuid=uuid)
-            .options(sa.orm.load_only("status"))
+            .options(sa.orm.load_only(models.Env.status))
             .first())
     if not resp:
         raise exceptions.DBRecordNotFound(
@@ -36,7 +36,7 @@ def run_migrations_online():
     and associate a connection with the context.
     """
     engine = api.get_engine()
-    with engine.connect() as connection:
+    with engine.begin() as connection:
         context.configure(connection=connection,
                           render_as_batch=True,
                           target_metadata=target_metadata)
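SQLAlchemy 2.0 drops connection-level autocommit, so DML run under a bare `engine.connect()` is no longer committed implicitly; `engine.begin()` opens a transaction and commits it when the block exits cleanly. The same substitution recurs throughout the migration tests below. A self-contained sketch against a throwaway in-memory database:

```python
import sqlalchemy as sa

engine = sa.create_engine("sqlite://")  # throwaway in-memory DB
meta = sa.MetaData()
demo = sa.Table("demo", meta, sa.Column("name", sa.String))
meta.create_all(engine)

with engine.begin() as conn:  # emits BEGIN
    conn.execute(demo.insert(), [{"name": "x"}])
# COMMIT on clean exit, ROLLBACK if the block raised
```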
@@ -48,10 +48,10 @@ def upgrade():
     connection = op.get_bind()
 
     for workload in connection.execute(workload_helper.select()):
-        runner = json.loads(workload["runner"])
+        runner = json.loads(workload.runner)
         runner.pop("type")
         values = {"runner": json.dumps(runner)}
-        hooks = workload["hooks"]
+        hooks = workload.hooks
         if hooks:
             values["hooks"] = []
             for hook in json.loads(hooks):
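Under SQLAlchemy 1.4/2.0 a result `Row` behaves like a named tuple rather than a dict: string indexing such as `workload["runner"]` is gone, and values are reached by attribute or through `row._mapping` (the reason for the many `._mapping` changes in the tests further down). A quick self-contained check:

```python
import sqlalchemy as sa

engine = sa.create_engine("sqlite://")
with engine.begin() as conn:
    row = conn.execute(sa.text("SELECT 'x' AS runner")).first()
    assert row.runner == "x"              # attribute access
    assert row._mapping["runner"] == "x"  # explicit mapping access
```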
@@ -71,8 +71,9 @@ def upgrade():
 
     for workload in connection.execute(workload_helper.select()):
         full_data = []
-        for wdata in connection.execute(workload_data_helper.select(
-                workload_data_helper.c.workload_uuid == workload.uuid)):
+        for wdata in connection.execute(
+                workload_data_helper.select().where(
+                    workload_data_helper.c.workload_uuid == workload.uuid)):
             chunk_data = wdata.chunk_data["raw"]
 
             require_updating = False
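The legacy `table.select(whereclause)` calling form was deprecated in 1.4 and removed in 2.0; the criterion moves to a chained `.where()`:

```python
import sqlalchemy as sa

t = sa.table("workload_data", sa.column("workload_uuid"))

# old, removed in 2.0:
#   t.select(t.c.workload_uuid == "abc")
stmt = t.select().where(t.c.workload_uuid == "abc")
print(stmt)  # SELECT ... FROM workload_data WHERE workload_uuid = :param
```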
@@ -131,10 +131,10 @@ def upgrade():
         # 7287df262dbc did not fail. nothing to do
         return
 
-    envs = [env["uuid"] for env in connection.execute(envs_helper.select())]
+    envs = [env.uuid for env in connection.execute(envs_helper.select())]
 
     for deployment in connection.execute(deployments_helper.select()):
-        if deployment["uuid"] in envs:
+        if deployment.uuid in envs:
             # this deployment had been migrated by 7287df262dbc. Nothing to do
             continue
         status = "FAILED TO CREATE"
@@ -19,14 +19,13 @@ import datetime as dt
 import uuid
 
 import sqlalchemy as sa
-import sqlalchemy.ext.declarative
+import sqlalchemy.orm  # noqa (used as sa.orm)
 
 from rally.common.db import sa_types
 from rally import consts
 
 
-BASE = sa.ext.declarative.declarative_base()
+BASE = sa.orm.declarative_base()
 
 
 def UUID():
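`declarative_base()` has lived in `sqlalchemy.orm` since 1.4; the old `sqlalchemy.ext.declarative` location emits deprecation warnings there and is gone in 2.0. A hypothetical model on the new base, only to show it in use:

```python
import sqlalchemy as sa
import sqlalchemy.orm  # noqa (used as sa.orm)

BASE = sa.orm.declarative_base()

class Demo(BASE):  # hypothetical model, not from the diff
    __tablename__ = "demo"
    id = sa.Column(sa.Integer, primary_key=True)
```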
@@ -131,6 +131,19 @@ class CheckPRSValidator(validation.Validator):
             return self.fail(msg)
 
 
+def _runs_per_second(rps_cfg, start_timer, number_of_processes):
+    """At the given second return desired rps."""
+
+    if not isinstance(rps_cfg, dict):
+        return float(rps_cfg) / number_of_processes
+    stage_order = (time.time() - start_timer) / rps_cfg.get(
+        "duration", 1) - 1
+    rps = (float(rps_cfg["start"] + rps_cfg["step"] * stage_order)
+           / number_of_processes)
+
+    return min(rps, float(rps_cfg["end"]))
+
+
 @validation.add("check_rps")
 @runner.configure(name="rps")
 class RPSScenarioRunner(runner.ScenarioRunner):
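Moving `_runs_per_second` to module level is what fixes the `PicklingError` from the commit message: with the "spawn" start method (the default on macOS since Python 3.8, hence the Mac M1 reports) multiprocessing pickles what it sends to workers, and pickle can only resolve module-level names, never functions nested inside a method. A self-contained reproduction of the difference:

```python
import multiprocessing


def module_level(x):  # picklable: reachable as an importable qualified name
    return x * 2


def demo():
    def nested(x):    # unpicklable under spawn
        return x * 2

    ctx = multiprocessing.get_context("spawn")
    with ctx.Pool(1) as pool:
        print(pool.apply(module_level, (21,)))  # works: 42
        # pool.apply(nested, (21,)) raises:
        # _pickle.PicklingError: Can't pickle <function demo.<locals>.nested ...>


if __name__ == "__main__":
    demo()
```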
@@ -232,18 +245,6 @@ class RPSScenarioRunner(runner.ScenarioRunner):
         max_cpu_used = min(cpu_count,
                            self.config.get("max_cpu_count", cpu_count))
 
-        def runs_per_second(rps_cfg, start_timer, number_of_processes):
-            """At the given second return desired rps."""
-
-            if not isinstance(rps_cfg, dict):
-                return float(rps_cfg) / number_of_processes
-            stage_order = (time.time() - start_timer) / rps_cfg.get(
-                "duration", 1) - 1
-            rps = (float(rps_cfg["start"] + rps_cfg["step"] * stage_order)
-                   / number_of_processes)
-
-            return min(rps, float(rps_cfg["end"]))
-
         processes_to_start = min(max_cpu_used, times,
                                  self.config.get("max_concurrency", times))
         times_per_worker, times_overhead = divmod(times, processes_to_start)
@@ -282,7 +283,7 @@ class RPSScenarioRunner(runner.ScenarioRunner):
                 times_per_worker + (times_overhead and 1),
                 concurrency_per_worker + (concurrency_overhead and 1),
                 context, cls, method_name, args, event_queue,
-                self.aborted, runs_per_second, self.config["rps"],
+                self.aborted, _runs_per_second, self.config["rps"],
                 processes_to_start
             )
             if times_overhead:
@@ -19,5 +19,5 @@ pyOpenSSL # Apache License, Version
 PyYAML # MIT
 python-subunit # Apache-2.0 or BSD
 requests!=2.20.0,!=2.24.0 # Apache License, Version 2.0
-SQLAlchemy!=1.1.5,!=1.1.6,!=1.1.7,!=1.1.8,<2.0.0 # MIT
+SQLAlchemy>=1.4.0 # MIT
 virtualenv!=16.3.0 # MIT
@@ -2,7 +2,7 @@
 # of appearance. Changing the order has an impact on the overall integration
 # process, which may cause wedges in the gate later.
 
-hacking>=3.0 # Apache Software License
+hacking>=4.0 # Apache Software License
 
 fixtures # Apache Software License/BSD License
 pytest # MIT
@@ -18,5 +18,5 @@ testtools # MIT
 
 testresources # UNKNOWN
 
-docutils # public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)
+docutils # BSD License/GNU General Public License (GPL)/Python Software Foundation License
 Pygments # BSD-2-Clause
@@ -22,6 +22,7 @@ import iso8601
 import json
 import pickle
 import pprint
+import typing as t
 from unittest import mock
 import uuid
 
@@ -277,7 +278,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
     def test_walk_versions(self):
         self.walk_versions(self.engine)
 
-    def _check_3177d36ea270(self, engine, data):
+    def _check_3177d36ea270(self, engine: sa.engine.Engine):
         self.assertEqual(
             "3177d36ea270", db.schema.schema_revision(engine=engine))
         self.assertColumnExists(engine, "deployments", "credentials")
@@ -348,7 +349,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
         deployment_table = db_utils.get_table(engine, "deployments")
 
         deployment_status = consts.DeployStatus.DEPLOY_FINISHED
-        with engine.connect() as conn:
+        with engine.begin() as conn:
             for deployment in self._54e844ebfbc3_deployments:
                 conf = json.dumps(self._54e844ebfbc3_deployments[deployment])
                 conn.execute(
@@ -425,7 +426,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
 
         }
 
-    def _check_54e844ebfbc3(self, engine, data):
+    def _check_54e844ebfbc3(self, engine: sa.engine.Engine):
         self.assertEqual("54e844ebfbc3",
                          db.schema.schema_revision(engine=engine))
 
@@ -433,9 +434,11 @@ class MigrationWalkTestCase(rtest.DBTestCase,
 
         deployment_table = db_utils.get_table(engine, "deployments")
 
-        with engine.connect() as conn:
+        with engine.begin() as conn:
             deployments_found = conn.execute(
                 deployment_table.select()).fetchall()
+            self.assertEqual(len(self._54e844ebfbc3_deployments),
+                             len(deployments_found))
             for deployment in deployments_found:
                 # check deployment
                 self.assertIn(deployment.uuid, original_deployments)
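The added length assertion is the "actually check something" part of the commit message: previously, if `fetchall()` came back empty, the `for` loop body never executed and the check passed vacuously. The same pattern is applied to the other `_check_*` methods below.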
@@ -493,7 +496,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
         task_table = db_utils.get_table(engine, "tasks")
 
         self._08e1515a576c_deployment_uuid = "08e1515a576c-uuuu-uuuu-iiii-dddd"
-        with engine.connect() as conn:
+        with engine.begin() as conn:
             conn.execute(
                 deployment_table.insert(),
                 [{"uuid": self._08e1515a576c_deployment_uuid,
@@ -515,7 +518,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
                   "deployment_uuid": self._08e1515a576c_deployment_uuid
                   }])
 
-    def _check_08e1515a576c(self, engine, data):
+    def _check_08e1515a576c(self, engine: sa.engine.Engine):
         self.assertEqual("08e1515a576c",
                          db.schema.schema_revision(engine=engine))
 
@@ -524,8 +527,10 @@ class MigrationWalkTestCase(rtest.DBTestCase,
         deployment_table = db_utils.get_table(engine, "deployments")
         task_table = db_utils.get_table(engine, "tasks")
 
-        with engine.connect() as conn:
+        with engine.begin() as conn:
             tasks_found = conn.execute(task_table.select()).fetchall()
+            self.assertEqual(len(self._08e1515a576c_logs),
+                             len(tasks_found))
             for task in tasks_found:
                 actual_log = json.loads(task.verification_log)
                 self.assertIsInstance(actual_log, dict)
@@ -549,7 +554,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
         self._e654a0648db0_task_uuid = str(uuid.uuid4())
         self._e654a0648db0_deployment_uuid = str(uuid.uuid4())
 
-        with engine.connect() as conn:
+        with engine.begin() as conn:
             conn.execute(
                 deployment_table.insert(),
                 [{
@@ -605,7 +610,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
             ]
         )
 
-    def _check_e654a0648db0(self, engine, data):
+    def _check_e654a0648db0(self, engine: sa.engine.Engine):
         self.assertEqual(
             "e654a0648db0", db.schema.schema_revision(engine=engine))
 
@@ -616,7 +621,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
         tag_table = db_utils.get_table(engine, "tags")
         deployment_table = db_utils.get_table(engine, "deployments")
 
-        with engine.connect() as conn:
+        with engine.begin() as conn:
 
             # Check task
 
@@ -789,7 +794,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
         deployment_table = db_utils.get_table(engine, "deployments")
         task_table = db_utils.get_table(engine, "tasks")
         task_result_table = db_utils.get_table(engine, "task_results")
-        with engine.connect() as conn:
+        with engine.begin() as conn:
             # create deployment
             conf = {
                 "admin": {"username": "admin",
@@ -830,14 +835,14 @@ class MigrationWalkTestCase(rtest.DBTestCase,
                 }]
             )
 
-    def _check_6ad4f426f005(self, engine, data):
+    def _check_6ad4f426f005(self, engine: sa.engine.Engine):
         self.assertEqual("6ad4f426f005",
                          db.schema.schema_revision(engine=engine))
 
         deployment_table = db_utils.get_table(engine, "deployments")
         task_table = db_utils.get_table(engine, "tasks")
         task_result_table = db_utils.get_table(engine, "task_results")
-        with engine.connect() as conn:
+        with engine.begin() as conn:
             task_results = conn.execute(task_result_table.select()).fetchall()
             self.assertEqual(1, len(task_results))
             task_result = task_results[0]
@@ -899,7 +904,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
         deployment_table = db_utils.get_table(engine, "deployments")
 
         deployment_status = consts.DeployStatus.DEPLOY_FINISHED
-        with engine.connect() as conn:
+        with engine.begin() as conn:
             for deployment in self._32fada9b2fde_deployments:
                 conf = json.dumps(
                     self._32fada9b2fde_deployments[deployment])
@@ -912,7 +917,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
                   "users": b(json.dumps([]))
                   }])
 
-    def _check_32fada9b2fde(self, engine, data):
+    def _check_32fada9b2fde(self, engine: sa.engine.Engine):
         self.assertEqual("32fada9b2fde",
                          db.schema.schema_revision(engine=engine))
 
@@ -920,9 +925,11 @@ class MigrationWalkTestCase(rtest.DBTestCase,
 
         deployment_table = db_utils.get_table(engine, "deployments")
 
-        with engine.connect() as conn:
+        with engine.begin() as conn:
             deployments_found = conn.execute(
                 deployment_table.select()).fetchall()
+            self.assertEqual(len(self._32fada9b2fde_deployments),
+                             len(deployments_found))
             for deployment in deployments_found:
                 # check deployment
                 self.assertIn(deployment.uuid, original_deployments)
@@ -1003,7 +1010,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
 
         deployment_status = consts.DeployStatus.DEPLOY_FINISHED
         vstatus = consts.TaskStatus.FINISHED
-        with engine.connect() as conn:
+        with engine.begin() as conn:
             conn.execute(
                 deployment_table.insert(),
                 [{"uuid": self._484cd9413e66_deployment_uuid,
@@ -1016,7 +1023,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
 
         for i in range(len(self._484cd9413e66_verifications)):
             verification = self._484cd9413e66_verifications[i]
-            vuuid = "uuid-%s" % i
+            vuuid = f"484cd9413e66-uuid-{i}"
             conn.execute(
                 verifications_table.insert(),
                 [{"uuid": vuuid,
@@ -1039,15 +1046,18 @@ class MigrationWalkTestCase(rtest.DBTestCase,
                   "data": json.dumps(data)
                   }])
 
-    def _check_484cd9413e66(self, engine, data):
+    def _check_484cd9413e66(self, engine: sa.engine.Engine):
         self.assertEqual("484cd9413e66",
                          db.schema.schema_revision(engine=engine))
 
         verifications_table = db_utils.get_table(engine, "verifications")
         deployment_table = db_utils.get_table(engine, "deployments")
 
-        with engine.connect() as conn:
+        with engine.begin() as conn:
             verifications = conn.execute(
                 verifications_table.select()).fetchall()
+            self.assertEqual(len(self._484cd9413e66_verifications),
+                             len(verifications))
             for i in range(len(verifications)):
                 verification_orig = self._484cd9413e66_verifications[i]
                 verification = verifications[i]
@@ -1090,13 +1100,11 @@ class MigrationWalkTestCase(rtest.DBTestCase,
                 self.assertEqual(
                     verification_orig["total"].get("expected_failures", 0),
                     verification.expected_failures)
 
                 conn.execute(
                     verifications_table.delete().where(
-                        verifications_table.c.uuid == verification.uuid)
+                        verifications_table.c.uuid == verification.uuid
+                    )
                 )
 
-            deployment_table = db_utils.get_table(engine, "deployments")
             conn.execute(
                 deployment_table.delete().where(
                     deployment_table.c.uuid
@@ -1142,7 +1150,8 @@ class MigrationWalkTestCase(rtest.DBTestCase,
         verifications_table = db_utils.get_table(engine, "verifications")
 
         deployment_status = consts.DeployStatus.DEPLOY_FINISHED
-        with engine.connect() as conn:
+        with engine.begin() as conn:
+
             conn.execute(
                 deployment_table.insert(),
                 [{"uuid": self._37fdbb373e8d_deployment_uuid,
@@ -1173,12 +1182,14 @@ class MigrationWalkTestCase(rtest.DBTestCase,
                   "tests": json.dumps(tests)
                   }])
 
-    def _check_37fdbb373e8d(self, engine, data):
+    def _check_37fdbb373e8d(self, engine: sa.engine.Engine):
         self.assertEqual("37fdbb373e8d",
                          db.schema.schema_revision(engine=engine))
 
         verifications_table = db_utils.get_table(engine, "verifications")
-        with engine.connect() as conn:
+        deployment_table = db_utils.get_table(engine, "deployments")
+
+        with engine.begin() as conn:
             verifications = conn.execute(
                 verifications_table.select()).fetchall()
             self.assertEqual(len(verifications),
|
||||
verifications_table.c.uuid == v.uuid)
|
||||
)
|
||||
|
||||
deployment_table = db_utils.get_table(engine, "deployments")
|
||||
conn.execute(
|
||||
deployment_table.delete().where(
|
||||
deployment_table.c.uuid
|
||||
@ -1226,29 +1236,34 @@ class MigrationWalkTestCase(rtest.DBTestCase,
|
||||
]
|
||||
|
||||
tags_table = db_utils.get_table(engine, "tags")
|
||||
with engine.connect() as conn:
|
||||
for t in self._a6f364988fc2_tags:
|
||||
with engine.begin() as conn:
|
||||
for tag in self._a6f364988fc2_tags:
|
||||
conn.execute(
|
||||
tags_table.insert(),
|
||||
[{
|
||||
"uuid": t["uuid"],
|
||||
"enum_tag_types": t["type"],
|
||||
"type": t["type"],
|
||||
"tag": t["tag"]
|
||||
"uuid": tag["uuid"],
|
||||
"enum_tag_types": tag["type"],
|
||||
"type": tag["type"],
|
||||
"tag": tag["tag"]
|
||||
}])
|
||||
|
||||
def _check_a6f364988fc2(self, engine, data):
|
||||
def _check_a6f364988fc2(self, engine: sa.engine.Engine):
|
||||
self.assertEqual("a6f364988fc2",
|
||||
db.schema.schema_revision(engine=engine))
|
||||
|
||||
tags_table = db_utils.get_table(engine, "tags")
|
||||
with engine.connect() as conn:
|
||||
tags = conn.execute(tags_table.select()).fetchall()
|
||||
with engine.begin() as conn:
|
||||
tags: t.Sequence[sa.Row] = (
|
||||
conn.execute(tags_table.select()).fetchall()
|
||||
)
|
||||
self.assertEqual(len(tags), len(self._a6f364988fc2_tags))
|
||||
|
||||
for i in range(len(tags)):
|
||||
actual_tag = tags[i]._mapping
|
||||
for k in ("uuid", "type", "tag"):
|
||||
self.assertEqual(self._a6f364988fc2_tags[i][k], tags[i][k])
|
||||
self.assertEqual(
|
||||
self._a6f364988fc2_tags[i][k], actual_tag[k]
|
||||
)
|
||||
|
||||
conn.execute(
|
||||
tags_table.delete().where(
|
||||
@ -1276,7 +1291,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
|
||||
verifications_table = db_utils.get_table(engine, "verifications")
|
||||
|
||||
deployment_status = consts.DeployStatus.DEPLOY_FINISHED
|
||||
with engine.connect() as conn:
|
||||
with engine.begin() as conn:
|
||||
conn.execute(
|
||||
deployment_table.insert(),
|
||||
[{"uuid": self._f33f4610dcda_deployment_uuid,
|
||||
@ -1307,12 +1322,12 @@ class MigrationWalkTestCase(rtest.DBTestCase,
|
||||
"unexpected_success": v["unexpected_success"]
|
||||
}])
|
||||
|
||||
def _check_f33f4610dcda(self, engine, data):
|
||||
def _check_f33f4610dcda(self, engine: sa.engine.Engine):
|
||||
self.assertEqual("f33f4610dcda",
|
||||
db.schema.schema_revision(engine=engine))
|
||||
|
||||
verifications_table = db_utils.get_table(engine, "verifications")
|
||||
with engine.connect() as conn:
|
||||
with engine.begin() as conn:
|
||||
verifications = conn.execute(
|
||||
verifications_table.select()).fetchall()
|
||||
self.assertEqual(len(verifications),
|
||||
@ -1361,7 +1376,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
|
||||
"status": "verifying"},
|
||||
}
|
||||
deployment_table = db_utils.get_table(engine, "deployments")
|
||||
with engine.connect() as conn:
|
||||
with engine.begin() as conn:
|
||||
conn.execute(
|
||||
deployment_table.insert(),
|
||||
[{"uuid": self._4ef544102ba7_deployment_uuid,
|
||||
@ -1374,7 +1389,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
|
||||
}])
|
||||
|
||||
task_table = db_utils.get_table(engine, "tasks")
|
||||
with engine.connect() as conn:
|
||||
with engine.begin() as conn:
|
||||
for task in self.tasks:
|
||||
conn.execute(
|
||||
task_table.insert(),
|
||||
@ -1388,7 +1403,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
|
||||
}])
|
||||
|
||||
subtask_table = db_utils.get_table(engine, "subtasks")
|
||||
with engine.connect() as conn:
|
||||
with engine.begin() as conn:
|
||||
for task in self.tasks:
|
||||
conn.execute(
|
||||
subtask_table.insert(),
|
||||
@ -1401,7 +1416,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
|
||||
"uuid": "subtask_" + self.tasks[task]["uuid"]
|
||||
}])
|
||||
|
||||
def _check_4ef544102ba7(self, engine, data):
|
||||
def _check_4ef544102ba7(self, engine: sa.engine.Engine):
|
||||
self.assertEqual("4ef544102ba7",
|
||||
db.schema.schema_revision(engine=engine))
|
||||
|
||||
@@ -1409,7 +1424,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
 
         task_table = db_utils.get_table(engine, "tasks")
         subtask_table = db_utils.get_table(engine, "subtasks")
-        with engine.connect() as conn:
+        with engine.begin() as conn:
             subtasks_found = conn.execute(
                 subtask_table.select()).fetchall()
             for subtask in subtasks_found:
|
||||
subtask_table.c.id == subtask.id)
|
||||
)
|
||||
|
||||
with engine.connect() as conn:
|
||||
with engine.begin() as conn:
|
||||
tasks_found = conn.execute(
|
||||
task_table.select()).fetchall()
|
||||
self.assertEqual(3, len(tasks_found))
|
||||
for task in tasks_found:
|
||||
self.assertIn("uuid", task)
|
||||
self.assertIn("status", task)
|
||||
self.assertIn("uuid", task._mapping)
|
||||
self.assertIn("status", task._mapping)
|
||||
|
||||
if task.status != org_tasks[task.uuid]["status"]:
|
||||
if task.uuid.startswith("should-not-be-changed"):
|
||||
@ -1466,7 +1481,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
|
||||
deployment_table = db_utils.get_table(engine, "deployments")
|
||||
deployment_status = consts.DeployStatus.DEPLOY_FINISHED
|
||||
|
||||
with engine.connect() as conn:
|
||||
with engine.begin() as conn:
|
||||
for deployment, creds in self._92aaaa2a6bb3_deployments:
|
||||
conn.execute(
|
||||
deployment_table.insert(),
|
||||
@@ -1476,7 +1491,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
                     "credentials": pickle.dumps(creds),
                 }])
 
-    def _check_92aaaa2a6bb3(self, engine, data):
+    def _check_92aaaa2a6bb3(self, engine: sa.engine.Engine):
         expected_credentials = [
             ("1-cred", {"openstack": [{"foo": "bar"}]}),
             ("2-cred", {"openstack": [{"foo": "bar1"},
@@ -1487,7 +1502,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
 
         deployment_table = db_utils.get_table(engine, "deployments")
 
-        with engine.connect() as conn:
+        with engine.begin() as conn:
             for deployment, expected_creds in expected_credentials:
 
                 dep_obj = conn.execute(
@@ -1527,7 +1542,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
             ]
         }
 
-        with engine.connect() as conn:
+        with engine.begin() as conn:
             conn.execute(
                 deployment_table.insert(),
                 [{
@@ -1590,13 +1605,13 @@ class MigrationWalkTestCase(rtest.DBTestCase,
                 }]
             )
 
-    def _check_35fe16d4ab1c(self, engine, data):
+    def _check_35fe16d4ab1c(self, engine: sa.engine.Engine):
         deployment_table = db_utils.get_table(engine, "deployments")
         task_table = db_utils.get_table(engine, "tasks")
         subtask_table = db_utils.get_table(engine, "subtasks")
         workload_table = db_utils.get_table(engine, "workloads")
 
-        with engine.connect() as conn:
+        with engine.begin() as conn:
             task_id = self._35fe16d4ab1c_task_uuid
             task_obj = conn.execute(
                 task_table.select().where(
@@ -1648,7 +1663,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
                                    "wdata": True},
             str(uuid.uuid4()): {"preprocessed": -1, "expected": None}}
 
-        with engine.connect() as conn:
+        with engine.begin() as conn:
             conn.execute(
                 deployment_table.insert(),
                 [{
@@ -1732,7 +1747,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
                 }]
             )
 
-    def _check_7948b83229f6(self, engine, data):
+    def _check_7948b83229f6(self, engine: sa.engine.Engine):
         deployment_table = db_utils.get_table(engine, "deployments")
         task_table = db_utils.get_table(engine, "tasks")
         subtask_table = db_utils.get_table(engine, "subtasks")
@@ -1741,10 +1756,15 @@ class MigrationWalkTestCase(rtest.DBTestCase,
 
         subtask_uuid = None
 
-        with engine.connect() as conn:
+        with engine.begin() as conn:
             task_uuid = self._7948b83229f6_task_uuid
-            for workload in conn.execute(workload_table.select().where(
-                    workload_table.c.task_uuid == task_uuid)).fetchall():
+
+            workloads_found = conn.execute(workload_table.select().where(
+                workload_table.c.task_uuid == task_uuid)).fetchall()
+            self.assertEqual(len(self._7948b83229f6_workloads),
+                             len(workloads_found))
+
+            for workload in workloads_found:
                 if subtask_uuid is None:
                     subtask_uuid = workload.subtask_uuid
                 if workload.uuid not in self._7948b83229f6_workloads:
|
||||
}
|
||||
]
|
||||
|
||||
with engine.connect() as conn:
|
||||
with engine.begin() as conn:
|
||||
conn.execute(
|
||||
deployment_table.insert(),
|
||||
[{
|
||||
@@ -1867,7 +1887,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
                 }]
             )
 
-    def _check_046a38742e89(self, engine, data):
+    def _check_046a38742e89(self, engine: sa.engine.Engine):
         deployment_table = db_utils.get_table(engine, "deployments")
         task_table = db_utils.get_table(engine, "tasks")
         subtask_table = db_utils.get_table(engine, "subtasks")
@@ -1875,10 +1895,14 @@ class MigrationWalkTestCase(rtest.DBTestCase,
 
         subtask_uuid = None
 
-        with engine.connect() as conn:
+        with engine.begin() as conn:
             task_uuid = self._046a38742e89_task_uuid
-            for workload in conn.execute(workload_table.select().where(
-                    workload_table.c.task_uuid == task_uuid)).fetchall():
+
+            workloads_found = conn.execute(workload_table.select().where(
+                workload_table.c.task_uuid == task_uuid)).fetchall()
+            self.assertEqual(2, len(workloads_found))
+
+            for workload in workloads_found:
                 if subtask_uuid is None:
                     subtask_uuid = workload.subtask_uuid
 
@@ -2057,7 +2081,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
                      }}}
         ]
 
-        with engine.connect() as conn:
+        with engine.begin() as conn:
             conn.execute(
                 deployment_table.insert(),
                 [{
@@ -2138,7 +2162,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
                 }]
             )
 
-    def _check_4394bdc32cfd(self, engine, data):
+    def _check_4394bdc32cfd(self, engine: sa.engine.Engine):
         deployment_table = db_utils.get_table(engine, "deployments")
         task_table = db_utils.get_table(engine, "tasks")
         subtask_table = db_utils.get_table(engine, "subtasks")
@@ -2147,10 +2171,14 @@ class MigrationWalkTestCase(rtest.DBTestCase,
 
         task_uuid = None
 
-        with engine.connect() as conn:
+        with engine.begin() as conn:
             subtask_id = self._4394bdc32cfd_subtask
-            for workload in conn.execute(workload_table.select().where(
-                    workload_table.c.subtask_uuid == subtask_id)).fetchall():
+
+            workloads_found = conn.execute(workload_table.select().where(
+                workload_table.c.subtask_uuid == subtask_id)).fetchall()
+            self.assertEqual(2, len(workloads_found))
+
+            for workload in workloads_found:
                 if task_uuid is None:
                     task_uuid = workload.task_uuid
                 original = [w for w in self._4394bdc32cfd_workloads
@@ -2232,7 +2260,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
             }
         }
 
-        with engine.connect() as conn:
+        with engine.begin() as conn:
             conn.execute(
                 deployment_table.insert(),
                 [{
@@ -2297,25 +2325,25 @@ class MigrationWalkTestCase(rtest.DBTestCase,
                   } for w_uuid, w in self._dc46687661df_workloads.items()]
             )
 
-    def _check_dc46687661df(self, engine, data):
+    def _check_dc46687661df(self, engine: sa.engine.Engine):
         deployment_table = db_utils.get_table(engine, "deployments")
         task_table = db_utils.get_table(engine, "tasks")
         subtask_table = db_utils.get_table(engine, "subtasks")
         workload_table = db_utils.get_table(engine, "workloads")
 
-        with engine.connect() as conn:
+        with engine.begin() as conn:
             task_uuid = self._dc46687661df_task_uuid
             for w_uuid, w in self._dc46687661df_workloads.items():
                 workload = conn.execute(workload_table.select().where(
                     workload_table.c.uuid == w_uuid)).first()
 
-                self.assertNotIn("context", workload)
-                self.assertNotIn("context_execution", workload)
+                self.assertNotIn("context", workload._mapping)
+                self.assertNotIn("context_execution", workload._mapping)
 
                 self.assertEqual(w["context"],
-                                 json.loads(workload["contexts"]))
+                                 json.loads(workload.contexts))
                 if w["start_time"] is None:
-                    self.assertEqual("[]", workload["contexts_results"])
+                    self.assertEqual("[]", workload.contexts_results)
                 elif w.get("with_fake_context", False):
                     self.assertEqual([{
                         "plugin_cfg": {"description": mock.ANY},
@@ -2328,7 +2356,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
                             "finished_at": 1483221609.9,
                             "atomic_actions": [],
                             "error": None}}],
-                        json.loads(workload["contexts_results"]))
+                        json.loads(workload.contexts_results))
                 else:
                     self.assertEqual([{
                         "plugin_name": "AllExecutedContexts",
@@ -2348,7 +2376,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
                             "finished_at": 1483221609.9,
                             "atomic_actions": [],
                             "error": None}}],
-                        json.loads(workload["contexts_results"]))
+                        json.loads(workload.contexts_results))
 
                 conn.execute(
                     workload_table.delete().where(
@@ -2357,15 +2385,15 @@ class MigrationWalkTestCase(rtest.DBTestCase,
             subtask = conn.execute(subtask_table.select().where(
                 subtask_table.c.task_uuid == task_uuid)).first()
 
-            self.assertNotIn("context", subtask)
-            self.assertNotIn("context_execution", subtask)
+            self.assertNotIn("context", subtask._mapping)
+            self.assertNotIn("context_execution", subtask._mapping)
 
-            self.assertEqual("{}", subtask["contexts"])
-            self.assertEqual("[]", subtask["contexts_results"])
+            self.assertEqual("{}", subtask.contexts)
+            self.assertEqual("[]", subtask.contexts_results)
 
             conn.execute(
                 subtask_table.delete().where(
-                    subtask_table.c.uuid == subtask["uuid"]))
+                    subtask_table.c.uuid == subtask.uuid))
 
             conn.execute(
                 task_table.delete().where(task_table.c.uuid == task_uuid))
@@ -2417,7 +2445,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
             # some custom unknown thing
             (str(uuid.uuid4()), {"some_special_deployment": "foo"})
         ]
-        with engine.connect() as conn:
+        with engine.begin() as conn:
            conn.execute(
                 deployment_table.insert(),
                 [{
@@ -2431,10 +2459,10 @@ class MigrationWalkTestCase(rtest.DBTestCase,
                 } for d_uuid, d_cfg in self._dc0fe6de6786_deployments]
             )
 
-    def _check_dc0fe6de6786(self, engine, data):
+    def _check_dc0fe6de6786(self, engine: sa.engine.Engine):
         deployment_table = db_utils.get_table(engine, "deployments")
 
-        with engine.connect() as conn:
+        with engine.begin() as conn:
 
             for d_uuid, d_cfg in self._dc0fe6de6786_deployments:
                 deployment = conn.execute(deployment_table.select().where(
@@ -2490,7 +2518,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
         self._bc908ac9a1fc_verifier_uuid = str(uuid.uuid4())
         self._bc908ac9a1fc_verification_uuid = str(uuid.uuid4())
 
-        with engine.connect() as conn:
+        with engine.begin() as conn:
             conn.execute(
                 deployment_table.insert(),
                 [{
@@ -2540,21 +2568,21 @@ class MigrationWalkTestCase(rtest.DBTestCase,
                 }]
             )
 
-    def _check_bc908ac9a1fc(self, engine, data):
+    def _check_bc908ac9a1fc(self, engine: sa.engine.Engine):
         env_table = db_utils.get_table(engine, "envs")
         platform_table = db_utils.get_table(engine, "platforms")
         task_table = db_utils.get_table(engine, "tasks")
         verifier_table = db_utils.get_table(engine, "verifiers")
         verification_table = db_utils.get_table(engine, "verifications")
 
-        with engine.connect() as conn:
+        with engine.begin() as conn:
 
             task = conn.execute(task_table.select().where(
                 task_table.c.uuid == self._bc908ac9a1fc_task_uuid)).first()
-            self.assertNotIn("deployment_uuid", task)
-            self.assertIn("env_uuid", task)
+            self.assertNotIn("deployment_uuid", task._mapping)
+            self.assertIn("env_uuid", task._mapping)
             self.assertEqual(self._bc908ac9a1fc_deployments[0][0],
-                             task["env_uuid"])
+                             task.env_uuid)
             conn.execute(
                 task_table.delete().where(
                     task_table.c.uuid == self._bc908ac9a1fc_task_uuid))
@@ -2562,10 +2590,10 @@ class MigrationWalkTestCase(rtest.DBTestCase,
             v_id = self._bc908ac9a1fc_verification_uuid
             verification = conn.execute(verification_table.select().where(
                 verification_table.c.uuid == v_id)).first()
-            self.assertNotIn("deployment_uuid", verification)
-            self.assertIn("env_uuid", verification)
+            self.assertNotIn("deployment_uuid", verification._mapping)
+            self.assertIn("env_uuid", verification._mapping)
             self.assertEqual(self._bc908ac9a1fc_deployments[0][0],
-                             verification["env_uuid"])
+                             verification.env_uuid)
             conn.execute(
                 verification_table.delete().where(
                     verification_table.c.uuid == v_id))
@@ -2578,7 +2606,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
                 env_table.c.uuid == d_uuid)).first()
             if d_cfg.get("creds", {}):
                 # openstack deployment
-                env_spec = json.loads(env["spec"])
+                env_spec = json.loads(env.spec)
                 self.assertEqual({"existing@openstack"},
                                  set(env_spec.keys()))
                 self.assertEqual(
@@ -2614,10 +2642,10 @@ class MigrationWalkTestCase(rtest.DBTestCase,
             else:
                 if "creds" in d_cfg:
                     # empty deployment
-                    self.assertEqual({}, json.loads(env["spec"]))
+                    self.assertEqual({}, json.loads(env.spec))
                 else:
                     # something
-                    self.assertEqual(d_cfg, json.loads(env["spec"]))
+                    self.assertEqual(d_cfg, json.loads(env.spec))
 
             platforms = conn.execute(platform_table.select().where(
                 platform_table.c.env_uuid == d_uuid)).fetchall()
@@ -105,8 +105,8 @@ class BaseWalkMigrationMixin(object):
         # init. So we cleanup the DB.
         db.schema.schema_cleanup()
         up_and_down_versions = self._up_and_down_versions()
-        for ver_up, ver_down in up_and_down_versions:
-            self._migrate_up(engine, ver_up, with_data=True)
+        for ver_up, prev_version in up_and_down_versions:
+            self._migrate_up(engine, ver_up)
 
     def _get_version_from_db(self, engine):
         """Return latest version for each type of migrate repo from db."""
@@ -126,7 +126,7 @@ class BaseWalkMigrationMixin(object):
 
         self._alembic_command(cmd, engine, self.ALEMBIC_CONFIG, version)
 
-    def _migrate_up(self, engine, version, with_data=False):
+    def _migrate_up(self, engine, version):
         """Migrate up to a new version of the db.
 
         We allow for data insertion and post checks at every
@@ -137,18 +137,15 @@ class BaseWalkMigrationMixin(object):
         # where a failed data migration happens otherwise
         check_version = version
         try:
-            if with_data:
-                data = None
-                pre_upgrade = getattr(
-                    self, "_pre_upgrade_%s" % check_version, None)
-                if pre_upgrade:
-                    data = pre_upgrade(engine)
+            pre_upgrade = getattr(
+                self, "_pre_upgrade_%s" % check_version, None)
+            if pre_upgrade:
+                pre_upgrade(engine)
             self._migrate(engine, version, "upgrade")
             self.assertEqual(version, self._get_version_from_db(engine))
-            if with_data:
-                check = getattr(self, "_check_%s" % check_version, None)
-                if check:
-                    check(engine, data)
+            check = getattr(self, "_check_%s" % check_version, None)
+            if check:
+                check(engine)
         except Exception:
             LOG.error("Failed to migrate to version %(ver)s on engine %(eng)s"
                       % {"ver": version, "eng": engine})
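With the `with_data` flag gone, the walk now unconditionally runs the optional `_pre_upgrade_<revision>` hook (seeding data before the migration) and the optional `_check_<revision>` hook (verifying the result after it), both discovered by name via `getattr`. The `data` round-trip between the two hooks is dropped, which is why every `_check_*` signature above loses its `data` parameter.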
@@ -265,7 +265,7 @@ class RPSScenarioRunnerTestCase(test.TestCase):
         runner_obj = rps.RPSScenarioRunner(self.task, config)
 
         runner_obj._run_scenario(fakes.FakeScenario, "do_it",
-                                 fakes.FakeContext({}).context, {})
+                                 {"task": {"uuid": 1}}, {})
 
         self.assertEqual(config["times"], len(runner_obj.result_queue))
 
@@ -279,7 +279,7 @@ class RPSScenarioRunnerTestCase(test.TestCase):
         runner_obj = rps.RPSScenarioRunner(self.task, config)
 
         runner_obj._run_scenario(fakes.FakeScenario, "something_went_wrong",
-                                 fakes.FakeContext({}).context, {})
+                                 {"task": {"uuid": 1}}, {})
         self.assertEqual(config["times"], len(runner_obj.result_queue))
         for result_batch in runner_obj.result_queue:
             for result in result_batch:
@@ -129,6 +129,10 @@ class ScenarioRunnerHelpersTestCase(test.TestCase):
                          ["Exception", "Something went wrong"])
 
 
+def noop_worker_process(i):
+    pass
+
+
 @ddt.ddt
 class ScenarioRunnerTestCase(test.TestCase):
 
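`noop_worker_process` sits at module level for the same reason `_runs_per_second` moved in rps.py: under the spawn start method the worker callable is pickled, and the nested `worker_process` previously defined inside the test method cannot be (see the reproduction sketch earlier in this diff).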
@@ -170,13 +174,10 @@ class ScenarioRunnerTestCase(test.TestCase):
 
         processes_to_start = 10
 
-        def worker_process(i):
-            pass
-
         counter = ((i,) for i in range(100))
 
         process_pool = runner_obj._create_process_pool(
-            processes_to_start, worker_process, counter)
+            processes_to_start, noop_worker_process, counter)
         self.assertEqual(processes_to_start, len(process_pool))
         for process in process_pool:
             self.assertIsInstance(process, multiprocessing.Process)
tox.ini
@@ -18,6 +18,8 @@ allowlist_externals = find
 deps = -r{toxinidir}/requirements.txt
        -r{toxinidir}/test-requirements.txt
        -c{toxinidir}/upper-constraints.txt
+       sa14: SQLAlchemy<1.5
+       sa2: SQLAlchemy>=2
 usedevelop = True
 commands =
   find . -type f -name "*.pyc" -delete
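`sa14:` and `sa2:` are tox factor-conditional dependencies: each line applies only when the env name contains that factor, so `py311-sa14` pins `SQLAlchemy<1.5` while `py311-sa2` installs `SQLAlchemy>=2` (matching the new Zuul jobs above), and plain `py311` takes whatever requirements.txt allows.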
@@ -137,7 +139,6 @@ commands = \
 [pytest]
 filterwarnings =
     error
-    ignore:.*EngineFacade is deprecated; please use oslo_db.sqlalchemy.enginefacade*:
     # Introduced with oslo.db-12.1.0
     ignore:.*Support for the MySQL NDB Cluster storage engine has been deprecated and will be removed in a future release.:DeprecationWarning:
     # Introduced with SQLAlchemy-1.4.46, can be removed when rally supports SQLAlchemy>=2.0.0
@@ -1,59 +1,57 @@
 alembic===1.9.4
-attrs===23.1.0
-bcrypt===4.0.1
-certifi===2023.11.17
-cffi===1.15.1
-charset-normalizer===3.2.0
-cryptography===41.0.4
-debtcollector===2.5.0
-distlib===0.3.7
-filelock===3.12.4
-idna===3.4
-iso8601===2.0.0
-Jinja2===3.1.2
-jsonschema===4.19.1
-jsonschema-specifications===2023.7.1
-Mako===1.2.4
-MarkupSafe===2.1.3
-msgpack===1.0.6
-netaddr===0.9.0
+attrs===23.2.0
+bcrypt===4.1.2
+certifi===2024.2.2
+cffi===1.16.0
+charset-normalizer===3.3.2
+cryptography===42.0.5
+debtcollector===3.0.0
+distlib===0.3.8
+filelock===3.13.3
+idna===3.6
+iso8601===2.1.0
+Jinja2===3.1.3
+jsonschema===4.19.2
+jsonschema-specifications===2023.12.1
+Mako===1.3.2
+MarkupSafe===2.1.5
+msgpack===1.0.8
+netaddr===0.10.1
 netifaces===0.11.0
-oslo.config===9.3.0
-oslo.context===5.3.0
-oslo.db===14.1.0
-oslo.i18n===6.2.0
-oslo.log===5.4.0
-oslo.serialization===5.3.0
-oslo.utils===7.0.0
-packaging===23.1
-paramiko===3.3.1
+oslo.config===9.4.0
+oslo.context===5.5.0
+oslo.db===15.0.0
+oslo.i18n===6.3.0
+oslo.log===5.5.1
+oslo.serialization===5.4.0
+oslo.utils===7.1.0
+packaging===24.0
+paramiko===3.4.0
 pbr===6.0.0
-pip===23.3.2
-platformdirs===3.10.0
-prettytable===3.9.0
-pycparser===2.21
+pip===24.0
+platformdirs===4.2.0
+prettytable===3.10.0
+pycparser===2.22
 PyNaCl===1.5.0
-pyOpenSSL===23.2.0
-pyparsing===3.1.1
-python-dateutil===2.8.2
-python-subunit===1.4.3
-pytz===2023.3.post1
+pyOpenSSL===24.1.0
+pyparsing===3.1.2
+python-dateutil===2.9.0.post0
+python-subunit===1.4.4
 PyYAML===6.0.1
-referencing===0.30.2
+referencing===0.34.0
 requests===2.31.0
 rfc3986===2.0.0
-rpds-py===0.10.3
-setuptools===69.0.3
+rpds-py===0.18.0
+setuptools===69.2.0
 six===1.16.0
-SQLAlchemy===1.4.41
-stevedore===5.1.0
+stevedore===5.2.0
 testresources===2.0.1
 testscenarios===0.5.0
-testtools===2.7.0
-typing-extensions===4.9.0
-tzdata===2023.3
-urllib3===1.26.16
-virtualenv===20.24.5
-wcwidth===0.2.6
-wheel===0.42.0
-wrapt===1.15.0
+testtools===2.7.1
+typing-extensions===4.11.0
+tzdata===2024.1
+urllib3===1.26.18
+virtualenv===20.25.1
+wcwidth===0.2.13
+wheel===0.43.0
+wrapt===1.16.0