- Squash merge of the ticket_302 branch.
- The internal system for Alembic operations has been reworked to build upon an extensible system of operation objects. New operations can be added to the ``op.`` namespace, and are also available within custom autogenerate schemes. Fixes #302
- The internal system for autogenerate has been reworked to build upon the extensible system of operation objects introduced in #302. A new customization hook, ``process_revision_directives``, is added to allow manipulation of the autogenerate stream; a usage sketch follows below. Fixes #301
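
A minimal sketch of wiring the new hook up from an env.py script; the
``connection`` and ``target_metadata`` names are the usual env.py
placeholders, not part of this change:

    def process_revision_directives(context, revision, directives):
        # receives the MigrationContext, the revision identifier(s), and
        # the list of generated ops.MigrationScript objects, which may be
        # mutated in place before any script files are written
        migration_script = directives[0]
        # hypothetical policy: prefix every autogenerated message
        migration_script.message = "autogen: %s" % (
            migration_script.message or "")

    context.configure(
        connection=connection,
        target_metadata=target_metadata,
        process_revision_directives=process_revision_directives,
    )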
This commit is contained in:
parent 5ccc81701c
commit 0e43247da4

Changed files include .gitignore (vendored, 1 line changed):
@@ -10,3 +10,4 @@ alembic.ini
.coverage
coverage.xml
.tox
*.patch
alembic/__init__.py
@@ -1,9 +1,15 @@
from os import path

__version__ = '0.7.7'
__version__ = '0.8.0'

package_dir = path.abspath(path.dirname(__file__))


from . import op  # noqa
from . import context  # noqa

import sys
from .runtime import environment
from .runtime import migration
sys.modules['alembic.migration'] = migration
sys.modules['alembic.environment'] = environment
alembic/autogenerate/__init__.py
@@ -1,2 +1,7 @@
from .api import compare_metadata, _produce_migration_diffs, \
    _produce_net_changes
from .api import (  # noqa
    compare_metadata, _render_migration_diffs,
    produce_migrations, render_python_code
)
from .compare import _produce_net_changes  # noqa
from .generate import RevisionContext  # noqa
from .render import render_op_text, renderers  # noqa
alembic/autogenerate/api.py
@@ -1,26 +1,12 @@
"""Provide the 'autogenerate' feature which can produce migration operations
automatically."""

import logging
import itertools
import re

from ..compat import StringIO

from mako.pygen import PythonPrinter
from sqlalchemy.engine.reflection import Inspector
from sqlalchemy.util import OrderedSet
from .compare import _compare_tables
from .render import _drop_table, _drop_column, _drop_index, _drop_constraint, \
    _add_table, _add_column, _add_index, _add_constraint, _modify_col, \
    _add_fk_constraint
from ..operations import ops
from . import render
from . import compare
from . import compose
from .. import util

log = logging.getLogger(__name__)

###################################################
# public


def compare_metadata(context, metadata):
    """Compare a database schema to that given in a
@@ -105,9 +91,14 @@ def compare_metadata(context, metadata):
    :param metadata: a :class:`~sqlalchemy.schema.MetaData`
      instance.

    .. seealso::

        :func:`.produce_migrations` - produces a :class:`.MigrationScript`
        structure based on metadata comparison.

    """

    autogen_context, connection = _autogen_context(context, None)
    autogen_context = _autogen_context(context, metadata=metadata)

    # as_sql=True is nonsensical here. autogenerate requires a connection
    # it can use to run queries against to get the database schema.
@@ -118,76 +109,107 @@ def compare_metadata(context, metadata):

    diffs = []

    object_filters = _get_object_filters(context.opts)
    include_schemas = context.opts.get('include_schemas', False)

    _produce_net_changes(connection, metadata, diffs, autogen_context,
                         object_filters, include_schemas)
    compare._produce_net_changes(autogen_context, diffs)

    return diffs

###################################################
# top level


def produce_migrations(context, metadata):
    """Produce a :class:`.MigrationScript` structure based on schema
    comparison.

def _produce_migration_diffs(context, template_args,
                             imports, include_symbol=None,
                             include_object=None,
                             include_schemas=False):
    opts = context.opts
    metadata = opts['target_metadata']
    include_schemas = opts.get('include_schemas', include_schemas)
    This function does essentially what :func:`.compare_metadata` does,
    but then runs the resulting list of diffs to produce the full
    :class:`.MigrationScript` object.  For an example of what this looks like,
    see the example in :ref:`customizing_revision`.

    object_filters = _get_object_filters(opts, include_symbol, include_object)
    .. versionadded:: 0.8.0

    if metadata is None:
        raise util.CommandError(
            "Can't proceed with --autogenerate option; environment "
            "script %s does not provide "
            "a MetaData object to the context." % (
                context.script.env_py_location
            ))
    autogen_context, connection = _autogen_context(context, imports)
    .. seealso::

        :func:`.compare_metadata` - returns more fundamental "diff"
        data from comparing a schema.

    """

    autogen_context = _autogen_context(context, metadata=metadata)
    diffs = []
    _produce_net_changes(connection, metadata, diffs,
                         autogen_context, object_filters, include_schemas)

    template_args[opts['upgrade_token']] = _indent(_render_cmd_body(
        _produce_upgrade_commands, diffs, autogen_context))
    template_args[opts['downgrade_token']] = _indent(_render_cmd_body(
        _produce_downgrade_commands, diffs, autogen_context))
    template_args['imports'] = "\n".join(sorted(imports))
    compare._produce_net_changes(autogen_context, diffs)


def _indent(text):
    text = re.compile(r'^', re.M).sub("    ", text).strip()
    text = re.compile(r' +$', re.M).sub("", text)
    return text


def _render_cmd_body(fn, diffs, autogen_context):

    buf = StringIO()
    printer = PythonPrinter(buf)

    printer.writeline(
        "### commands auto generated by Alembic - "
        "please adjust! ###"
    migration_script = ops.MigrationScript(
        rev_id=None,
        upgrade_ops=ops.UpgradeOps([]),
        downgrade_ops=ops.DowngradeOps([]),
    )

    for line in fn(diffs, autogen_context):
        printer.writeline(line)
    compose._to_migration_script(autogen_context, migration_script, diffs)

    printer.writeline("### end Alembic commands ###")

    return buf.getvalue()
    return migration_script


def _get_object_filters(
        context_opts, include_symbol=None, include_object=None):
    include_symbol = context_opts.get('include_symbol', include_symbol)
    include_object = context_opts.get('include_object', include_object)
def render_python_code(
        up_or_down_op,
        sqlalchemy_module_prefix='sa.',
        alembic_module_prefix='op.',
        imports=(),
        render_item=None,
):
    """Render Python code given an :class:`.UpgradeOps` or
    :class:`.DowngradeOps` object.

    This is a convenience function that can be used to test the
    autogenerate output of a user-defined :class:`.MigrationScript` structure.

    """
    autogen_context = {
        'opts': {
            'sqlalchemy_module_prefix': sqlalchemy_module_prefix,
            'alembic_module_prefix': alembic_module_prefix,
            'render_item': render_item,
        },
        'imports': set(imports)
    }
    return render._indent(render._render_cmd_body(
        up_or_down_op, autogen_context))


def _render_migration_diffs(context, template_args, imports):
    """legacy, used by test_autogen_composition at the moment"""

    migration_script = produce_migrations(context, None)

    autogen_context = _autogen_context(context, imports=imports)
    diffs = []

    compare._produce_net_changes(autogen_context, diffs)

    migration_script = ops.MigrationScript(
        rev_id=None,
        imports=imports,
        upgrade_ops=ops.UpgradeOps([]),
        downgrade_ops=ops.DowngradeOps([]),
    )

    compose._to_migration_script(autogen_context, migration_script, diffs)

    render._render_migration_script(
        autogen_context, migration_script, template_args
    )


def _autogen_context(
        context, imports=None, metadata=None, include_symbol=None,
        include_object=None, include_schemas=False):

    opts = context.opts
    metadata = opts['target_metadata'] if metadata is None else metadata
    include_schemas = opts.get('include_schemas', include_schemas)

    include_symbol = opts.get('include_symbol', include_symbol)
    include_object = opts.get('include_object', include_object)

    object_filters = []
    if include_symbol:
@@ -200,171 +222,24 @@ def _get_object_filters(
    if include_object:
        object_filters.append(include_object)

    return object_filters
    if metadata is None:
        raise util.CommandError(
            "Can't proceed with --autogenerate option; environment "
            "script %s does not provide "
            "a MetaData object to the context." % (
                context.script.env_py_location
            ))


def _autogen_context(context, imports):
    opts = context.opts
    connection = context.bind
    return {
        'imports': imports,
        'imports': imports if imports is not None else set(),
        'connection': connection,
        'dialect': connection.dialect,
        'context': context,
        'opts': opts
    }, connection


###################################################
# walk structures


def _produce_net_changes(connection, metadata, diffs, autogen_context,
                         object_filters=(),
                         include_schemas=False):
    inspector = Inspector.from_engine(connection)
    conn_table_names = set()

    default_schema = connection.dialect.default_schema_name
    if include_schemas:
        schemas = set(inspector.get_schema_names())
        # replace default schema name with None
        schemas.discard("information_schema")
        # replace the "default" schema with None
        schemas.add(None)
        schemas.discard(default_schema)
    else:
        schemas = [None]

    version_table_schema = autogen_context['context'].version_table_schema
    version_table = autogen_context['context'].version_table

    for s in schemas:
        tables = set(inspector.get_table_names(schema=s))
        if s == version_table_schema:
            tables = tables.difference(
                [autogen_context['context'].version_table]
            )
        conn_table_names.update(zip([s] * len(tables), tables))

    metadata_table_names = OrderedSet(
        [(table.schema, table.name) for table in metadata.sorted_tables]
    ).difference([(version_table_schema, version_table)])

    _compare_tables(conn_table_names, metadata_table_names,
                    object_filters,
                    inspector, metadata, diffs, autogen_context)


def _produce_upgrade_commands(diffs, autogen_context):
    return _produce_commands("upgrade", diffs, autogen_context)


def _produce_downgrade_commands(diffs, autogen_context):
    return _produce_commands("downgrade", diffs, autogen_context)


def _produce_commands(type_, diffs, autogen_context):
    opts = autogen_context['opts']
    render_as_batch = opts.get('render_as_batch', False)

    if diffs:
        if type_ == 'downgrade':
            diffs = reversed(diffs)
        for (schema, table), subdiffs in _group_diffs_by_table(diffs):
            if table is not None and render_as_batch:
                yield "with op.batch_alter_table"\
                    "(%r, schema=%r) as batch_op:" % (table, schema)
                autogen_context['batch_prefix'] = 'batch_op.'
            for diff in subdiffs:
                yield _invoke_command(type_, diff, autogen_context)
            if table is not None and render_as_batch:
                del autogen_context['batch_prefix']
                yield ""
    else:
        yield "pass"


def _invoke_command(updown, args, autogen_context):
    if isinstance(args, tuple):
        return _invoke_adddrop_command(updown, args, autogen_context)
    else:
        return _invoke_modify_command(updown, args, autogen_context)


def _invoke_adddrop_command(updown, args, autogen_context):
    cmd_type = args[0]
    adddrop, cmd_type = cmd_type.split("_")

    cmd_args = args[1:] + (autogen_context,)

    _commands = {
        "table": (_drop_table, _add_table),
        "column": (_drop_column, _add_column),
        "index": (_drop_index, _add_index),
        "constraint": (_drop_constraint, _add_constraint),
        "fk": (_drop_constraint, _add_fk_constraint)
        'opts': opts,
        'metadata': metadata,
        'object_filters': object_filters,
        'include_schemas': include_schemas
    }

    cmd_callables = _commands[cmd_type]

    if (
        updown == "upgrade" and adddrop == "add"
    ) or (
        updown == "downgrade" and adddrop == "remove"
    ):
        return cmd_callables[1](*cmd_args)
    else:
        return cmd_callables[0](*cmd_args)


def _invoke_modify_command(updown, args, autogen_context):
    sname, tname, cname = args[0][1:4]
    kw = {}

    _arg_struct = {
        "modify_type": ("existing_type", "type_"),
        "modify_nullable": ("existing_nullable", "nullable"),
        "modify_default": ("existing_server_default", "server_default"),
    }
    for diff in args:
        diff_kw = diff[4]
        for arg in ("existing_type",
                    "existing_nullable",
                    "existing_server_default"):
            if arg in diff_kw:
                kw.setdefault(arg, diff_kw[arg])
        old_kw, new_kw = _arg_struct[diff[0]]
        if updown == "upgrade":
            kw[new_kw] = diff[-1]
            kw[old_kw] = diff[-2]
        else:
            kw[new_kw] = diff[-2]
            kw[old_kw] = diff[-1]

    if "nullable" in kw:
        kw.pop("existing_nullable", None)
    if "server_default" in kw:
        kw.pop("existing_server_default", None)
    return _modify_col(tname, cname, autogen_context, schema=sname, **kw)


def _group_diffs_by_table(diffs):
    _adddrop = {
        "table": lambda diff: (None, None),
        "column": lambda diff: (diff[0], diff[1]),
        "index": lambda diff: (diff[0].table.schema, diff[0].table.name),
        "constraint": lambda diff: (diff[0].table.schema, diff[0].table.name),
        "fk": lambda diff: (diff[0].parent.schema, diff[0].parent.name)
    }

    def _derive_table(diff):
        if isinstance(diff, tuple):
            cmd_type = diff[0]
            adddrop, cmd_type = cmd_type.split("_")
            return _adddrop[cmd_type](diff[1:])
        else:
            sname, tname = diff[0][1:3]
            return sname, tname

    return itertools.groupby(diffs, _derive_table)
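
A quick sketch of how the two new public helpers above compose; the
``migration_context`` and ``metadata`` objects are assumed to come from an
already-configured environment:

    from alembic.autogenerate import produce_migrations, render_python_code

    # compare a live database to the given MetaData and get back the full
    # MigrationScript structure rather than raw diff tuples
    migration_script = produce_migrations(migration_context, metadata)

    # then render just the upgrade portion as "op.*" Python source; handy
    # for testing custom manipulation of the MigrationScript structure
    print(render_python_code(migration_script.upgrade_ops))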
alembic/autogenerate/compare.py
@@ -1,7 +1,9 @@
from sqlalchemy import schema as sa_schema, types as sqltypes
from sqlalchemy.engine.reflection import Inspector
from sqlalchemy import event
import logging
from .. import compat
from ..util import compat
from ..util import sqla_compat
from sqlalchemy.util import OrderedSet
import re
from .render import _user_defined_render
@@ -11,6 +13,47 @@ from alembic.ddl.base import _fk_spec
log = logging.getLogger(__name__)


def _produce_net_changes(autogen_context, diffs):

    metadata = autogen_context['metadata']
    connection = autogen_context['connection']
    object_filters = autogen_context.get('object_filters', ())
    include_schemas = autogen_context.get('include_schemas', False)

    inspector = Inspector.from_engine(connection)
    conn_table_names = set()

    default_schema = connection.dialect.default_schema_name
    if include_schemas:
        schemas = set(inspector.get_schema_names())
        # replace default schema name with None
        schemas.discard("information_schema")
        # replace the "default" schema with None
        schemas.add(None)
        schemas.discard(default_schema)
    else:
        schemas = [None]

    version_table_schema = autogen_context['context'].version_table_schema
    version_table = autogen_context['context'].version_table

    for s in schemas:
        tables = set(inspector.get_table_names(schema=s))
        if s == version_table_schema:
            tables = tables.difference(
                [autogen_context['context'].version_table]
            )
        conn_table_names.update(zip([s] * len(tables), tables))

    metadata_table_names = OrderedSet(
        [(table.schema, table.name) for table in metadata.sorted_tables]
    ).difference([(version_table_schema, version_table)])

    _compare_tables(conn_table_names, metadata_table_names,
                    object_filters,
                    inspector, metadata, diffs, autogen_context)


def _run_filters(object_, name, type_, reflected, compare_to, object_filters):
    for fn in object_filters:
        if not fn(object_, name, type_, reflected, compare_to):
@@ -250,7 +293,7 @@ class _ix_constraint_sig(_constraint_sig):

    @property
    def column_names(self):
        return _get_index_column_names(self.const)
        return sqla_compat._get_index_column_names(self.const)


class _fk_constraint_sig(_constraint_sig):
@@ -267,13 +310,6 @@ class _fk_constraint_sig(_constraint_sig):
        )


def _get_index_column_names(idx):
    if compat.sqla_08:
        return [getattr(exp, "name", None) for exp in idx.expressions]
    else:
        return [getattr(col, "name", None) for col in idx.columns]


def _compare_indexes_and_uniques(schema, tname, object_filters, conn_table,
                                 metadata_table, diffs,
                                 autogen_context, inspector):
alembic/autogenerate/compose.py (new file, 144 lines)
@@ -0,0 +1,144 @@
import itertools
from ..operations import ops


def _to_migration_script(autogen_context, migration_script, diffs):
    _to_upgrade_op(
        autogen_context,
        diffs,
        migration_script.upgrade_ops,
    )

    _to_downgrade_op(
        autogen_context,
        diffs,
        migration_script.downgrade_ops,
    )


def _to_upgrade_op(autogen_context, diffs, upgrade_ops):
    return _to_updown_op(autogen_context, diffs, upgrade_ops, "upgrade")


def _to_downgrade_op(autogen_context, diffs, downgrade_ops):
    return _to_updown_op(autogen_context, diffs, downgrade_ops, "downgrade")


def _to_updown_op(autogen_context, diffs, op_container, type_):
    if not diffs:
        return

    if type_ == 'downgrade':
        diffs = reversed(diffs)

    dest = [op_container.ops]

    for (schema, tablename), subdiffs in _group_diffs_by_table(diffs):
        subdiffs = list(subdiffs)
        if tablename is not None:
            table_ops = []
            op = ops.ModifyTableOps(tablename, table_ops, schema=schema)
            dest[-1].append(op)
            dest.append(table_ops)
        for diff in subdiffs:
            _produce_command(autogen_context, diff, dest[-1], type_)
        if tablename is not None:
            dest.pop(-1)


def _produce_command(autogen_context, diff, op_list, updown):
    if isinstance(diff, tuple):
        _produce_adddrop_command(updown, diff, op_list, autogen_context)
    else:
        _produce_modify_command(updown, diff, op_list, autogen_context)


def _produce_adddrop_command(updown, diff, op_list, autogen_context):
    cmd_type = diff[0]
    adddrop, cmd_type = cmd_type.split("_")

    cmd_args = diff[1:]

    _commands = {
        "table": (ops.DropTableOp.from_table, ops.CreateTableOp.from_table),
        "column": (
            ops.DropColumnOp.from_column_and_tablename,
            ops.AddColumnOp.from_column_and_tablename),
        "index": (ops.DropIndexOp.from_index, ops.CreateIndexOp.from_index),
        "constraint": (
            ops.DropConstraintOp.from_constraint,
            ops.AddConstraintOp.from_constraint),
        "fk": (
            ops.DropConstraintOp.from_constraint,
            ops.CreateForeignKeyOp.from_constraint)
    }

    cmd_callables = _commands[cmd_type]

    if (
        updown == "upgrade" and adddrop == "add"
    ) or (
        updown == "downgrade" and adddrop == "remove"
    ):
        op_list.append(cmd_callables[1](*cmd_args))
    else:
        op_list.append(cmd_callables[0](*cmd_args))


def _produce_modify_command(updown, diffs, op_list, autogen_context):
    sname, tname, cname = diffs[0][1:4]
    kw = {}

    _arg_struct = {
        "modify_type": ("existing_type", "modify_type"),
        "modify_nullable": ("existing_nullable", "modify_nullable"),
        "modify_default": ("existing_server_default", "modify_server_default"),
    }
    for diff in diffs:
        diff_kw = diff[4]
        for arg in ("existing_type",
                    "existing_nullable",
                    "existing_server_default"):
            if arg in diff_kw:
                kw.setdefault(arg, diff_kw[arg])
        old_kw, new_kw = _arg_struct[diff[0]]
        if updown == "upgrade":
            kw[new_kw] = diff[-1]
            kw[old_kw] = diff[-2]
        else:
            kw[new_kw] = diff[-2]
            kw[old_kw] = diff[-1]

    if "modify_nullable" in kw:
        kw.pop("existing_nullable", None)
    if "modify_server_default" in kw:
        kw.pop("existing_server_default", None)

    op_list.append(
        ops.AlterColumnOp(
            tname, cname, schema=sname,
            **kw
        )
    )


def _group_diffs_by_table(diffs):
    _adddrop = {
        "table": lambda diff: (None, None),
        "column": lambda diff: (diff[0], diff[1]),
        "index": lambda diff: (diff[0].table.schema, diff[0].table.name),
        "constraint": lambda diff: (diff[0].table.schema, diff[0].table.name),
        "fk": lambda diff: (diff[0].parent.schema, diff[0].parent.name)
    }

    def _derive_table(diff):
        if isinstance(diff, tuple):
            cmd_type = diff[0]
            adddrop, cmd_type = cmd_type.split("_")
            return _adddrop[cmd_type](diff[1:])
        else:
            sname, tname = diff[0][1:3]
            return sname, tname

    return itertools.groupby(diffs, _derive_table)
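
A note on _group_diffs_by_table(): itertools.groupby() only merges
*adjacent* keys, so diffs against one table collapse into a single
ModifyTableOps container only when the comparison phase emits them
contiguously.  A small illustration with toy data (not the real diff
tuples):

    import itertools

    diffs = [("user", "add_column"), ("user", "drop_column"),
             ("order", "add_column"), ("user", "alter_column")]
    grouped = [(key, list(group))
               for key, group in itertools.groupby(diffs, lambda d: d[0])]
    # -> three groups: ('user', 2 items), ('order', 1 item), ('user', 1 item);
    # the non-adjacent 'user' entries do not merge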
alembic/autogenerate/generate.py (new file, 92 lines)
@@ -0,0 +1,92 @@
from .. import util
from . import api
from . import compose
from . import compare
from . import render
from ..operations import ops


class RevisionContext(object):
    def __init__(self, config, script_directory, command_args):
        self.config = config
        self.script_directory = script_directory
        self.command_args = command_args
        self.template_args = {
            'config': config  # Let templates use config for
                              # e.g. multiple databases
        }
        self.generated_revisions = [
            self._default_revision()
        ]

    def _to_script(self, migration_script):
        template_args = {}
        for k, v in self.template_args.items():
            template_args.setdefault(k, v)

        if migration_script._autogen_context is not None:
            render._render_migration_script(
                migration_script._autogen_context, migration_script,
                template_args
            )

        return self.script_directory.generate_revision(
            migration_script.rev_id,
            migration_script.message,
            refresh=True,
            head=migration_script.head,
            splice=migration_script.splice,
            branch_labels=migration_script.branch_label,
            version_path=migration_script.version_path,
            **template_args)

    def run_autogenerate(self, rev, context):
        if self.command_args['sql']:
            raise util.CommandError(
                "Using --sql with --autogenerate does not make any sense")
        if set(self.script_directory.get_revisions(rev)) != \
                set(self.script_directory.get_revisions("heads")):
            raise util.CommandError("Target database is not up to date.")

        autogen_context = api._autogen_context(context)

        diffs = []
        compare._produce_net_changes(autogen_context, diffs)

        migration_script = self.generated_revisions[0]

        compose._to_migration_script(autogen_context, migration_script, diffs)

        hook = context.opts.get('process_revision_directives', None)
        if hook:
            hook(context, rev, self.generated_revisions)

        for migration_script in self.generated_revisions:
            migration_script._autogen_context = autogen_context

    def run_no_autogenerate(self, rev, context):
        hook = context.opts.get('process_revision_directives', None)
        if hook:
            hook(context, rev, self.generated_revisions)

        for migration_script in self.generated_revisions:
            migration_script._autogen_context = None

    def _default_revision(self):
        op = ops.MigrationScript(
            rev_id=self.command_args['rev_id'] or util.rev_id(),
            message=self.command_args['message'],
            imports=set(),
            upgrade_ops=ops.UpgradeOps([]),
            downgrade_ops=ops.DowngradeOps([]),
            head=self.command_args['head'],
            splice=self.command_args['splice'],
            branch_label=self.command_args['branch_label'],
            version_path=self.command_args['version_path']
        )
        op._autogen_context = None
        return op

    def generate_scripts(self):
        for generated_revision in self.generated_revisions:
            yield self._to_script(generated_revision)
alembic/autogenerate/render.py
@@ -1,11 +1,12 @@
from sqlalchemy import schema as sa_schema, types as sqltypes, sql
import logging
from .. import compat
from ..ddl.base import _table_for_constraint, _fk_spec
from ..operations import ops
from ..util import compat
import re
from ..compat import string_types
from ..util.compat import string_types
from .. import util
from mako.pygen import PythonPrinter
from ..util.compat import StringIO

log = logging.getLogger(__name__)

MAX_PYTHON_ARGS = 255

@@ -22,6 +23,341 @@ except ImportError:
        return name


def _indent(text):
    text = re.compile(r'^', re.M).sub("    ", text).strip()
    text = re.compile(r' +$', re.M).sub("", text)
    return text


def _render_migration_script(autogen_context, migration_script, template_args):
    opts = autogen_context['opts']
    imports = autogen_context['imports']
    template_args[opts['upgrade_token']] = _indent(_render_cmd_body(
        migration_script.upgrade_ops, autogen_context))
    template_args[opts['downgrade_token']] = _indent(_render_cmd_body(
        migration_script.downgrade_ops, autogen_context))
    template_args['imports'] = "\n".join(sorted(imports))


default_renderers = renderers = util.Dispatcher()


def _render_cmd_body(op_container, autogen_context):

    buf = StringIO()
    printer = PythonPrinter(buf)

    printer.writeline(
        "### commands auto generated by Alembic - "
        "please adjust! ###"
    )

    if not op_container.ops:
        printer.writeline("pass")
    else:
        for op in op_container.ops:
            lines = render_op(autogen_context, op)

            for line in lines:
                printer.writeline(line)

    printer.writeline("### end Alembic commands ###")

    return buf.getvalue()


def render_op(autogen_context, op):
    renderer = renderers.dispatch(op)
    lines = util.to_list(renderer(autogen_context, op))
    return lines


def render_op_text(autogen_context, op):
    return "\n".join(render_op(autogen_context, op))


@renderers.dispatch_for(ops.ModifyTableOps)
def _render_modify_table(autogen_context, op):
    opts = autogen_context['opts']
    render_as_batch = opts.get('render_as_batch', False)

    if op.ops:
        lines = []
        if render_as_batch:
            lines.append(
                "with op.batch_alter_table(%r, schema=%r) as batch_op:"
                % (op.table_name, op.schema)
            )
            autogen_context['batch_prefix'] = 'batch_op.'
        for t_op in op.ops:
            t_lines = render_op(autogen_context, t_op)
            lines.extend(t_lines)
        if render_as_batch:
            del autogen_context['batch_prefix']
            lines.append("")
        return lines
    else:
        return [
            "pass"
        ]


@renderers.dispatch_for(ops.CreateTableOp)
def _add_table(autogen_context, op):
    table = op.to_table()

    args = [col for col in
            [_render_column(col, autogen_context) for col in table.columns]
            if col] + \
        sorted([rcons for rcons in
                [_render_constraint(cons, autogen_context) for cons in
                    table.constraints]
                if rcons is not None
                ])

    if len(args) > MAX_PYTHON_ARGS:
        args = '*[' + ',\n'.join(args) + ']'
    else:
        args = ',\n'.join(args)

    text = "%(prefix)screate_table(%(tablename)r,\n%(args)s" % {
        'tablename': _ident(op.table_name),
        'prefix': _alembic_autogenerate_prefix(autogen_context),
        'args': args,
    }
    if op.schema:
        text += ",\nschema=%r" % _ident(op.schema)
    for k in sorted(op.kw):
        text += ",\n%s=%r" % (k.replace(" ", "_"), op.kw[k])
    text += "\n)"
    return text


@renderers.dispatch_for(ops.DropTableOp)
def _drop_table(autogen_context, op):
    text = "%(prefix)sdrop_table(%(tname)r" % {
        "prefix": _alembic_autogenerate_prefix(autogen_context),
        "tname": _ident(op.table_name)
    }
    if op.schema:
        text += ", schema=%r" % _ident(op.schema)
    text += ")"
    return text


@renderers.dispatch_for(ops.CreateIndexOp)
def _add_index(autogen_context, op):
    index = op.to_index()

    has_batch = 'batch_prefix' in autogen_context

    if has_batch:
        tmpl = "%(prefix)screate_index(%(name)r, [%(columns)s], "\
            "unique=%(unique)r%(kwargs)s)"
    else:
        tmpl = "%(prefix)screate_index(%(name)r, %(table)r, [%(columns)s], "\
            "unique=%(unique)r%(schema)s%(kwargs)s)"

    text = tmpl % {
        'prefix': _alembic_autogenerate_prefix(autogen_context),
        'name': _render_gen_name(autogen_context, index.name),
        'table': _ident(index.table.name),
        'columns': ", ".join(
            _get_index_rendered_expressions(index, autogen_context)),
        'unique': index.unique or False,
        'schema': (", schema=%r" % _ident(index.table.schema))
        if index.table.schema else '',
        'kwargs': (
            ', ' +
            ', '.join(
                ["%s=%s" %
                 (key, _render_potential_expr(val, autogen_context))
                 for key, val in index.kwargs.items()]))
        if len(index.kwargs) else ''
    }
    return text


@renderers.dispatch_for(ops.DropIndexOp)
def _drop_index(autogen_context, op):
    has_batch = 'batch_prefix' in autogen_context

    if has_batch:
        tmpl = "%(prefix)sdrop_index(%(name)r)"
    else:
        tmpl = "%(prefix)sdrop_index(%(name)r, "\
            "table_name=%(table_name)r%(schema)s)"

    text = tmpl % {
        'prefix': _alembic_autogenerate_prefix(autogen_context),
        'name': _render_gen_name(autogen_context, op.index_name),
        'table_name': _ident(op.table_name),
        'schema': ((", schema=%r" % _ident(op.schema))
                   if op.schema else '')
    }
    return text


@renderers.dispatch_for(ops.CreateUniqueConstraintOp)
def _add_unique_constraint(autogen_context, op):
    return [_uq_constraint(op.to_constraint(), autogen_context, True)]


@renderers.dispatch_for(ops.CreateForeignKeyOp)
def _add_fk_constraint(autogen_context, op):

    args = [
        repr(
            _render_gen_name(autogen_context, op.constraint_name)),
        repr(_ident(op.source_table)),
        repr(_ident(op.referent_table)),
        repr([_ident(col) for col in op.local_cols]),
        repr([_ident(col) for col in op.remote_cols])
    ]

    for k in (
        'source_schema', 'referent_schema',
        'onupdate', 'ondelete', 'initially', 'deferrable', 'use_alter'
    ):
        if k in op.kw:
            value = op.kw[k]
            if value is not None:
                args.append("%s=%r" % (k, value))

    return "%(prefix)screate_foreign_key(%(args)s)" % {
        'prefix': _alembic_autogenerate_prefix(autogen_context),
        'args': ", ".join(args)
    }


@renderers.dispatch_for(ops.CreatePrimaryKeyOp)
def _add_pk_constraint(constraint, autogen_context):
    raise NotImplementedError()


@renderers.dispatch_for(ops.CreateCheckConstraintOp)
def _add_check_constraint(constraint, autogen_context):
    raise NotImplementedError()


@renderers.dispatch_for(ops.DropConstraintOp)
def _drop_constraint(autogen_context, op):

    if 'batch_prefix' in autogen_context:
        template = "%(prefix)sdrop_constraint"\
            "(%(name)r, type_=%(type)r)"
    else:
        template = "%(prefix)sdrop_constraint"\
            "(%(name)r, '%(table_name)s'%(schema)s, type_=%(type)r)"

    text = template % {
        'prefix': _alembic_autogenerate_prefix(autogen_context),
        'name': _render_gen_name(
            autogen_context, op.constraint_name),
        'table_name': _ident(op.table_name),
        'type': op.constraint_type,
        'schema': (", schema='%s'" % _ident(op.schema))
        if op.schema else '',
    }
    return text


@renderers.dispatch_for(ops.AddColumnOp)
def _add_column(autogen_context, op):

    schema, tname, column = op.schema, op.table_name, op.column
    if 'batch_prefix' in autogen_context:
        template = "%(prefix)sadd_column(%(column)s)"
    else:
        template = "%(prefix)sadd_column(%(tname)r, %(column)s"
        if schema:
            template += ", schema=%(schema)r"
        template += ")"
    text = template % {
        "prefix": _alembic_autogenerate_prefix(autogen_context),
        "tname": tname,
        "column": _render_column(column, autogen_context),
        "schema": schema
    }
    return text


@renderers.dispatch_for(ops.DropColumnOp)
def _drop_column(autogen_context, op):

    schema, tname, column_name = op.schema, op.table_name, op.column_name

    if 'batch_prefix' in autogen_context:
        template = "%(prefix)sdrop_column(%(cname)r)"
    else:
        template = "%(prefix)sdrop_column(%(tname)r, %(cname)r"
        if schema:
            template += ", schema=%(schema)r"
        template += ")"

    text = template % {
        "prefix": _alembic_autogenerate_prefix(autogen_context),
        "tname": _ident(tname),
        "cname": _ident(column_name),
        "schema": _ident(schema)
    }
    return text


@renderers.dispatch_for(ops.AlterColumnOp)
def _alter_column(autogen_context, op):

    tname = op.table_name
    cname = op.column_name
    server_default = op.modify_server_default
    type_ = op.modify_type
    nullable = op.modify_nullable
    existing_type = op.existing_type
    existing_nullable = op.existing_nullable
    existing_server_default = op.existing_server_default
    schema = op.schema

    indent = " " * 11

    if 'batch_prefix' in autogen_context:
        template = "%(prefix)salter_column(%(cname)r"
    else:
        template = "%(prefix)salter_column(%(tname)r, %(cname)r"

    text = template % {
        'prefix': _alembic_autogenerate_prefix(
            autogen_context),
        'tname': tname,
        'cname': cname}
    text += ",\n%sexisting_type=%s" % (
        indent,
        _repr_type(existing_type, autogen_context))
    if server_default is not False:
        rendered = _render_server_default(
            server_default, autogen_context)
        text += ",\n%sserver_default=%s" % (indent, rendered)

    if type_ is not None:
        text += ",\n%stype_=%s" % (indent,
                                   _repr_type(type_, autogen_context))
    if nullable is not None:
        text += ",\n%snullable=%r" % (
            indent, nullable,)
    if existing_nullable is not None:
        text += ",\n%sexisting_nullable=%r" % (
            indent, existing_nullable)
    if existing_server_default:
        rendered = _render_server_default(
            existing_server_default,
            autogen_context)
        text += ",\n%sexisting_server_default=%s" % (
            indent, rendered)
    if schema and "batch_prefix" not in autogen_context:
        text += ",\n%sschema=%r" % (indent, schema)
    text += ")"
    return text


class _f_name(object):

    def __init__(self, prefix, name):
@@ -82,45 +418,6 @@ def _render_potential_expr(value, autogen_context, wrap_in_text=True):
    return repr(value)


def _add_table(table, autogen_context):
    args = [col for col in
            [_render_column(col, autogen_context) for col in table.c]
            if col] + \
        sorted([rcons for rcons in
                [_render_constraint(cons, autogen_context) for cons in
                    table.constraints]
                if rcons is not None
                ])

    if len(args) > MAX_PYTHON_ARGS:
        args = '*[' + ',\n'.join(args) + ']'
    else:
        args = ',\n'.join(args)

    text = "%(prefix)screate_table(%(tablename)r,\n%(args)s" % {
        'tablename': _ident(table.name),
        'prefix': _alembic_autogenerate_prefix(autogen_context),
        'args': args,
    }
    if table.schema:
        text += ",\nschema=%r" % _ident(table.schema)
    for k in sorted(table.kwargs):
        text += ",\n%s=%r" % (k.replace(" ", "_"), table.kwargs[k])
    text += "\n)"
    return text


def _drop_table(table, autogen_context):
    text = "%(prefix)sdrop_table(%(tname)r" % {
        "prefix": _alembic_autogenerate_prefix(autogen_context),
        "tname": _ident(table.name)
    }
    if table.schema:
        text += ", schema=%r" % _ident(table.schema)
    text += ")"
    return text


def _get_index_rendered_expressions(idx, autogen_context):
    if compat.sqla_08:
        return [repr(_ident(getattr(exp, "name", None)))
@@ -132,80 +429,6 @@ def _get_index_rendered_expressions(idx, autogen_context):
            repr(_ident(getattr(col, "name", None))) for col in idx.columns]


def _add_index(index, autogen_context):
    """
    Generate Alembic operations for the CREATE INDEX of an
    :class:`~sqlalchemy.schema.Index` instance.
    """

    has_batch = 'batch_prefix' in autogen_context

    if has_batch:
        tmpl = "%(prefix)screate_index(%(name)r, [%(columns)s], "\
            "unique=%(unique)r%(kwargs)s)"
    else:
        tmpl = "%(prefix)screate_index(%(name)r, %(table)r, [%(columns)s], "\
            "unique=%(unique)r%(schema)s%(kwargs)s)"

    text = tmpl % {
        'prefix': _alembic_autogenerate_prefix(autogen_context),
        'name': _render_gen_name(autogen_context, index.name),
        'table': _ident(index.table.name),
        'columns': ", ".join(
            _get_index_rendered_expressions(index, autogen_context)),
        'unique': index.unique or False,
        'schema': (", schema=%r" % _ident(index.table.schema))
        if index.table.schema else '',
        'kwargs': (
            ', ' +
            ', '.join(
                ["%s=%s" %
                 (key, _render_potential_expr(val, autogen_context))
                 for key, val in index.kwargs.items()]))
        if len(index.kwargs) else ''
    }
    return text


def _drop_index(index, autogen_context):
    """
    Generate Alembic operations for the DROP INDEX of an
    :class:`~sqlalchemy.schema.Index` instance.
    """
    has_batch = 'batch_prefix' in autogen_context

    if has_batch:
        tmpl = "%(prefix)sdrop_index(%(name)r)"
    else:
        tmpl = "%(prefix)sdrop_index(%(name)r, "\
            "table_name=%(table_name)r%(schema)s)"

    text = tmpl % {
        'prefix': _alembic_autogenerate_prefix(autogen_context),
        'name': _render_gen_name(autogen_context, index.name),
        'table_name': _ident(index.table.name),
        'schema': ((", schema=%r" % _ident(index.table.schema))
                   if index.table.schema else '')
    }
    return text


def _render_unique_constraint(constraint, autogen_context):
    rendered = _user_defined_render("unique", constraint, autogen_context)
    if rendered is not False:
        return rendered

    return _uq_constraint(constraint, autogen_context, False)


def _add_unique_constraint(constraint, autogen_context):
    """
    Generate Alembic operations for the ALTER TABLE .. ADD CONSTRAINT ...
    UNIQUE of a :class:`~sqlalchemy.schema.UniqueConstraint` instance.
    """
    return _uq_constraint(constraint, autogen_context, True)


def _uq_constraint(constraint, autogen_context, alter):
    opts = []

@@ -224,7 +447,8 @@ def _uq_constraint(constraint, autogen_context, alter):

    if alter:
        args = [
            repr(_render_gen_name(autogen_context, constraint.name))]
            repr(_render_gen_name(
                autogen_context, constraint.name))]
        if not has_batch:
            args += [repr(_ident(constraint.table.name))]
        args.append(repr([_ident(col.name) for col in constraint.columns]))
@@ -242,177 +466,6 @@ def _uq_constraint(constraint, autogen_context, alter):
    }


def _add_fk_constraint(constraint, autogen_context):
    source_schema, source_table, \
        source_columns, target_schema, \
        target_table, target_columns = _fk_spec(constraint)

    args = [
        repr(_render_gen_name(autogen_context, constraint.name)),
        repr(_ident(source_table)),
        repr(_ident(target_table)),
        repr([_ident(col) for col in source_columns]),
        repr([_ident(col) for col in target_columns])
    ]
    if source_schema:
        args.append(
            "%s=%r" % ('source_schema', source_schema),
        )
    if target_schema:
        args.append(
            "%s=%r" % ('referent_schema', target_schema)
        )

    opts = []
    _populate_render_fk_opts(constraint, opts)
    args.extend(("%s=%s" % (k, v) for (k, v) in opts))

    return "%(prefix)screate_foreign_key(%(args)s)" % {
        'prefix': _alembic_autogenerate_prefix(autogen_context),
        'args': ", ".join(args)
    }


def _add_pk_constraint(constraint, autogen_context):
    raise NotImplementedError()


def _add_check_constraint(constraint, autogen_context):
    raise NotImplementedError()


def _add_constraint(constraint, autogen_context):
    """
    Dispatcher for the different types of constraints.
    """
    funcs = {
        "unique_constraint": _add_unique_constraint,
        "foreign_key_constraint": _add_fk_constraint,
        "primary_key_constraint": _add_pk_constraint,
        "check_constraint": _add_check_constraint,
        "column_check_constraint": _add_check_constraint,
    }
    return funcs[constraint.__visit_name__](constraint, autogen_context)


def _drop_constraint(constraint, autogen_context):
    """
    Generate Alembic operations for the ALTER TABLE ... DROP CONSTRAINT
    of a :class:`~sqlalchemy.schema.UniqueConstraint` instance.
    """

    types = {
        "unique_constraint": "unique",
        "foreign_key_constraint": "foreignkey",
        "primary_key_constraint": "primary",
        "check_constraint": "check",
        "column_check_constraint": "check",
    }

    if 'batch_prefix' in autogen_context:
        template = "%(prefix)sdrop_constraint"\
            "(%(name)r, type_=%(type)r)"
    else:
        template = "%(prefix)sdrop_constraint"\
            "(%(name)r, '%(table_name)s'%(schema)s, type_=%(type)r)"

    constraint_table = _table_for_constraint(constraint)
    text = template % {
        'prefix': _alembic_autogenerate_prefix(autogen_context),
        'name': _render_gen_name(autogen_context, constraint.name),
        'table_name': _ident(constraint_table.name),
        'type': types[constraint.__visit_name__],
        'schema': (", schema='%s'" % _ident(constraint_table.schema))
        if constraint_table.schema else '',
    }
    return text


def _add_column(schema, tname, column, autogen_context):
    if 'batch_prefix' in autogen_context:
        template = "%(prefix)sadd_column(%(column)s)"
    else:
        template = "%(prefix)sadd_column(%(tname)r, %(column)s"
        if schema:
            template += ", schema=%(schema)r"
        template += ")"
    text = template % {
        "prefix": _alembic_autogenerate_prefix(autogen_context),
        "tname": tname,
        "column": _render_column(column, autogen_context),
        "schema": schema
    }
    return text


def _drop_column(schema, tname, column, autogen_context):
    if 'batch_prefix' in autogen_context:
        template = "%(prefix)sdrop_column(%(cname)r)"
    else:
        template = "%(prefix)sdrop_column(%(tname)r, %(cname)r"
        if schema:
            template += ", schema=%(schema)r"
        template += ")"

    text = template % {
        "prefix": _alembic_autogenerate_prefix(autogen_context),
        "tname": _ident(tname),
        "cname": _ident(column.name),
        "schema": _ident(schema)
    }
    return text


def _modify_col(tname, cname,
                autogen_context,
                server_default=False,
                type_=None,
                nullable=None,
                existing_type=None,
                existing_nullable=None,
                existing_server_default=False,
                schema=None):
    indent = " " * 11

    if 'batch_prefix' in autogen_context:
        template = "%(prefix)salter_column(%(cname)r"
    else:
        template = "%(prefix)salter_column(%(tname)r, %(cname)r"

    text = template % {
        'prefix': _alembic_autogenerate_prefix(
            autogen_context),
        'tname': tname,
        'cname': cname}
    text += ",\n%sexisting_type=%s" % (
        indent,
        _repr_type(existing_type, autogen_context))
    if server_default is not False:
        rendered = _render_server_default(
            server_default, autogen_context)
        text += ",\n%sserver_default=%s" % (indent, rendered)

    if type_ is not None:
        text += ",\n%stype_=%s" % (indent,
                                   _repr_type(type_, autogen_context))
    if nullable is not None:
        text += ",\n%snullable=%r" % (
            indent, nullable,)
    if existing_nullable is not None:
        text += ",\n%sexisting_nullable=%r" % (
            indent, existing_nullable)
    if existing_server_default:
        rendered = _render_server_default(
            existing_server_default,
            autogen_context)
        text += ",\n%sexisting_server_default=%s" % (
            indent, rendered)
    if schema and "batch_prefix" not in autogen_context:
        text += ",\n%sschema=%r" % (indent, schema)
    text += ")"
    return text


def _user_autogenerate_prefix(autogen_context, target):
    prefix = autogen_context['opts']['user_module_prefix']
    if prefix is None:
@@ -508,14 +561,15 @@ def _repr_type(type_, autogen_context):
    return "%s%r" % (prefix, type_)


_constraint_renderers = util.Dispatcher()


def _render_constraint(constraint, autogen_context):
    renderer = _constraint_renderers.get(type(constraint), None)
    if renderer:
    renderer = _constraint_renderers.dispatch(constraint)
        return renderer(constraint, autogen_context)
    else:
        return None


@_constraint_renderers.dispatch_for(sa_schema.PrimaryKeyConstraint)
def _render_primary_key(constraint, autogen_context):
    rendered = _user_defined_render("primary_key", constraint, autogen_context)
    if rendered is not False:
@@ -555,7 +609,8 @@ def _fk_colspec(fk, metadata_schema):
    # try to resolve the remote table and adjust for column.key
    parent_metadata = fk.parent.table.metadata
    if table_fullname in parent_metadata.tables:
        colname = _ident(parent_metadata.tables[table_fullname].c[colname].name)
        colname = _ident(
            parent_metadata.tables[table_fullname].c[colname].name)

    colspec = "%s.%s" % (table_fullname, colname)

@@ -576,6 +631,7 @@ def _populate_render_fk_opts(constraint, opts):
        opts.append(("use_alter", repr(constraint.use_alter)))


@_constraint_renderers.dispatch_for(sa_schema.ForeignKeyConstraint)
def _render_foreign_key(constraint, autogen_context):
    rendered = _user_defined_render("foreign_key", constraint, autogen_context)
    if rendered is not False:
@@ -602,6 +658,16 @@ def _render_foreign_key(constraint, autogen_context):
    }


@_constraint_renderers.dispatch_for(sa_schema.UniqueConstraint)
def _render_unique_constraint(constraint, autogen_context):
    rendered = _user_defined_render("unique", constraint, autogen_context)
    if rendered is not False:
        return rendered

    return _uq_constraint(constraint, autogen_context, False)


@_constraint_renderers.dispatch_for(sa_schema.CheckConstraint)
def _render_check_constraint(constraint, autogen_context):
    rendered = _user_defined_render("check", constraint, autogen_context)
    if rendered is not False:
@@ -622,7 +688,8 @@ def _render_check_constraint(constraint, autogen_context):
        (
            "name",
            repr(
                _render_gen_name(autogen_context, constraint.name))
                _render_gen_name(
                    autogen_context, constraint.name))
        )
    )
    return "%(prefix)sCheckConstraint(%(sqltext)s%(opts)s)" % {
@@ -633,9 +700,5 @@ def _render_check_constraint(constraint, autogen_context):
        constraint.sqltext, autogen_context, wrap_in_text=False)
    }

_constraint_renderers = {
    sa_schema.PrimaryKeyConstraint: _render_primary_key,
    sa_schema.ForeignKeyConstraint: _render_foreign_key,
    sa_schema.UniqueConstraint: _render_unique_constraint,
    sa_schema.CheckConstraint: _render_check_constraint
}

renderers = default_renderers.branch()
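
The ``renderers`` dispatcher above is the extension point promised by #302:
downstream code can register a renderer for its own operation class.  A
sketch under the new API; the CreateSequenceOp operation itself is
illustrative, not part of this commit:

    from alembic.operations import Operations, MigrateOperation
    from alembic.autogenerate import renderers

    @Operations.register_operation("create_sequence")
    class CreateSequenceOp(MigrateOperation):
        """Hypothetical custom operation: CREATE SEQUENCE."""

        def __init__(self, sequence_name, schema=None):
            self.sequence_name = sequence_name
            self.schema = schema

        @classmethod
        def create_sequence(cls, operations, sequence_name, **kw):
            # makes op.create_sequence(...) available in migration scripts
            return operations.invoke(CreateSequenceOp(sequence_name, **kw))

    @renderers.dispatch_for(CreateSequenceOp)
    def render_create_sequence(autogen_context, op):
        # the source line autogenerate will write for this operation
        return "op.create_sequence(%r, schema=%r)" % (
            op.sequence_name, op.schema)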
alembic/command.py
@@ -1,8 +1,9 @@
import os

from .script import ScriptDirectory
from .environment import EnvironmentContext
from . import util, autogenerate as autogen
from .runtime.environment import EnvironmentContext
from . import util
from . import autogenerate as autogen


def list_templates(config):
@@ -70,12 +71,16 @@ def revision(
        version_path=None, rev_id=None):
    """Create a new revision file."""

    script = ScriptDirectory.from_config(config)
    template_args = {
        'config': config  # Let templates use config for
                          # e.g. multiple databases
    }
    imports = set()
    script_directory = ScriptDirectory.from_config(config)

    command_args = dict(
        message=message,
        autogenerate=autogenerate,
        sql=sql, head=head, splice=splice, branch_label=branch_label,
        version_path=version_path, rev_id=rev_id
    )
    revision_context = autogen.RevisionContext(
        config, script_directory, command_args)

    environment = util.asbool(
        config.get_main_option("revision_environment")
@@ -89,13 +94,11 @@ def revision(
                "Using --sql with --autogenerate does not make any sense")

        def retrieve_migrations(rev, context):
            if set(script.get_revisions(rev)) != \
                    set(script.get_revisions("heads")):
                raise util.CommandError("Target database is not up to date.")
            autogen._produce_migration_diffs(context, template_args, imports)
            revision_context.run_autogenerate(rev, context)
            return []
    elif environment:
        def retrieve_migrations(rev, context):
            revision_context.run_no_autogenerate(rev, context)
            return []
    elif sql:
        raise util.CommandError(
@@ -105,16 +108,22 @@ def revision(
    if environment:
        with EnvironmentContext(
            config,
            script,
            script_directory,
            fn=retrieve_migrations,
            as_sql=sql,
            template_args=template_args,
            template_args=revision_context.template_args,
            revision_context=revision_context
        ):
            script.run_env()
    return script.generate_revision(
        rev_id or util.rev_id(), message, refresh=True,
        head=head, splice=splice, branch_labels=branch_label,
        version_path=version_path, **template_args)
            script_directory.run_env()

    scripts = [
        script for script in
        revision_context.generate_scripts()
    ]
    if len(scripts) == 1:
        return scripts[0]
    else:
        return scripts


def merge(config, revisions, message=None, branch_label=None, rev_id=None):
alembic/config.py
@@ -1,10 +1,13 @@
from argparse import ArgumentParser
from .compat import SafeConfigParser
from .util.compat import SafeConfigParser
import inspect
import os
import sys

from . import command, util, package_dir, compat
from . import command
from . import util
from . import package_dir
from .util import compat


class Config(object):
@@ -127,7 +130,7 @@ class Config(object):
    This is a utility dictionary which can include not just strings but
    engines, connections, schema objects, or anything else.
    Use this to pass objects into an env.py script, such as passing
    a :class:`.Connection` when calling
    a :class:`sqlalchemy.engine.base.Connection` when calling
    commands from :mod:`alembic.command` programmatically.

    .. versionadded:: 0.7.5
@@ -152,7 +155,7 @@ class Config(object):

    @util.memoized_property
    def file_config(self):
        """Return the underlying :class:`ConfigParser` object.
        """Return the underlying ``ConfigParser`` object.

        Direct access to the .ini file is available here,
        though the :meth:`.Config.get_section` and
alembic/context.py
@@ -1,6 +1,5 @@
from .environment import EnvironmentContext
from . import util
from .runtime.environment import EnvironmentContext

# create proxy functions for
# each method on the EnvironmentContext class.
util.create_module_class_proxy(EnvironmentContext, globals(), locals())
EnvironmentContext.create_module_class_proxy(globals(), locals())
@ -1,13 +1,16 @@
import functools

from sqlalchemy.ext.compiler import compiles
from sqlalchemy.schema import DDLElement, Column, \
    ForeignKeyConstraint, CheckConstraint
from sqlalchemy.schema import DDLElement, Column
from sqlalchemy import Integer
from sqlalchemy import types as sqltypes
from sqlalchemy.sql.visitors import traverse
from .. import util

# backwards compat
from ..util.sqla_compat import (  # noqa
    _table_for_constraint,
    _columns_for_constraint, _fk_spec, _is_type_bound, _find_columns)

if util.sqla_09:
    from sqlalchemy.sql.elements import quoted_name

@ -154,65 +157,6 @@ def visit_column_default(element, compiler, **kw):
    )


def _table_for_constraint(constraint):
    if isinstance(constraint, ForeignKeyConstraint):
        return constraint.parent
    else:
        return constraint.table


def _columns_for_constraint(constraint):
    if isinstance(constraint, ForeignKeyConstraint):
        return [fk.parent for fk in constraint.elements]
    elif isinstance(constraint, CheckConstraint):
        return _find_columns(constraint.sqltext)
    else:
        return list(constraint.columns)


def _fk_spec(constraint):
    if util.sqla_100:
        source_columns = [
            constraint.columns[key].name for key in constraint.column_keys]
    else:
        source_columns = [
            element.parent.name for element in constraint.elements]

    source_table = constraint.parent.name
    source_schema = constraint.parent.schema
    target_schema = constraint.elements[0].column.table.schema
    target_table = constraint.elements[0].column.table.name
    target_columns = [element.column.name for element in constraint.elements]

    return (
        source_schema, source_table,
        source_columns, target_schema, target_table, target_columns)


def _is_type_bound(constraint):
    # this deals with SQLAlchemy #3260, don't copy CHECK constraints
    # that will be generated by the type.
    if util.sqla_100:
        # new feature added for #3260
        return constraint._type_bound
    else:
        # old way, look at what we know Boolean/Enum to use
        return (
            constraint._create_rule is not None and
            isinstance(
                getattr(constraint._create_rule, "target", None),
                sqltypes.SchemaType)
        )


def _find_columns(clause):
    """locate Column objects within the given expression."""

    cols = set()
    traverse(clause, {}, {'column': cols.add})
    return cols


def quote_dotted(name, quote):
    """quote the elements of a dotted name"""

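The helpers removed above (``_table_for_constraint``, ``_columns_for_constraint``, ``_fk_spec``, ``_is_type_bound``, ``_find_columns``) now live in ``alembic.util.sqla_compat``; the ``# backwards compat`` re-import keeps the old names importable from ``alembic.ddl.base``. A sketch of the two equivalent import paths after this commit::

    # old location, still working via the backwards-compat re-export
    from alembic.ddl.base import _fk_spec

    # new canonical location
    from alembic.util.sqla_compat import _fk_spec
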
@ -1,17 +1,13 @@
from sqlalchemy.sql.expression import _BindParamClause
from sqlalchemy.ext.compiler import compiles
from sqlalchemy import schema, text, sql
from sqlalchemy import schema, text
from sqlalchemy import types as sqltypes

from ..compat import string_types, text_type, with_metaclass
from ..util.compat import (
    string_types, text_type, with_metaclass
)
from ..util import sqla_compat
from .. import util
from . import base

if util.sqla_08:
    from sqlalchemy.sql.expression import TextClause
else:
    from sqlalchemy.sql.expression import _TextClause as TextClause


class ImplMeta(type):

@ -221,8 +217,10 @@ class DefaultImpl(with_metaclass(ImplMeta)):
            for row in rows:
                self._exec(table.insert(inline=True).values(**dict(
                    (k,
                     _literal_bindparam(k, v, type_=table.c[k].type)
                     if not isinstance(v, _literal_bindparam) else v)
                     sqla_compat._literal_bindparam(
                         k, v, type_=table.c[k].type)
                     if not isinstance(
                         v, sqla_compat._literal_bindparam) else v)
                    for k, v in row.items()
                )))
        else:
@ -320,61 +318,6 @@ class DefaultImpl(with_metaclass(ImplMeta)):
        self.static_output("COMMIT" + self.command_terminator)


class _literal_bindparam(_BindParamClause):
    pass


@compiles(_literal_bindparam)
def _render_literal_bindparam(element, compiler, **kw):
    return compiler.render_literal_bindparam(element, **kw)


def _textual_index_column(table, text_):
    """a workaround for the Index construct's severe lack of flexibility"""
    if isinstance(text_, string_types):
        c = schema.Column(text_, sqltypes.NULLTYPE)
        table.append_column(c)
        return c
    elif isinstance(text_, TextClause):
        return _textual_index_element(table, text_)
    else:
        raise ValueError("String or text() construct expected")


class _textual_index_element(sql.ColumnElement):
    """Wrap around a sqlalchemy text() construct in such a way that
    we appear like a column-oriented SQL expression to an Index
    construct.

    The issue here is that currently the Postgresql dialect, the biggest
    recipient of functional indexes, keys all the index expressions to
    the corresponding column expressions when rendering CREATE INDEX,
    so the Index we create here needs to have a .columns collection that
    is the same length as the .expressions collection.  Ultimately
    SQLAlchemy should support text() expressions in indexes.

    See https://bitbucket.org/zzzeek/sqlalchemy/issue/3174/\
    support-text-sent-to-indexes

    """
    __visit_name__ = '_textual_idx_element'

    def __init__(self, table, text):
        self.table = table
        self.text = text
        self.key = text.text
        self.fake_column = schema.Column(self.text.text, sqltypes.NULLTYPE)
        table.append_column(self.fake_column)

    def get_children(self):
        return [self.fake_column]


@compiles(_textual_index_element)
def _render_textual_index_column(element, compiler, **kw):
    return compiler.process(element.text, **kw)


def _string_compare(t1, t2):
    return \
        t1.length is not None and \

@ -39,11 +39,10 @@ class MSSQLImpl(DefaultImpl):
                     name=None,
                     type_=None,
                     schema=None,
                     autoincrement=None,
                     existing_type=None,
                     existing_server_default=None,
                     existing_nullable=None,
                     existing_autoincrement=None
                     **kw
                     ):

        if nullable is not None and existing_type is None:
@ -63,10 +62,9 @@ class MSSQLImpl(DefaultImpl):
                nullable=nullable,
                type_=type_,
                schema=schema,
                autoincrement=autoincrement,
                existing_type=existing_type,
                existing_nullable=existing_nullable,
                existing_autoincrement=existing_autoincrement
                **kw
            )

        if server_default is not False:

@ -2,7 +2,7 @@ from sqlalchemy.ext.compiler import compiles
from sqlalchemy import types as sqltypes
from sqlalchemy import schema

from ..compat import string_types
from ..util.compat import string_types
from .. import util
from .impl import DefaultImpl
from .base import ColumnNullable, ColumnName, ColumnDefault, \
@ -23,11 +23,12 @@ class MySQLImpl(DefaultImpl):
                     name=None,
                     type_=None,
                     schema=None,
                     autoincrement=None,
                     existing_type=None,
                     existing_server_default=None,
                     existing_nullable=None,
                     existing_autoincrement=None
                     autoincrement=None,
                     existing_autoincrement=None,
                     **kw
                     ):
        if name is not None:
            self._exec(
@ -284,3 +285,5 @@ def _mysql_drop_constraint(element, compiler, **kw):
        raise NotImplementedError(
            "No generic 'DROP CONSTRAINT' in MySQL - "
            "please specify constraint type")


@ -1,6 +1,6 @@
import re

from .. import compat
from ..util import compat
from .. import util
from .base import compiles, alter_table, format_table_name, RenameTable
from .impl import DefaultImpl

@ -1,6 +1,6 @@
from .operations import Operations
from . import util
from .operations.base import Operations

# create proxy functions for
# each method on the Operations class.
util.create_module_class_proxy(Operations, globals(), locals())
Operations.create_module_class_proxy(globals(), locals())

6 alembic/operations/__init__.py Normal file
@ -0,0 +1,6 @@
from .base import Operations, BatchOperations
from .ops import MigrateOperation
from . import toimpl


__all__ = ['Operations', 'BatchOperations', 'MigrateOperation']
442 alembic/operations/base.py Normal file
@ -0,0 +1,442 @@
from contextlib import contextmanager

from .. import util
from ..util import sqla_compat
from . import batch
from . import schemaobj
from ..util.compat import exec_
import textwrap
import inspect

__all__ = ('Operations', 'BatchOperations')

try:
    from sqlalchemy.sql.naming import conv
except:
    conv = None


class Operations(util.ModuleClsProxy):

    """Define high level migration operations.

    Each operation corresponds to some schema migration operation,
    executed against a particular :class:`.MigrationContext`
    which in turn represents connectivity to a database,
    or a file output stream.

    While :class:`.Operations` is normally configured as
    part of the :meth:`.EnvironmentContext.run_migrations`
    method called from an ``env.py`` script, a standalone
    :class:`.Operations` instance can be
    made for use cases external to regular Alembic
    migrations by passing in a :class:`.MigrationContext`::

        from alembic.migration import MigrationContext
        from alembic.operations import Operations

        conn = myengine.connect()
        ctx = MigrationContext.configure(conn)
        op = Operations(ctx)

        op.alter_column("t", "c", nullable=True)

    Note that as of 0.8, most of the methods on this class are produced
    dynamically using the :meth:`.Operations.register_operation`
    method.

    """

    _to_impl = util.Dispatcher()

    def __init__(self, migration_context, impl=None):
        """Construct a new :class:`.Operations`

        :param migration_context: a :class:`.MigrationContext`
         instance.

        """
        self.migration_context = migration_context
        if impl is None:
            self.impl = migration_context.impl
        else:
            self.impl = impl

        self.schema_obj = schemaobj.SchemaObjects(migration_context)

    @classmethod
    def register_operation(cls, name, sourcename=None):
        """Register a new operation for this class.

        This method is normally used to add new operations
        to the :class:`.Operations` class, and possibly the
        :class:`.BatchOperations` class as well.   All Alembic migration
        operations are implemented via this system, however the system
        is also available as a public API to facilitate adding custom
        operations.

        .. versionadded:: 0.8.0

        .. seealso::

            :ref:`operation_plugins`


        """
        def register(op_cls):
            if sourcename is None:
                fn = getattr(op_cls, name)
                source_name = fn.__name__
            else:
                fn = getattr(op_cls, sourcename)
                source_name = fn.__name__

            spec = inspect.getargspec(fn)

            name_args = spec[0]
            assert name_args[0:2] == ['cls', 'operations']

            name_args[0:2] = ['self']

            args = inspect.formatargspec(*spec)
            num_defaults = len(spec[3]) if spec[3] else 0
            if num_defaults:
                defaulted_vals = name_args[0 - num_defaults:]
            else:
                defaulted_vals = ()

            apply_kw = inspect.formatargspec(
                name_args, spec[1], spec[2],
                defaulted_vals,
                formatvalue=lambda x: '=' + x)

            func_text = textwrap.dedent("""\
            def %(name)s%(args)s:
                %(doc)r
                return op_cls.%(source_name)s%(apply_kw)s
            """ % {
                'name': name,
                'source_name': source_name,
                'args': args,
                'apply_kw': apply_kw,
                'doc': fn.__doc__,
                'meth': fn.__name__
            })
            globals_ = {'op_cls': op_cls}
            lcl = {}
            exec_(func_text, globals_, lcl)
            setattr(cls, name, lcl[name])
            fn.__func__.__doc__ = "This method is proxied on "\
                "the :class:`.%s` class, via the :meth:`.%s.%s` method." % (
                    cls.__name__, cls.__name__, name
                )
            return op_cls
        return register

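For illustration, a custom operation might be registered against this hook roughly as follows; ``CreateSequenceOp`` and ``create_sequence`` are hypothetical names, not part of this commit::

    from alembic.operations import Operations, MigrateOperation

    @Operations.register_operation("create_sequence")
    class CreateSequenceOp(MigrateOperation):
        """A hypothetical "CREATE SEQUENCE" directive."""

        def __init__(self, sequence_name, schema=None):
            self.sequence_name = sequence_name
            self.schema = schema

        @classmethod
        def create_sequence(cls, operations, sequence_name, **kw):
            # note the (cls, operations, ...) prefix asserted by register()
            op = cls(sequence_name, **kw)
            return operations.invoke(op)
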
    @classmethod
    def implementation_for(cls, op_cls):
        """Register an implementation for a given :class:`.MigrateOperation`.

        This is part of the operation extensibility API.

        .. seealso::

            :ref:`operation_plugins` - example of use

        """

        def decorate(fn):
            cls._to_impl.dispatch_for(op_cls)(fn)
            return fn
        return decorate

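Continuing the hypothetical sketch above, an implementation would then be attached via this decorator, receiving the :class:`.Operations` instance and the operation object::

    @Operations.implementation_for(CreateSequenceOp)
    def create_sequence(operations, operation):
        # emit the DDL through the active migration context
        operations.execute(
            "CREATE SEQUENCE %s" % operation.sequence_name)
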
    @classmethod
    @contextmanager
    def context(cls, migration_context):
        op = Operations(migration_context)
        op._install_proxy()
        yield op
        op._remove_proxy()

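The ``context()`` helper above pairs a standalone :class:`.Operations` with the module-level proxying, so that ``alembic.op`` works outside of a normal migration run; a brief sketch (``conn`` is assumed to be an existing connection)::

    from alembic.migration import MigrationContext
    from alembic.operations import Operations

    ctx = MigrationContext.configure(conn)
    with Operations.context(ctx):
        from alembic import op
        op.alter_column("t", "c", nullable=True)
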
    @contextmanager
    def batch_alter_table(
            self, table_name, schema=None, recreate="auto", copy_from=None,
            table_args=(), table_kwargs=util.immutabledict(),
            reflect_args=(), reflect_kwargs=util.immutabledict(),
            naming_convention=None):
        """Invoke a series of per-table migrations in batch.

        Batch mode allows a series of operations specific to a table
        to be syntactically grouped together, and allows for alternate
        modes of table migration, in particular the "recreate" style of
        migration required by SQLite.

        "recreate" style is as follows:

        1. A new table is created with the new specification, based on the
           migration directives within the batch, using a temporary name.

        2. the data copied from the existing table to the new table.

        3. the existing table is dropped.

        4. the new table is renamed to the existing table name.

        The directive by default will only use "recreate" style on the
        SQLite backend, and only if directives are present which require
        this form, e.g. anything other than ``add_column()``.  The batch
        operation on other backends will proceed using standard ALTER TABLE
        operations.

        The method is used as a context manager, which returns an instance
        of :class:`.BatchOperations`; this object is the same as
        :class:`.Operations` except that table names and schema names
        are omitted.  E.g.::

            with op.batch_alter_table("some_table") as batch_op:
                batch_op.add_column(Column('foo', Integer))
                batch_op.drop_column('bar')

        The operations within the context manager are invoked at once
        when the context is ended.   When run against SQLite, if the
        migrations include operations not supported by SQLite's ALTER TABLE,
        the entire table will be copied to a new one with the new
        specification, moving all data across as well.

        The copy operation by default uses reflection to retrieve the current
        structure of the table, and therefore :meth:`.batch_alter_table`
        in this mode requires that the migration is run in "online" mode.
        The ``copy_from`` parameter may be passed which refers to an existing
        :class:`.Table` object, which will bypass this reflection step.

        .. note::  The table copy operation will currently not copy
           CHECK constraints, and may not copy UNIQUE constraints that are
           unnamed, as is possible on SQLite.   See the section
           :ref:`sqlite_batch_constraints` for workarounds.

        :param table_name: name of table
        :param schema: optional schema name.
        :param recreate: under what circumstances the table should be
         recreated. At its default of ``"auto"``, the SQLite dialect will
         recreate the table if any operations other than ``add_column()``,
         ``create_index()``, or ``drop_index()`` are
         present. Other options include ``"always"`` and ``"never"``.
        :param copy_from: optional :class:`~sqlalchemy.schema.Table` object
         that will act as the structure of the table being copied.  If omitted,
         table reflection is used to retrieve the structure of the table.

         .. versionadded:: 0.7.6 Fully implemented the
            :paramref:`~.Operations.batch_alter_table.copy_from`
            parameter.

         .. seealso::

            :ref:`batch_offline_mode`

            :paramref:`~.Operations.batch_alter_table.reflect_args`

            :paramref:`~.Operations.batch_alter_table.reflect_kwargs`

        :param reflect_args: a sequence of additional positional arguments that
         will be applied to the table structure being reflected / copied;
         this may be used to pass column and constraint overrides to the
         table that will be reflected, in lieu of passing the whole
         :class:`~sqlalchemy.schema.Table` using
         :paramref:`~.Operations.batch_alter_table.copy_from`.

         .. versionadded:: 0.7.1

        :param reflect_kwargs: a dictionary of additional keyword arguments
         that will be applied to the table structure being copied; this may be
         used to pass additional table and reflection options to the table that
         will be reflected, in lieu of passing the whole
         :class:`~sqlalchemy.schema.Table` using
         :paramref:`~.Operations.batch_alter_table.copy_from`.

         .. versionadded:: 0.7.1

        :param table_args: a sequence of additional positional arguments that
         will be applied to the new :class:`~sqlalchemy.schema.Table` when
         created, in addition to those copied from the source table.
         This may be used to provide additional constraints such as CHECK
         constraints that may not be reflected.
        :param table_kwargs: a dictionary of additional keyword arguments
         that will be applied to the new :class:`~sqlalchemy.schema.Table`
         when created, in addition to those copied from the source table.
         This may be used to provide for additional table options that may
         not be reflected.

         .. versionadded:: 0.7.0

        :param naming_convention: a naming convention dictionary of the form
         described at :ref:`autogen_naming_conventions` which will be applied
         to the :class:`~sqlalchemy.schema.MetaData` during the reflection
         process.  This is typically required if one wants to drop SQLite
         constraints, as these constraints will not have names when
         reflected on this backend.  Requires SQLAlchemy **0.9.4** or greater.

         .. seealso::

            :ref:`dropping_sqlite_foreign_keys`

         .. versionadded:: 0.7.1

        .. note:: batch mode requires SQLAlchemy 0.8 or above.

        .. seealso::

            :ref:`batch_migrations`

        """
        impl = batch.BatchOperationsImpl(
            self, table_name, schema, recreate,
            copy_from, table_args, table_kwargs, reflect_args,
            reflect_kwargs, naming_convention)
        batch_op = BatchOperations(self.migration_context, impl=impl)
        yield batch_op
        impl.flush()

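Per the ``copy_from`` documentation above, offline (``--sql``) batch migrations can skip the reflection step by handing over a full :class:`~sqlalchemy.schema.Table`; a hedged sketch with a hypothetical table::

    from sqlalchemy import Table, MetaData, Column, Integer, String

    account = Table(
        'account', MetaData(),
        Column('id', Integer, primary_key=True),
        Column('name', String(50)))

    with op.batch_alter_table("account", copy_from=account) as batch_op:
        batch_op.drop_column('name')
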
    def get_context(self):
        """Return the :class:`.MigrationContext` object that's
        currently in use.

        """

        return self.migration_context

    def invoke(self, operation):
        """Given a :class:`.MigrateOperation`, invoke it in terms of
        this :class:`.Operations` instance.

        .. versionadded:: 0.8.0

        """
        fn = self._to_impl.dispatch(
            operation, self.migration_context.impl.__dialect__)
        return fn(self, operation)

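``invoke()`` is the bridge between the new operation objects and the dialect implementations; any :class:`.MigrateOperation` can be constructed directly and handed to it. A sketch using one of the built-in ops added by this commit (the table and column names are illustrative)::

    import sqlalchemy as sa
    from alembic.operations import ops

    op.invoke(
        ops.AddColumnOp(
            "account", sa.Column("timestamp", sa.DateTime())))
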
    def f(self, name):
        """Indicate a string name that has already had a naming convention
        applied to it.

        This feature combines with the SQLAlchemy ``naming_convention`` feature
        to disambiguate constraint names that have already had naming
        conventions applied to them, versus those that have not.  This is
        necessary in the case that the ``"%(constraint_name)s"`` token
        is used within a naming convention, so that it can be identified
        that this particular name should remain fixed.

        If the :meth:`.Operations.f` is used on a constraint, the naming
        convention will not take effect::

            op.add_column('t', 'x', Boolean(name=op.f('ck_bool_t_x')))

        Above, the CHECK constraint generated will have the name
        ``ck_bool_t_x`` regardless of whether or not a naming convention is
        in use.

        Alternatively, if a naming convention is in use, and 'f' is not used,
        names will be converted along conventions.  If the ``target_metadata``
        contains the naming convention
        ``{"ck": "ck_bool_%(table_name)s_%(constraint_name)s"}``, then the
        output of the following:

            op.add_column('t', 'x', Boolean(name='x'))

        will be::

            CONSTRAINT ck_bool_t_x CHECK (x in (1, 0)))

        The function is rendered in the output of autogenerate when
        a particular constraint name is already converted, for SQLAlchemy
        version **0.9.4 and greater only**.   Even though ``naming_convention``
        was introduced in 0.9.2, the string disambiguation service is new
        as of 0.9.4.

        .. versionadded:: 0.6.4

        """
        if conv:
            return conv(name)
        else:
            raise NotImplementedError(
                "op.f() feature requires SQLAlchemy 0.9.4 or greater.")

    def inline_literal(self, value, type_=None):
        """Produce an 'inline literal' expression, suitable for
        using in an INSERT, UPDATE, or DELETE statement.

        When using Alembic in "offline" mode, CRUD operations
        aren't compatible with SQLAlchemy's default behavior surrounding
        literal values,
        which is that they are converted into bound values and passed
        separately into the ``execute()`` method of the DBAPI cursor.
        An offline SQL
        script needs to have these rendered inline.  While it should
        always be noted that inline literal values are an **enormous**
        security hole in an application that handles untrusted input,
        a schema migration is not run in this context, so
        literals are safe to render inline, with the caveat that
        advanced types like dates may not be supported directly
        by SQLAlchemy.

        See :meth:`.execute` for an example usage of
        :meth:`.inline_literal`.

        The environment can also be configured to attempt to render
        "literal" values inline automatically, for those simple types
        that are supported by the dialect; see
        :paramref:`.EnvironmentContext.configure.literal_binds` for this
        more recently added feature.

        :param value: The value to render.  Strings, integers, and simple
         numerics should be supported.   Other types like boolean,
         dates, etc. may or may not be supported yet by various
         backends.
        :param ``type_``: optional - a :class:`sqlalchemy.types.TypeEngine`
         subclass stating the type of this value.  In SQLAlchemy
         expressions, this is usually derived automatically
         from the Python type of the value itself, as well as
         based on the context in which the value is used.

        .. seealso::

            :paramref:`.EnvironmentContext.configure.literal_binds`

        """
        return sqla_compat._literal_bindparam(None, value, type_=type_)

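A typical offline-safe data update using ``inline_literal()``, in the style of the :meth:`.execute` documentation (``account`` is an assumed table construct)::

    from sqlalchemy.sql import table, column
    from sqlalchemy import String

    account = table('account', column('name', String))
    op.execute(
        account.update().
        where(account.c.name == op.inline_literal('account 1')).
        values({'name': op.inline_literal('account 2')}))
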
    def get_bind(self):
        """Return the current 'bind'.

        Under normal circumstances, this is the
        :class:`~sqlalchemy.engine.Connection` currently being used
        to emit SQL to the database.

        In a SQL script context, this value is ``None``. [TODO: verify this]

        """
        return self.migration_context.impl.bind


class BatchOperations(Operations):
    """Modifies the interface :class:`.Operations` for batch mode.

    This basically omits the ``table_name`` and ``schema`` parameters
    from associated methods, as these are a given when running under batch
    mode.

    .. seealso::

        :meth:`.Operations.batch_alter_table`

    Note that as of 0.8, most of the methods on this class are produced
    dynamically using the :meth:`.Operations.register_operation`
    method.

    """

    def _noop(self, operation):
        raise NotImplementedError(
            "The %s method does not apply to a batch table alter operation."
            % operation)
@ -3,8 +3,8 @@ from sqlalchemy import Table, MetaData, Index, select, Column, \
from sqlalchemy import types as sqltypes
from sqlalchemy import schema as sql_schema
from sqlalchemy.util import OrderedDict
from . import util
from .ddl.base import _columns_for_constraint, _is_type_bound
from .. import util
from ..util.sqla_compat import _columns_for_constraint, _is_type_bound


class BatchOperationsImpl(object):
File diff suppressed because it is too large (Load Diff)
157 alembic/operations/schemaobj.py Normal file
@ -0,0 +1,157 @@
from sqlalchemy import schema as sa_schema
from sqlalchemy.types import NULLTYPE, Integer
from ..util.compat import string_types
from .. import util


class SchemaObjects(object):

    def __init__(self, migration_context=None):
        self.migration_context = migration_context

    def primary_key_constraint(self, name, table_name, cols, schema=None):
        m = self.metadata()
        columns = [sa_schema.Column(n, NULLTYPE) for n in cols]
        t1 = sa_schema.Table(table_name, m,
                             *columns,
                             schema=schema)
        p = sa_schema.PrimaryKeyConstraint(*columns, name=name)
        t1.append_constraint(p)
        return p

    def foreign_key_constraint(
            self, name, source, referent,
            local_cols, remote_cols,
            onupdate=None, ondelete=None,
            deferrable=None, source_schema=None,
            referent_schema=None, initially=None,
            match=None, **dialect_kw):
        m = self.metadata()
        if source == referent:
            t1_cols = local_cols + remote_cols
        else:
            t1_cols = local_cols
            sa_schema.Table(
                referent, m,
                *[sa_schema.Column(n, NULLTYPE) for n in remote_cols],
                schema=referent_schema)

        t1 = sa_schema.Table(
            source, m,
            *[sa_schema.Column(n, NULLTYPE) for n in t1_cols],
            schema=source_schema)

        tname = "%s.%s" % (referent_schema, referent) if referent_schema \
            else referent

        if util.sqla_08:
            # "match" kw unsupported in 0.7
            dialect_kw['match'] = match

        f = sa_schema.ForeignKeyConstraint(local_cols,
                                           ["%s.%s" % (tname, n)
                                            for n in remote_cols],
                                           name=name,
                                           onupdate=onupdate,
                                           ondelete=ondelete,
                                           deferrable=deferrable,
                                           initially=initially,
                                           **dialect_kw
                                           )
        t1.append_constraint(f)

        return f

    def unique_constraint(self, name, source, local_cols, schema=None, **kw):
        t = sa_schema.Table(
            source, self.metadata(),
            *[sa_schema.Column(n, NULLTYPE) for n in local_cols],
            schema=schema)
        kw['name'] = name
        uq = sa_schema.UniqueConstraint(*[t.c[n] for n in local_cols], **kw)
        # TODO: need event tests to ensure the event
        # is fired off here
        t.append_constraint(uq)
        return uq

    def check_constraint(self, name, source, condition, schema=None, **kw):
        t = sa_schema.Table(source, self.metadata(),
                            sa_schema.Column('x', Integer), schema=schema)
        ck = sa_schema.CheckConstraint(condition, name=name, **kw)
        t.append_constraint(ck)
        return ck

    def generic_constraint(self, name, table_name, type_, schema=None, **kw):
        t = self.table(table_name, schema=schema)
        types = {
            'foreignkey': lambda name: sa_schema.ForeignKeyConstraint(
                [], [], name=name),
            'primary': sa_schema.PrimaryKeyConstraint,
            'unique': sa_schema.UniqueConstraint,
            'check': lambda name: sa_schema.CheckConstraint("", name=name),
            None: sa_schema.Constraint
        }
        try:
            const = types[type_]
        except KeyError:
            raise TypeError("'type' can be one of %s" %
                            ", ".join(sorted(repr(x) for x in types)))
        else:
            const = const(name=name)
            t.append_constraint(const)
            return const

    def metadata(self):
        kw = {}
        if self.migration_context is not None and \
                'target_metadata' in self.migration_context.opts:
            mt = self.migration_context.opts['target_metadata']
            if hasattr(mt, 'naming_convention'):
                kw['naming_convention'] = mt.naming_convention
        return sa_schema.MetaData(**kw)

    def table(self, name, *columns, **kw):
        m = self.metadata()
        t = sa_schema.Table(name, m, *columns, **kw)
        for f in t.foreign_keys:
            self._ensure_table_for_fk(m, f)
        return t

    def column(self, name, type_, **kw):
        return sa_schema.Column(name, type_, **kw)

    def index(self, name, tablename, columns, schema=None, **kw):
        t = sa_schema.Table(
            tablename or 'no_table', self.metadata(),
            schema=schema
        )
        idx = sa_schema.Index(
            name,
            *[util.sqla_compat._textual_index_column(t, n) for n in columns],
            **kw)
        return idx

    def _parse_table_key(self, table_key):
        if '.' in table_key:
            tokens = table_key.split('.')
            sname = ".".join(tokens[0:-1])
            tname = tokens[-1]
        else:
            tname = table_key
            sname = None
        return (sname, tname)

    def _ensure_table_for_fk(self, metadata, fk):
        """create a placeholder Table object for the referent of a
        ForeignKey.

        """
        if isinstance(fk._colspec, string_types):
            table_key, cname = fk._colspec.rsplit('.', 1)
            sname, tname = self._parse_table_key(table_key)
            if table_key not in metadata.tables:
                rel_t = sa_schema.Table(tname, metadata, schema=sname)
            else:
                rel_t = metadata.tables[table_key]
            if cname not in rel_t.c:
                rel_t.append_column(sa_schema.Column(cname, NULLTYPE))
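The :class:`.SchemaObjects` factory above is what the operation objects use to turn plain names into throwaway SQLAlchemy constructs; a small sketch of direct, standalone use (no migration context, names illustrative)::

    from alembic.operations.schemaobj import SchemaObjects

    so = SchemaObjects()
    uq = so.unique_constraint("uq_user_email", "user", ["email"])
    fk = so.foreign_key_constraint(
        "fk_order_user", "order", "user", ["user_id"], ["id"])
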
162 alembic/operations/toimpl.py Normal file
@ -0,0 +1,162 @@
from . import ops

from . import Operations
from sqlalchemy import schema as sa_schema


@Operations.implementation_for(ops.AlterColumnOp)
def alter_column(operations, operation):

    compiler = operations.impl.dialect.statement_compiler(
        operations.impl.dialect,
        None
    )

    existing_type = operation.existing_type
    existing_nullable = operation.existing_nullable
    existing_server_default = operation.existing_server_default
    type_ = operation.modify_type
    column_name = operation.column_name
    table_name = operation.table_name
    schema = operation.schema
    server_default = operation.modify_server_default
    new_column_name = operation.modify_name
    nullable = operation.modify_nullable

    def _count_constraint(constraint):
        return not isinstance(
            constraint,
            sa_schema.PrimaryKeyConstraint) and \
            (not constraint._create_rule or
                constraint._create_rule(compiler))

    if existing_type and type_:
        t = operations.schema_obj.table(
            table_name,
            sa_schema.Column(column_name, existing_type),
            schema=schema
        )
        for constraint in t.constraints:
            if _count_constraint(constraint):
                operations.impl.drop_constraint(constraint)

    operations.impl.alter_column(
        table_name, column_name,
        nullable=nullable,
        server_default=server_default,
        name=new_column_name,
        type_=type_,
        schema=schema,
        existing_type=existing_type,
        existing_server_default=existing_server_default,
        existing_nullable=existing_nullable,
        **operation.kw
    )

    if type_:
        t = operations.schema_obj.table(
            table_name,
            operations.schema_obj.column(column_name, type_),
            schema=schema
        )
        for constraint in t.constraints:
            if _count_constraint(constraint):
                operations.impl.add_constraint(constraint)


@Operations.implementation_for(ops.DropTableOp)
def drop_table(operations, operation):
    operations.impl.drop_table(
        operation.to_table(operations.migration_context)
    )


@Operations.implementation_for(ops.DropColumnOp)
def drop_column(operations, operation):
    column = operation.to_column(operations.migration_context)
    operations.impl.drop_column(
        operation.table_name,
        column,
        schema=operation.schema,
        **operation.kw
    )


@Operations.implementation_for(ops.CreateIndexOp)
def create_index(operations, operation):
    idx = operation.to_index(operations.migration_context)
    operations.impl.create_index(idx)


@Operations.implementation_for(ops.DropIndexOp)
def drop_index(operations, operation):
    operations.impl.drop_index(
        operation.to_index(operations.migration_context)
    )


@Operations.implementation_for(ops.CreateTableOp)
def create_table(operations, operation):
    table = operation.to_table(operations.migration_context)
    operations.impl.create_table(table)
    return table


@Operations.implementation_for(ops.RenameTableOp)
def rename_table(operations, operation):
    operations.impl.rename_table(
        operation.table_name,
        operation.new_table_name,
        schema=operation.schema)


@Operations.implementation_for(ops.AddColumnOp)
def add_column(operations, operation):
    table_name = operation.table_name
    column = operation.column
    schema = operation.schema

    t = operations.schema_obj.table(table_name, column, schema=schema)
    operations.impl.add_column(
        table_name,
        column,
        schema=schema
    )
    for constraint in t.constraints:
        if not isinstance(constraint, sa_schema.PrimaryKeyConstraint):
            operations.impl.add_constraint(constraint)
    for index in t.indexes:
        operations.impl.create_index(index)


@Operations.implementation_for(ops.AddConstraintOp)
def create_constraint(operations, operation):
    operations.impl.add_constraint(
        operation.to_constraint(operations.migration_context)
    )


@Operations.implementation_for(ops.DropConstraintOp)
def drop_constraint(operations, operation):
    operations.impl.drop_constraint(
        operations.schema_obj.generic_constraint(
            operation.constraint_name,
            operation.table_name,
            operation.constraint_type,
            schema=operation.schema,
        )
    )


@Operations.implementation_for(ops.BulkInsertOp)
def bulk_insert(operations, operation):
    operations.impl.bulk_insert(
        operation.table, operation.rows, multiinsert=operation.multiinsert)


@Operations.implementation_for(ops.ExecuteSQLOp)
def execute_sql(operations, operation):
    operations.migration_context.impl.execute(
        operation.sqltext,
        execution_options=operation.execution_options
    )
0 alembic/runtime/__init__.py Normal file
@ -1,9 +1,9 @@
from .operations import Operations
from ..operations import Operations
from .migration import MigrationContext
from . import util
from .. import util


class EnvironmentContext(object):
class EnvironmentContext(util.ModuleClsProxy):

    """Represent the state made available to an ``env.py`` script.

@ -96,14 +96,11 @@ class EnvironmentContext(object):
        be made available as ``from alembic import context``.

        """
        from .context import _install_proxy
        _install_proxy(self)
        self._install_proxy()
        return self

    def __exit__(self, *arg, **kw):
        from . import context, op
        context._remove_proxy()
        op._remove_proxy()
        self._remove_proxy()

    def is_offline_mode(self):
        """Return True if the current migrations environment
@ -293,6 +290,7 @@ class EnvironmentContext(object):
            include_symbol=None,
            include_object=None,
            include_schemas=False,
            process_revision_directives=None,
            compare_type=False,
            compare_server_default=False,
            render_item=None,
@ -656,6 +654,43 @@ class EnvironmentContext(object):

            :ref:`autogen_module_prefix`

        :param process_revision_directives: a callable function that will
         be passed a structure representing the end result of an autogenerate
         or plain "revision" operation, which can be manipulated to affect
         how the ``alembic revision`` command ultimately outputs new
         revision scripts.   The structure of the callable is::

            def process_revision_directives(context, revision, directives):
                pass

         The ``directives`` parameter is a Python list containing
         a single :class:`.MigrationScript` directive, which represents
         the revision file to be generated.    This list as well as its
         contents may be freely modified to produce any set of commands.
         The section :ref:`customizing_revision` shows an example of
         doing this.  The ``context`` parameter is the
         :class:`.MigrationContext` in use,
         and ``revision`` is a tuple of revision identifiers representing the
         current revision of the database.

         The callable is invoked at all times when the ``--autogenerate``
         option is passed to ``alembic revision``.  If ``--autogenerate``
         is not passed, the callable is invoked only if the
         ``revision_environment`` variable is set to True in the Alembic
         configuration, in which case the given ``directives`` collection
         will contain empty :class:`.UpgradeOps` and :class:`.DowngradeOps`
         collections for ``.upgrade_ops`` and ``.downgrade_ops``.  The
         ``--autogenerate`` option itself can be inferred by inspecting
         ``context.config.cmd_opts.autogenerate``.

         .. versionadded:: 0.8.0

         .. seealso::

            :ref:`customizing_revision`


        Parameters specific to individual backends:

        :param mssql_batch_separator: The "batch separator" which will
@ -696,6 +731,8 @@ class EnvironmentContext(object):
        opts['alembic_module_prefix'] = alembic_module_prefix
        opts['user_module_prefix'] = user_module_prefix
        opts['literal_binds'] = literal_binds
        opts['process_revision_directives'] = process_revision_directives

        if render_item is not None:
            opts['render_item'] = render_item
        if compare_type is not None:
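The new hook documented above is the piece that closes #301: it lets an ``env.py`` rewrite or suppress the pending :class:`.MigrationScript`. A minimal sketch (the ``is_empty()`` check is an assumption about the ops API, shown for illustration only)::

    def process_revision_directives(context, revision, directives):
        script = directives[0]
        # suppress the revision file entirely if autogenerate
        # found nothing to do
        if script.upgrade_ops.is_empty():
            directives[:] = []

    context.configure(
        # ... other options ...
        process_revision_directives=process_revision_directives)
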
@ -6,8 +6,8 @@ from sqlalchemy import MetaData, Table, Column, String, literal_column
from sqlalchemy.engine.strategies import MockEngineStrategy
from sqlalchemy.engine import url as sqla_url

from .compat import callable, EncodedIO
from . import ddl, util
from ..util.compat import callable, EncodedIO
from .. import ddl, util

log = logging.getLogger(__name__)

3 alembic/script/__init__.py Normal file
@ -0,0 +1,3 @@
from .base import ScriptDirectory, Script  # noqa

__all__ = ['ScriptDirectory', 'Script']
@ -2,10 +2,10 @@ import datetime
import os
import re
import shutil
from . import util
from . import compat
from .. import util
from ..util import compat
from . import revision
from . import migration
from ..runtime import migration

from contextlib import contextmanager

@ -1,10 +1,9 @@
import re
import collections
import itertools

from . import util
from .. import util
from sqlalchemy import util as sqlautil
from . import compat
from ..util import compat

_relative_destination = re.compile(r'(?:(.+?)@)?(\w+)?((?:\+|-)\d+)')

@ -2,9 +2,9 @@ from __future__ import absolute_import


import re
from alembic import util
from .. import util
from sqlalchemy.engine import default
from alembic.compat import text_type, py3k
from ..util.compat import text_type, py3k
import contextlib
from sqlalchemy.util import decorator
from sqlalchemy import exc as sa_exc

@ -4,9 +4,9 @@ import os
import shutil
import textwrap

from alembic.compat import u
from alembic.script import Script, ScriptDirectory
from alembic import util
from ..util.compat import u
from ..script import Script, ScriptDirectory
from .. import util
from . import engines
from . import provision


@ -14,11 +14,12 @@ from .plugin.plugin_base import SkipTest
from sqlalchemy.util import decorator
from . import config
from sqlalchemy import util
from alembic import compat
from ..util import compat
import inspect
import contextlib
from .compat import get_url_driver_name, get_url_backend_name


def skip_if(predicate, reason=None):
    rule = compound()
    pred = _as_predicate(predicate, reason)

@ -5,13 +5,12 @@ import re
from sqlalchemy import create_engine, text, MetaData

import alembic
from alembic.compat import configparser
from alembic import util
from alembic.compat import string_types, text_type
from alembic.migration import MigrationContext
from alembic.environment import EnvironmentContext
from alembic.operations import Operations
from alembic.ddl.impl import _impls
from ..util.compat import configparser
from .. import util
from ..util.compat import string_types, text_type
from ..migration import MigrationContext
from ..environment import EnvironmentContext
from ..operations import Operations
from contextlib import contextmanager
from .plugin.plugin_base import SkipTest
from .assertions import _get_dialect, eq_

@ -12,7 +12,7 @@

"""
from __future__ import absolute_import
from alembic.compat import py33
from ..util.compat import py33

if py33:
    from unittest.mock import MagicMock, Mock, call, patch

@ -3,9 +3,9 @@
"""
from sqlalchemy.engine import url as sa_url
from sqlalchemy import text
from alembic import compat
from alembic.testing import config, engines
from alembic.testing.compat import get_url_backend_name
from ..util import compat
from . import config, engines
from .compat import get_url_backend_name

FOLLOWER_IDENT = None


405 alembic/util.py
@ -1,405 +0,0 @@
import sys
import os
import textwrap
import warnings
import re
import inspect
import uuid
import collections

from mako.template import Template
from sqlalchemy.engine import url
from sqlalchemy import __version__

from .compat import callable, exec_, load_module_py, load_module_pyc, \
    binary_type, string_types, py27


class CommandError(Exception):
    pass


def _safe_int(value):
    try:
        return int(value)
    except:
        return value
_vers = tuple(
    [_safe_int(x) for x in re.findall(r'(\d+|[abc]\d)', __version__)])
sqla_07 = _vers > (0, 7, 2)
sqla_079 = _vers >= (0, 7, 9)
sqla_08 = _vers >= (0, 8, 0)
sqla_083 = _vers >= (0, 8, 3)
sqla_084 = _vers >= (0, 8, 4)
sqla_09 = _vers >= (0, 9, 0)
sqla_092 = _vers >= (0, 9, 2)
sqla_094 = _vers >= (0, 9, 4)
sqla_094 = _vers >= (0, 9, 4)
sqla_099 = _vers >= (0, 9, 9)
sqla_100 = _vers >= (1, 0, 0)
sqla_105 = _vers >= (1, 0, 5)
if not sqla_07:
    raise CommandError(
        "SQLAlchemy 0.7.3 or greater is required. ")

from sqlalchemy.util import format_argspec_plus, update_wrapper
from sqlalchemy.util.compat import inspect_getfullargspec

import logging
log = logging.getLogger(__name__)

if py27:
    # disable "no handler found" errors
    logging.getLogger('alembic').addHandler(logging.NullHandler())


try:
    import fcntl
    import termios
    import struct
    ioctl = fcntl.ioctl(0, termios.TIOCGWINSZ,
                        struct.pack('HHHH', 0, 0, 0, 0))
    _h, TERMWIDTH, _hp, _wp = struct.unpack('HHHH', ioctl)
    if TERMWIDTH <= 0:  # can occur if running in emacs pseudo-tty
        TERMWIDTH = None
except (ImportError, IOError):
    TERMWIDTH = None


def template_to_file(template_file, dest, output_encoding, **kw):
    with open(dest, 'wb') as f:
        template = Template(filename=template_file)
        f.write(
            template.render_unicode(**kw).encode(output_encoding)
        )


def create_module_class_proxy(cls, globals_, locals_):
    """Create module level proxy functions for the
    methods on a given class.

    The functions will have a compatible signature
    as the methods.   A proxy is established
    using the ``_install_proxy(obj)`` function,
    and removed using ``_remove_proxy()``, both
    installed by calling this function.

    """
    attr_names = set()

    def _install_proxy(obj):
        globals_['_proxy'] = obj
        for name in attr_names:
            globals_[name] = getattr(obj, name)

    def _remove_proxy():
        globals_['_proxy'] = None
        for name in attr_names:
            del globals_[name]

    globals_['_install_proxy'] = _install_proxy
    globals_['_remove_proxy'] = _remove_proxy

    def _create_op_proxy(name):
        fn = getattr(cls, name)
        spec = inspect.getargspec(fn)
        if spec[0] and spec[0][0] == 'self':
            spec[0].pop(0)
        args = inspect.formatargspec(*spec)
        num_defaults = 0
        if spec[3]:
            num_defaults += len(spec[3])
        name_args = spec[0]
        if num_defaults:
            defaulted_vals = name_args[0 - num_defaults:]
        else:
            defaulted_vals = ()

        apply_kw = inspect.formatargspec(
            name_args, spec[1], spec[2],
            defaulted_vals,
            formatvalue=lambda x: '=' + x)

        def _name_error(name):
            raise NameError(
                "Can't invoke function '%s', as the proxy object has "
                "not yet been "
                "established for the Alembic '%s' class.  "
                "Try placing this code inside a callable." % (
                    name, cls.__name__
                ))
        globals_['_name_error'] = _name_error

        func_text = textwrap.dedent("""\
        def %(name)s(%(args)s):
            %(doc)r
            try:
                p = _proxy
            except NameError:
                _name_error('%(name)s')
            return _proxy.%(name)s(%(apply_kw)s)
            e
        """ % {
            'name': name,
            'args': args[1:-1],
            'apply_kw': apply_kw[1:-1],
            'doc': fn.__doc__,
        })
        lcl = {}
        exec_(func_text, globals_, lcl)
        return lcl[name]

    for methname in dir(cls):
        if not methname.startswith('_'):
            if callable(getattr(cls, methname)):
                locals_[methname] = _create_op_proxy(methname)
            else:
                attr_names.add(methname)


def write_outstream(stream, *text):
    encoding = getattr(stream, 'encoding', 'ascii') or 'ascii'
    for t in text:
        if not isinstance(t, binary_type):
            t = t.encode(encoding, 'replace')
        t = t.decode(encoding)
        try:
            stream.write(t)
        except IOError:
            # suppress "broken pipe" errors.
            # no known way to handle this on Python 3 however
            # as the exception is "ignored" (noisily) in TextIOWrapper.
            break


def coerce_resource_to_filename(fname):
    """Interpret a filename as either a filesystem location or as a package
    resource.

    Names that are non absolute paths and contain a colon
    are interpreted as resources and coerced to a file location.

    """
    if not os.path.isabs(fname) and ":" in fname:
        import pkg_resources
        fname = pkg_resources.resource_filename(*fname.split(':'))
    return fname


def status(_statmsg, fn, *arg, **kw):
    msg(_statmsg + " ...", False)
    try:
        ret = fn(*arg, **kw)
        write_outstream(sys.stdout, " done\n")
        return ret
    except:
        write_outstream(sys.stdout, " FAILED\n")
        raise


def err(message):
    log.error(message)
    msg("FAILED: %s" % message)
    sys.exit(-1)


def obfuscate_url_pw(u):
    u = url.make_url(u)
    if u.password:
        u.password = 'XXXXX'
    return str(u)


def asbool(value):
    return value is not None and \
        value.lower() == 'true'


def warn(msg):
    warnings.warn(msg)


def msg(msg, newline=True):
    if TERMWIDTH is None:
        write_outstream(sys.stdout, msg)
        if newline:
            write_outstream(sys.stdout, "\n")
    else:
        # left indent output lines
        lines = textwrap.wrap(msg, TERMWIDTH)
        if len(lines) > 1:
            for line in lines[0:-1]:
                write_outstream(sys.stdout, "  ", line, "\n")
        write_outstream(sys.stdout, "  ", lines[-1], ("\n" if newline else ""))


def load_python_file(dir_, filename):
    """Load a file from the given path as a Python module."""

    module_id = re.sub(r'\W', "_", filename)
    path = os.path.join(dir_, filename)
    _, ext = os.path.splitext(filename)
    if ext == ".py":
        if os.path.exists(path):
            module = load_module_py(module_id, path)
        elif os.path.exists(simple_pyc_file_from_path(path)):
            # look for sourceless load
            module = load_module_pyc(
                module_id, simple_pyc_file_from_path(path))
        else:
            raise ImportError("Can't find Python file %s" % path)
    elif ext in (".pyc", ".pyo"):
        module = load_module_pyc(module_id, path)
    del sys.modules[module_id]
    return module


def simple_pyc_file_from_path(path):
    """Given a python source path, return the so-called
    "sourceless" .pyc or .pyo path.

    This just a .pyc or .pyo file where the .py file would be.

    Even with PEP-3147, which normally puts .pyc/.pyo files in __pycache__,
    this use case remains supported as a so-called "sourceless module import".

    """
    if sys.flags.optimize:
        return path + "o"  # e.g. .pyo
    else:
        return path + "c"  # e.g. .pyc


def pyc_file_from_path(path):
    """Given a python source path, locate the .pyc.

    See http://www.python.org/dev/peps/pep-3147/
        #detecting-pep-3147-availability
    http://www.python.org/dev/peps/pep-3147/#file-extension-checks

    """
    import imp
    has3147 = hasattr(imp, 'get_tag')
    if has3147:
        return imp.cache_from_source(path)
    else:
        return simple_pyc_file_from_path(path)


def rev_id():
    val = int(uuid.uuid4()) % 100000000000000
    return hex(val)[2:-1]


def to_tuple(x, default=None):
    if x is None:
        return default
    elif isinstance(x, string_types):
        return (x, )
    elif isinstance(x, collections.Iterable):
        return tuple(x)
    else:
        raise ValueError("Don't know how to turn %r into a tuple" % x)


def format_as_comma(value):
    if value is None:
        return ""
    elif isinstance(value, string_types):
        return value
    elif isinstance(value, collections.Iterable):
        return ", ".join(value)
    else:
        raise ValueError("Don't know how to comma-format %r" % value)


class memoized_property(object):

    """A read-only @property that is only evaluated once."""

    def __init__(self, fget, doc=None):
        self.fget = fget
        self.__doc__ = doc or fget.__doc__
        self.__name__ = fget.__name__

    def __get__(self, obj, cls):
        if obj is None:
            return self
        obj.__dict__[self.__name__] = result = self.fget(obj)
        return result


class immutabledict(dict):

    def _immutable(self, *arg, **kw):
        raise TypeError("%s object is immutable" % self.__class__.__name__)

    __delitem__ = __setitem__ = __setattr__ = \
        clear = pop = popitem = setdefault = \
        update = _immutable

    def __new__(cls, *args):
        new = dict.__new__(cls)
        dict.__init__(new, *args)
        return new

    def __init__(self, *args):
        pass

    def __reduce__(self):
        return immutabledict, (dict(self), )

    def union(self, d):
        if not self:
            return immutabledict(d)
        else:
            d2 = immutabledict(self)
            dict.update(d2, d)
            return d2

    def __repr__(self):
        return "immutabledict(%s)" % dict.__repr__(self)


def _with_legacy_names(translations):
    def decorate(fn):

        spec = inspect_getfullargspec(fn)
        metadata = dict(target='target', fn='fn')
        metadata.update(format_argspec_plus(spec, grouped=False))

        has_keywords = bool(spec[2])

        if not has_keywords:
            metadata['args'] += ", **kw"
            metadata['apply_kw'] += ", **kw"

        def go(*arg, **kw):
            names = set(kw).difference(spec[0])
            for oldname, newname in translations:
                if oldname in kw:
                    kw[newname] = kw.pop(oldname)
                    names.discard(oldname)

                    warnings.warn(
                        "Argument '%s' is now named '%s' for function '%s'" %
                        (oldname, newname, fn.__name__))
            if not has_keywords and names:
                raise TypeError("Unknown arguments: %s" % ", ".join(names))
            return fn(*arg, **kw)

        code = 'lambda %(args)s: %(target)s(%(apply_kw)s)' % (
            metadata)
        decorated = eval(code, {"target": go})
        decorated.__defaults__ = getattr(fn, '__func__', fn).__defaults__
        update_wrapper(decorated, fn)
        if hasattr(decorated, '__wrapped__'):
            # update_wrapper in py3k applies __wrapped__, which causes
            # inspect.getargspec() to ignore the extra arguments on our
            # wrapper as of Python 3.4.  We need this for the
            # "module class proxy" thing though, so just del the __wrapped__
            # for now. See #175 as well as bugs.python.org/issue17482
            del decorated.__wrapped__
        return decorated

    return decorate

20 alembic/util/__init__.py Normal file
@ -0,0 +1,20 @@
from .langhelpers import (  # noqa
    asbool, rev_id, to_tuple, to_list, memoized_property,
    immutabledict, _with_legacy_names, Dispatcher, ModuleClsProxy)
from .messaging import (  # noqa
    write_outstream, status, err, obfuscate_url_pw, warn, msg, format_as_comma)
from .pyfiles import (  # noqa
    template_to_file, coerce_resource_to_filename, simple_pyc_file_from_path,
    pyc_file_from_path, load_python_file)
from .sqla_compat import (  # noqa
    sqla_07, sqla_079, sqla_08, sqla_083, sqla_084, sqla_09, sqla_092,
    sqla_094, sqla_094, sqla_099, sqla_100, sqla_105)


class CommandError(Exception):
    pass


if not sqla_07:
    raise CommandError(
        "SQLAlchemy 0.7.3 or greater is required. ")

275 alembic/util/langhelpers.py Normal file
@@ -0,0 +1,275 @@
import textwrap
import warnings
import inspect
import uuid
import collections

from .compat import callable, exec_, string_types, with_metaclass

from sqlalchemy.util import format_argspec_plus, update_wrapper
from sqlalchemy.util.compat import inspect_getfullargspec


class _ModuleClsMeta(type):
    def __setattr__(cls, key, value):
        super(_ModuleClsMeta, cls).__setattr__(key, value)
        cls._update_module_proxies(key)


class ModuleClsProxy(with_metaclass(_ModuleClsMeta)):
    """Create module level proxy functions for the
    methods on a given class.

    The functions will have a signature compatible
    with that of the methods.

    """

    _setups = collections.defaultdict(lambda: (set(), []))

    @classmethod
    def _update_module_proxies(cls, name):
        attr_names, modules = cls._setups[cls]
        for globals_, locals_ in modules:
            cls._add_proxied_attribute(name, globals_, locals_, attr_names)

    def _install_proxy(self):
        attr_names, modules = self._setups[self.__class__]
        for globals_, locals_ in modules:
            globals_['_proxy'] = self
            for attr_name in attr_names:
                globals_[attr_name] = getattr(self, attr_name)

    def _remove_proxy(self):
        attr_names, modules = self._setups[self.__class__]
        for globals_, locals_ in modules:
            globals_['_proxy'] = None
            for attr_name in attr_names:
                del globals_[attr_name]

    @classmethod
    def create_module_class_proxy(cls, globals_, locals_):
        attr_names, modules = cls._setups[cls]
        modules.append(
            (globals_, locals_)
        )
        cls._setup_proxy(globals_, locals_, attr_names)

    @classmethod
    def _setup_proxy(cls, globals_, locals_, attr_names):
        for methname in dir(cls):
            cls._add_proxied_attribute(methname, globals_, locals_, attr_names)

    @classmethod
    def _add_proxied_attribute(cls, methname, globals_, locals_, attr_names):
        if not methname.startswith('_'):
            meth = getattr(cls, methname)
            if callable(meth):
                locals_[methname] = cls._create_method_proxy(
                    methname, globals_, locals_)
            else:
                attr_names.add(methname)

    @classmethod
    def _create_method_proxy(cls, name, globals_, locals_):
        fn = getattr(cls, name)
        spec = inspect.getargspec(fn)
        if spec[0] and spec[0][0] == 'self':
            spec[0].pop(0)
        args = inspect.formatargspec(*spec)
        num_defaults = 0
        if spec[3]:
            num_defaults += len(spec[3])
        name_args = spec[0]
        if num_defaults:
            defaulted_vals = name_args[0 - num_defaults:]
        else:
            defaulted_vals = ()

        apply_kw = inspect.formatargspec(
            name_args, spec[1], spec[2],
            defaulted_vals,
            formatvalue=lambda x: '=' + x)

        def _name_error(name):
            raise NameError(
                "Can't invoke function '%s', as the proxy object has "
                "not yet been "
                "established for the Alembic '%s' class.  "
                "Try placing this code inside a callable." % (
                    name, cls.__name__
                ))
        globals_['_name_error'] = _name_error

        func_text = textwrap.dedent("""\
        def %(name)s(%(args)s):
            %(doc)r
            try:
                p = _proxy
            except NameError:
                _name_error('%(name)s')
            return _proxy.%(name)s(%(apply_kw)s)
        """ % {
            'name': name,
            'args': args[1:-1],
            'apply_kw': apply_kw[1:-1],
            'doc': fn.__doc__,
        })
        lcl = {}
        exec_(func_text, globals_, lcl)
        return lcl[name]
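
# Usage sketch of the proxy machinery above; the class and method names
# here are hypothetical, but ``alembic.op`` is wired up along these lines.
# Assumes the Python interpreters this commit targets (the builder relies
# on ``inspect.getargspec``, which modern Pythons have removed).
#
#     class DemoOps(ModuleClsProxy):
#         def create_thing(self, name):
#             """Create a thing."""
#             return "created %s" % name
#
#     # at module scope locals() is globals(), so the generated proxy
#     # functions land directly in the module namespace
#     DemoOps.create_module_class_proxy(globals(), locals())
#
#     ops = DemoOps()
#     ops._install_proxy()        # module-level create_thing() routes to `ops`
#     print(create_thing("x"))    # -> "created x"
#     ops._remove_proxy()         # _proxy is None; create_thing() now fails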
def asbool(value):
    return value is not None and \
        value.lower() == 'true'


def rev_id():
    val = int(uuid.uuid4()) % 100000000000000
    # [2:-1] strips the '0x' prefix and the trailing character (the 'L'
    # suffix when the value is a Python 2 long)
    return hex(val)[2:-1]


def to_list(x, default=None):
    if x is None:
        return default
    elif isinstance(x, string_types):
        return [x]
    elif isinstance(x, collections.Iterable):
        return list(x)
    else:
        raise ValueError("Don't know how to turn %r into a list" % x)


def to_tuple(x, default=None):
    if x is None:
        return default
    elif isinstance(x, string_types):
        return (x, )
    elif isinstance(x, collections.Iterable):
        return tuple(x)
    else:
        raise ValueError("Don't know how to turn %r into a tuple" % x)
class memoized_property(object):

    """A read-only @property that is only evaluated once."""

    def __init__(self, fget, doc=None):
        self.fget = fget
        self.__doc__ = doc or fget.__doc__
        self.__name__ = fget.__name__

    def __get__(self, obj, cls):
        if obj is None:
            return self
        obj.__dict__[self.__name__] = result = self.fget(obj)
        return result
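
# Caching behavior in brief, on a hypothetical class: memoized_property is
# a non-data descriptor, so the value stored into obj.__dict__ on first
# access shadows the descriptor on every later access.
#
#     class Report(object):
#         @memoized_property
#         def data(self):
#             print("computing...")
#             return [1, 2, 3]
#
#     r = Report()
#     r.data   # prints "computing..." once; result cached in r.__dict__
#     r.data   # served from the instance dict; fget never runs again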

class immutabledict(dict):

    def _immutable(self, *arg, **kw):
        raise TypeError("%s object is immutable" % self.__class__.__name__)

    __delitem__ = __setitem__ = __setattr__ = \
        clear = pop = popitem = setdefault = \
        update = _immutable

    def __new__(cls, *args):
        new = dict.__new__(cls)
        dict.__init__(new, *args)
        return new

    def __init__(self, *args):
        pass

    def __reduce__(self):
        return immutabledict, (dict(self), )

    def union(self, d):
        if not self:
            return immutabledict(d)
        else:
            d2 = immutabledict(self)
            dict.update(d2, d)
            return d2

    def __repr__(self):
        return "immutabledict(%s)" % dict.__repr__(self)
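
# Behavior sketch: all mutators raise, and union() is the way to derive a
# changed copy.
#
#     d = immutabledict({"a": 1})
#     d2 = d.union({"b": 2})   # new immutabledict with both keys; d unchanged
#     d["a"] = 5               # raises TypeError: object is immutable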

def _with_legacy_names(translations):
    def decorate(fn):

        spec = inspect_getfullargspec(fn)
        metadata = dict(target='target', fn='fn')
        metadata.update(format_argspec_plus(spec, grouped=False))

        has_keywords = bool(spec[2])

        if not has_keywords:
            metadata['args'] += ", **kw"
            metadata['apply_kw'] += ", **kw"

        def go(*arg, **kw):
            names = set(kw).difference(spec[0])
            for oldname, newname in translations:
                if oldname in kw:
                    kw[newname] = kw.pop(oldname)
                    names.discard(oldname)

                    warnings.warn(
                        "Argument '%s' is now named '%s' for function '%s'" %
                        (oldname, newname, fn.__name__))
            if not has_keywords and names:
                raise TypeError("Unknown arguments: %s" % ", ".join(names))
            return fn(*arg, **kw)

        code = 'lambda %(args)s: %(target)s(%(apply_kw)s)' % (
            metadata)
        decorated = eval(code, {"target": go})
        decorated.__defaults__ = getattr(fn, '__func__', fn).__defaults__
        update_wrapper(decorated, fn)
        if hasattr(decorated, '__wrapped__'):
            # update_wrapper in py3k applies __wrapped__, which causes
            # inspect.getargspec() to ignore the extra arguments on our
            # wrapper as of Python 3.4.  We need this for the
            # "module class proxy" thing though, so just del the __wrapped__
            # for now.  See #175 as well as bugs.python.org/issue17482
            del decorated.__wrapped__
        return decorated

    return decorate
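
# Usage sketch with hypothetical names: the decorator accepts the legacy
# keyword, warns, and forwards the value under the new name.
#
#     @_with_legacy_names([("tablename", "table_name")])
#     def drop_thing(table_name):
#         return table_name
#
#     drop_thing(tablename="t")    # warns about the rename, then runs
#     drop_thing(table_name="t")   # new spelling passes straight through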

class Dispatcher(object):
    def __init__(self):
        self._registry = {}

    def dispatch_for(self, target, qualifier='default'):
        def decorate(fn):
            assert isinstance(target, type)
            assert (target, qualifier) not in self._registry
            self._registry[(target, qualifier)] = fn
            return fn
        return decorate

    def dispatch(self, obj, qualifier='default'):
        for spcls in type(obj).__mro__:
            if qualifier != 'default' and (spcls, qualifier) in self._registry:
                return self._registry[(spcls, qualifier)]
            elif (spcls, 'default') in self._registry:
                return self._registry[(spcls, 'default')]
        else:
            raise ValueError("no dispatch function for object: %s" % obj)

    def branch(self):
        """Return a copy of this dispatcher that is independently
        writable."""

        d = Dispatcher()
        d._registry.update(self._registry)
        return d
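
# This is the MRO-walking lookup used by the autogenerate ``renderers``
# dispatcher; a minimal sketch with hypothetical operation classes:
#
#     dispatcher = Dispatcher()
#
#     class MigrateOp(object):
#         pass
#
#     class CreateThingOp(MigrateOp):
#         pass
#
#     @dispatcher.dispatch_for(MigrateOp)
#     def render_default(op):
#         return "# default rendering"
#
#     fn = dispatcher.dispatch(CreateThingOp())  # walks __mro__ to MigrateOp
#     fn(CreateThingOp())                        # -> "# default rendering"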
94 alembic/util/messaging.py Normal file
@@ -0,0 +1,94 @@
from .compat import py27, binary_type, string_types
import sys
from sqlalchemy.engine import url
import warnings
import textwrap
import collections
import logging

log = logging.getLogger(__name__)

if py27:
    # disable "no handler found" errors
    logging.getLogger('alembic').addHandler(logging.NullHandler())


try:
    import fcntl
    import termios
    import struct
    ioctl = fcntl.ioctl(0, termios.TIOCGWINSZ,
                        struct.pack('HHHH', 0, 0, 0, 0))
    _h, TERMWIDTH, _hp, _wp = struct.unpack('HHHH', ioctl)
    if TERMWIDTH <= 0:  # can occur if running in emacs pseudo-tty
        TERMWIDTH = None
except (ImportError, IOError):
    TERMWIDTH = None


def write_outstream(stream, *text):
    encoding = getattr(stream, 'encoding', 'ascii') or 'ascii'
    for t in text:
        if not isinstance(t, binary_type):
            t = t.encode(encoding, 'replace')
        t = t.decode(encoding)
        try:
            stream.write(t)
        except IOError:
            # suppress "broken pipe" errors.
            # no known way to handle this on Python 3 however
            # as the exception is "ignored" (noisily) in TextIOWrapper.
            break


def status(_statmsg, fn, *arg, **kw):
    msg(_statmsg + " ...", False)
    try:
        ret = fn(*arg, **kw)
        write_outstream(sys.stdout, " done\n")
        return ret
    except:
        write_outstream(sys.stdout, " FAILED\n")
        raise


def err(message):
    log.error(message)
    msg("FAILED: %s" % message)
    sys.exit(-1)


def obfuscate_url_pw(u):
    u = url.make_url(u)
    if u.password:
        u.password = 'XXXXX'
    return str(u)
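
# For example (standard SQLAlchemy URL behavior):
#
#     obfuscate_url_pw("postgresql://scott:tiger@localhost/test")
#     # -> 'postgresql://scott:XXXXX@localhost/test'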

def warn(msg):
    warnings.warn(msg)


def msg(msg, newline=True):
    if TERMWIDTH is None:
        write_outstream(sys.stdout, msg)
        if newline:
            write_outstream(sys.stdout, "\n")
    else:
        # left indent output lines
        lines = textwrap.wrap(msg, TERMWIDTH)
        if len(lines) > 1:
            for line in lines[0:-1]:
                write_outstream(sys.stdout, "  ", line, "\n")
        write_outstream(sys.stdout, "  ", lines[-1], ("\n" if newline else ""))


def format_as_comma(value):
    if value is None:
        return ""
    elif isinstance(value, string_types):
        return value
    elif isinstance(value, collections.Iterable):
        return ", ".join(value)
    else:
        raise ValueError("Don't know how to comma-format %r" % value)
80 alembic/util/pyfiles.py Normal file
@@ -0,0 +1,80 @@
import sys
import os
import re
from .compat import load_module_py, load_module_pyc
from mako.template import Template


def template_to_file(template_file, dest, output_encoding, **kw):
    with open(dest, 'wb') as f:
        template = Template(filename=template_file)
        f.write(
            template.render_unicode(**kw).encode(output_encoding)
        )


def coerce_resource_to_filename(fname):
    """Interpret a filename as either a filesystem location or as a package
    resource.

    Names that are non-absolute paths and contain a colon
    are interpreted as resources and coerced to a file location.

    """
    if not os.path.isabs(fname) and ":" in fname:
        import pkg_resources
        fname = pkg_resources.resource_filename(*fname.split(':'))
    return fname
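
# Illustration with hypothetical names; an absolute path is returned
# unchanged, while "package:resource" syntax is resolved via pkg_resources:
#
#     coerce_resource_to_filename("/path/to/alembic.ini")    # unchanged
#     coerce_resource_to_filename("some_package:templates")  # filesystem path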

def simple_pyc_file_from_path(path):
    """Given a python source path, return the so-called
    "sourceless" .pyc or .pyo path.

    This is just a .pyc or .pyo file where the .py file would be.

    Even with PEP-3147, which normally puts .pyc/.pyo files in __pycache__,
    this use case remains supported as a so-called "sourceless module import".

    """
    if sys.flags.optimize:
        return path + "o"  # e.g. .pyo
    else:
        return path + "c"  # e.g. .pyc


def pyc_file_from_path(path):
    """Given a python source path, locate the .pyc.

    See http://www.python.org/dev/peps/pep-3147/#detecting-pep-3147-availability
    and http://www.python.org/dev/peps/pep-3147/#file-extension-checks

    """
    import imp
    has3147 = hasattr(imp, 'get_tag')
    if has3147:
        return imp.cache_from_source(path)
    else:
        return simple_pyc_file_from_path(path)


def load_python_file(dir_, filename):
    """Load a file from the given path as a Python module."""

    module_id = re.sub(r'\W', "_", filename)
    path = os.path.join(dir_, filename)
    _, ext = os.path.splitext(filename)
    if ext == ".py":
        if os.path.exists(path):
            module = load_module_py(module_id, path)
        elif os.path.exists(simple_pyc_file_from_path(path)):
            # look for sourceless load
            module = load_module_pyc(
                module_id, simple_pyc_file_from_path(path))
        else:
            raise ImportError("Can't find Python file %s" % path)
    elif ext in (".pyc", ".pyo"):
        module = load_module_pyc(module_id, path)
    del sys.modules[module_id]
    return module
160 alembic/util/sqla_compat.py Normal file
@@ -0,0 +1,160 @@
import re
from sqlalchemy import __version__
from sqlalchemy.schema import ForeignKeyConstraint, CheckConstraint, Column
from sqlalchemy import types as sqltypes
from sqlalchemy import schema, sql
from sqlalchemy.sql.visitors import traverse
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.sql.expression import _BindParamClause
from . import compat


def _safe_int(value):
    try:
        return int(value)
    except:
        return value

_vers = tuple(
    [_safe_int(x) for x in re.findall(r'(\d+|[abc]\d)', __version__)])
sqla_07 = _vers > (0, 7, 2)
sqla_079 = _vers >= (0, 7, 9)
sqla_08 = _vers >= (0, 8, 0)
sqla_083 = _vers >= (0, 8, 3)
sqla_084 = _vers >= (0, 8, 4)
sqla_09 = _vers >= (0, 9, 0)
sqla_092 = _vers >= (0, 9, 2)
sqla_094 = _vers >= (0, 9, 4)
sqla_099 = _vers >= (0, 9, 9)
sqla_100 = _vers >= (1, 0, 0)
sqla_105 = _vers >= (1, 0, 5)
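
# The version tuple comes purely from the regex above; e.g. for a
# hypothetical __version__ of "0.9.4b2":
#
#     re.findall(r'(\d+|[abc]\d)', "0.9.4b2")  # -> ['0', '9', '4', 'b2']
#
# which _safe_int() turns into (0, 9, 4, 'b2') for the comparisons.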

if sqla_08:
    from sqlalchemy.sql.expression import TextClause
else:
    from sqlalchemy.sql.expression import _TextClause as TextClause


def _table_for_constraint(constraint):
    if isinstance(constraint, ForeignKeyConstraint):
        return constraint.parent
    else:
        return constraint.table


def _columns_for_constraint(constraint):
    if isinstance(constraint, ForeignKeyConstraint):
        return [fk.parent for fk in constraint.elements]
    elif isinstance(constraint, CheckConstraint):
        return _find_columns(constraint.sqltext)
    else:
        return list(constraint.columns)


def _fk_spec(constraint):
    if sqla_100:
        source_columns = [
            constraint.columns[key].name for key in constraint.column_keys]
    else:
        source_columns = [
            element.parent.name for element in constraint.elements]

    source_table = constraint.parent.name
    source_schema = constraint.parent.schema
    target_schema = constraint.elements[0].column.table.schema
    target_table = constraint.elements[0].column.table.name
    target_columns = [element.column.name for element in constraint.elements]

    return (
        source_schema, source_table,
        source_columns, target_schema, target_table, target_columns)


def _is_type_bound(constraint):
    # this deals with SQLAlchemy #3260, don't copy CHECK constraints
    # that will be generated by the type.
    if sqla_100:
        # new feature added for #3260
        return constraint._type_bound
    else:
        # old way, look at what we know Boolean/Enum to use
        return (
            constraint._create_rule is not None and
            isinstance(
                getattr(constraint._create_rule, "target", None),
                sqltypes.SchemaType)
        )


def _find_columns(clause):
    """locate Column objects within the given expression."""

    cols = set()
    traverse(clause, {}, {'column': cols.add})
    return cols


def _textual_index_column(table, text_):
    """a workaround for the Index construct's severe lack of flexibility"""
    if isinstance(text_, compat.string_types):
        c = Column(text_, sqltypes.NULLTYPE)
        table.append_column(c)
        return c
    elif isinstance(text_, TextClause):
        return _textual_index_element(table, text_)
    else:
        raise ValueError("String or text() construct expected")


class _textual_index_element(sql.ColumnElement):
    """Wrap around a sqlalchemy text() construct in such a way that
    we appear like a column-oriented SQL expression to an Index
    construct.

    The issue here is that currently the Postgresql dialect, the biggest
    recipient of functional indexes, keys all the index expressions to
    the corresponding column expressions when rendering CREATE INDEX,
    so the Index we create here needs to have a .columns collection that
    is the same length as the .expressions collection.  Ultimately
    SQLAlchemy should support text() expressions in indexes.

    See https://bitbucket.org/zzzeek/sqlalchemy/issue/3174/\
    support-text-sent-to-indexes

    """
    __visit_name__ = '_textual_idx_element'

    def __init__(self, table, text):
        self.table = table
        self.text = text
        self.key = text.text
        self.fake_column = schema.Column(self.text.text, sqltypes.NULLTYPE)
        table.append_column(self.fake_column)

    def get_children(self):
        return [self.fake_column]


@compiles(_textual_index_element)
def _render_textual_index_column(element, compiler, **kw):
    return compiler.process(element.text, **kw)


class _literal_bindparam(_BindParamClause):
    pass


@compiles(_literal_bindparam)
def _render_literal_bindparam(element, compiler, **kw):
    return compiler.render_literal_bindparam(element, **kw)


def _get_index_expressions(idx):
    if sqla_08:
        return list(idx.expressions)
    else:
        return list(idx.columns)


def _get_index_column_names(idx):
    return [getattr(exp, "name", None) for exp in _get_index_expressions(idx)]
217 docs/build/api.rst vendored
@@ -1,217 +0,0 @@
.. _api:

===========
API Details
===========

This section describes some key functions used within the migration process, particularly those referenced within
a migration environment's ``env.py`` file.

Overview
========

The three main objects in use are the :class:`.EnvironmentContext`, :class:`.MigrationContext`,
and :class:`.Operations` classes, pictured below.

.. image:: api_overview.png

An Alembic command begins by instantiating an :class:`.EnvironmentContext` object, then
making it available via the ``alembic.context`` proxy module.  The ``env.py``
script, representing a user-configurable migration environment, is then
invoked.  The ``env.py`` script is then responsible for calling upon the
:meth:`.EnvironmentContext.configure`, whose job it is to create
a :class:`.MigrationContext` object.

Before this method is called, there's not
yet any database connection or dialect-specific state set up.  While
many methods on :class:`.EnvironmentContext` are usable at this stage,
those which require database access, or at least access to the kind
of database dialect in use, are not.  Once the
:meth:`.EnvironmentContext.configure` method is called, the :class:`.EnvironmentContext`
is said to be *configured* with database connectivity, available via
a new :class:`.MigrationContext` object.  The :class:`.MigrationContext`
is associated with the :class:`.EnvironmentContext` object
via the :meth:`.EnvironmentContext.get_context` method.

Finally, ``env.py`` calls upon the :meth:`.EnvironmentContext.run_migrations`
method.  Within this method, a new :class:`.Operations` object, which
provides an API for individual database migration operations, is established
within the ``alembic.op`` proxy module.  The :class:`.Operations` object
uses the :class:`.MigrationContext` object ultimately as a source of
database connectivity, though in such a way that it does not care if the
:class:`.MigrationContext` is talking to a real database or just writing
out SQL to a file.

The Environment Context
=======================

The :class:`.EnvironmentContext` class provides most of the
API used within an ``env.py`` script.  Within ``env.py``,
the instantiated :class:`.EnvironmentContext` is made available
via a special *proxy module* called ``alembic.context``.  That is,
you can import ``alembic.context`` like a regular Python module,
and each name you call upon it is ultimately routed towards the
current :class:`.EnvironmentContext` in use.

In particular, the key method used within ``env.py`` is :meth:`.EnvironmentContext.configure`,
which establishes all the details about how the database will be accessed.

.. automodule:: alembic.environment
    :members:

The Migration Context
=====================

.. automodule:: alembic.migration
    :members:

The Operations Object
=====================

Within migration scripts, actual database migration operations are handled
via an instance of :class:`.Operations`.  See :ref:`ops` for an overview
of this object.

Commands
=========

Alembic commands are all represented by functions in the :mod:`alembic.command`
package.  They all accept the same style of usage, being sent
the :class:`~.alembic.config.Config` object as the first argument.

Commands can be run programmatically, by first constructing a :class:`.Config`
object, as in::

    from alembic.config import Config
    from alembic import command
    alembic_cfg = Config("/path/to/yourapp/alembic.ini")
    command.upgrade(alembic_cfg, "head")

In many cases, and perhaps more often than not, an application will wish
to call upon a series of Alembic commands and/or other features.  It is
usually a good idea to link multiple commands along a single connection
and transaction, if feasible.  This can be achieved using the
:attr:`.Config.attributes` dictionary in order to share a connection::

    with engine.begin() as connection:
        alembic_cfg.attributes['connection'] = connection
        command.upgrade(alembic_cfg, "head")

This recipe requires that ``env.py`` consumes this connection argument;
see the example in :ref:`connection_sharing` for details.

To write small API functions that make direct use of database and script directory
information, rather than just running one of the built-in commands,
use the :class:`.ScriptDirectory` and :class:`.MigrationContext`
classes directly.

.. currentmodule:: alembic.command

.. automodule:: alembic.command
    :members:

Configuration
==============

The :class:`.Config` object represents the configuration
passed to the Alembic environment.  From an API usage perspective,
it is needed for the following use cases:

* to create a :class:`.ScriptDirectory`, which allows you to work
  with the actual script files in a migration environment
* to create an :class:`.EnvironmentContext`, which allows you to
  actually run the ``env.py`` module within the migration environment
* to programmatically run any of the commands in the :mod:`alembic.command`
  module.

The :class:`.Config` is *not* needed for these cases:

* to instantiate a :class:`.MigrationContext` directly - this object
  only needs a SQLAlchemy connection or dialect name.
* to instantiate a :class:`.Operations` object - this object only
  needs a :class:`.MigrationContext`.

.. currentmodule:: alembic.config

.. automodule:: alembic.config
    :members:

Script Directory
================

The :class:`.ScriptDirectory` object provides programmatic access
to the Alembic version files present in the filesystem.

.. automodule:: alembic.script
    :members:

Revision
========

The :class:`.RevisionMap` object serves as the basis for revision
management, used exclusively by :class:`.ScriptDirectory`.

.. automodule:: alembic.revision
    :members:

Autogeneration
==============

Alembic 0.3 introduces a small portion of the autogeneration system
as a public API.

.. autofunction:: alembic.autogenerate.compare_metadata

DDL Internals
=============

These are some of the constructs used to generate migration
instructions.  The APIs here build off of the :class:`sqlalchemy.schema.DDLElement`
and :mod:`sqlalchemy.ext.compiler` systems.

For programmatic usage of Alembic's migration directives, the easiest
route is to use the higher level functions given by :mod:`alembic.operations`.

.. automodule:: alembic.ddl
    :members:
    :undoc-members:

.. automodule:: alembic.ddl.base
    :members:
    :undoc-members:

.. automodule:: alembic.ddl.impl
    :members:
    :undoc-members:

MySQL
-----

.. automodule:: alembic.ddl.mysql
    :members:
    :undoc-members:
    :show-inheritance:

MS-SQL
------

.. automodule:: alembic.ddl.mssql
    :members:
    :undoc-members:
    :show-inheritance:

Postgresql
----------

.. automodule:: alembic.ddl.postgresql
    :members:
    :undoc-members:
    :show-inheritance:

SQLite
------

.. automodule:: alembic.ddl.sqlite
    :members:
    :undoc-members:
    :show-inheritance:
BIN docs/build/api/api_overview.png vendored Normal file
Binary file not shown. After: 121 KiB.
235 docs/build/api/autogenerate.rst vendored Normal file
@@ -0,0 +1,235 @@
.. _alembic.autogenerate.toplevel:

==============
Autogeneration
==============

The autogenerate system has two areas of API that are public:

1. The ability to do a "diff" of a :class:`~sqlalchemy.schema.MetaData` object against
   a database, and receive a data structure back.  This structure
   is available either as a rudimentary list of changes, or as
   a :class:`.MigrateOperation` structure.

2. The ability to alter how the ``alembic revision`` command generates
   revision scripts, including support for multiple revision scripts
   generated in one pass.

Getting Diffs
==============

.. autofunction:: alembic.autogenerate.compare_metadata

.. autofunction:: alembic.autogenerate.produce_migrations

.. _customizing_revision:

Customizing Revision Generation
==========================================

.. versionadded:: 0.8.0 - the ``alembic revision`` system is now customizable.

The ``alembic revision`` command, also available programmatically
via :func:`.command.revision`, essentially produces a single migration
script after being run.  Whether or not the ``--autogenerate`` option
was specified basically determines if this script is a blank revision
script with empty ``upgrade()`` and ``downgrade()`` functions, or was
produced with alembic operation directives as the result of autogenerate.

In either case, the system creates a full plan of what is to be done
in the form of a :class:`.MigrateOperation` structure, which is then
used to produce the script.

For example, suppose we ran ``alembic revision --autogenerate``, and the
end result was that it produced a new revision ``'eced083f5df'``
with the following contents::

    """create the organization table."""

    # revision identifiers, used by Alembic.
    revision = 'eced083f5df'
    down_revision = 'beafc7d709f'

    from alembic import op
    import sqlalchemy as sa


    def upgrade():
        op.create_table(
            'organization',
            sa.Column('id', sa.Integer(), primary_key=True),
            sa.Column('name', sa.String(50), nullable=False)
        )
        op.add_column(
            'user',
            sa.Column('organization_id', sa.Integer())
        )
        op.create_foreign_key(
            'org_fk', 'user', 'organization', ['organization_id'], ['id']
        )


    def downgrade():
        op.drop_constraint('org_fk', 'user')
        op.drop_column('user', 'organization_id')
        op.drop_table('organization')

The above script is generated by a :class:`.MigrateOperation` structure
that looks like this::

    from alembic.operations import ops
    import sqlalchemy as sa

    migration_script = ops.MigrationScript(
        'eced083f5df',
        ops.UpgradeOps(
            ops=[
                ops.CreateTableOp(
                    'organization',
                    [
                        sa.Column('id', sa.Integer(), primary_key=True),
                        sa.Column('name', sa.String(50), nullable=False)
                    ]
                ),
                ops.ModifyTableOps(
                    'user',
                    ops=[
                        ops.AddColumnOp(
                            'user',
                            sa.Column('organization_id', sa.Integer())
                        ),
                        ops.CreateForeignKeyOp(
                            'org_fk', 'user', 'organization',
                            ['organization_id'], ['id']
                        )
                    ]
                )
            ]
        ),
        ops.DowngradeOps(
            ops=[
                ops.ModifyTableOps(
                    'user',
                    ops=[
                        ops.DropConstraintOp('org_fk', 'user'),
                        ops.DropColumnOp('user', 'organization_id')
                    ]
                ),
                ops.DropTableOp('organization')
            ]
        ),
        message='create the organization table.'
    )

When we deal with a :class:`.MigrationScript` structure, we can render
the upgrade/downgrade sections into strings for debugging purposes
using the :func:`.render_python_code` helper function::

    from alembic.autogenerate import render_python_code
    print(render_python_code(migration_script.upgrade_ops))

Renders::

    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('organization',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=50), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.add_column('user', sa.Column('organization_id', sa.Integer(), nullable=True))
    op.create_foreign_key('org_fk', 'user', 'organization', ['organization_id'], ['id'])
    ### end Alembic commands ###

Given that structures like the above are used to generate new revision
files, and that we'd like to be able to alter these as they are created,
we then need a system to access this structure when the
:func:`.command.revision` command is used.  The
:paramref:`.EnvironmentContext.configure.process_revision_directives`
parameter gives us a way to alter this.  This is a function that
is passed the above structure as generated by Alembic, giving us a chance
to alter it.

For example, if we wanted to put all the "upgrade" operations into
a certain branch, and we wanted our script to not have any "downgrade"
operations at all, we could build an extension as follows, illustrated
within an ``env.py`` script::

    def process_revision_directives(context, revision, directives):
        script = directives[0]

        # set specific branch
        script.head = "mybranch@head"

        # erase downgrade operations
        script.downgrade_ops.ops[:] = []

    # ...

    def run_migrations_online():

        # ...
        with engine.connect() as connection:

            context.configure(
                connection=connection,
                target_metadata=target_metadata,
                process_revision_directives=process_revision_directives)

            with context.begin_transaction():
                context.run_migrations()

Above, the ``directives`` argument is a Python list.  We may alter the
given structure within this list in-place, or replace it with a new
structure consisting of zero or more :class:`.MigrationScript` directives.
The :func:`.command.revision` command will then produce scripts corresponding
to whatever is in this list.
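
Since the list may be emptied outright, a hook along these lines (a sketch
building only on the behavior described above) could skip writing any file
at all when autogenerate detects no changes::

    def process_revision_directives(context, revision, directives):
        script = directives[0]
        if not script.upgrade_ops.ops:
            # nothing detected; produce no revision file
            directives[:] = []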

.. autofunction:: alembic.autogenerate.render_python_code

Autogenerating Custom Operation Directives
==========================================

In the section :ref:`operation_plugins`, we talked about adding new
subclasses of :class:`.MigrateOperation` in order to add new ``op.``
directives.  In the preceding section :ref:`customizing_revision`, we
also learned that these same :class:`.MigrateOperation` structures are at
the base of how the autogenerate system knows what Python code to render.
How to connect these two systems, so that our own custom operation
directives can be used?  First off, we'd probably be implementing
a :paramref:`.EnvironmentContext.configure.process_revision_directives`
plugin as described previously, so that we can add our own directives
to the autogenerate stream.  What if we wanted to add our ``CreateSequenceOp``
to the autogenerate structure?  We basically need to define an autogenerate
renderer for it, as follows::

    # note: this is a continuation of the example from the
    # "Operation Plugins" section

    from alembic.autogenerate import renderers

    @renderers.dispatch_for(CreateSequenceOp)
    def render_create_sequence(autogen_context, op):
        return "op.create_sequence(%r, **%r)" % (
            op.sequence_name,
            op.kw
        )

With our render function established, we can see our ``CreateSequenceOp``
generated in an autogenerate context using the :func:`.render_python_code`
debugging function in conjunction with an :class:`.UpgradeOps` structure::

    from alembic.operations import ops
    from alembic.autogenerate import render_python_code

    upgrade_ops = ops.UpgradeOps(
        ops=[
            CreateSequenceOp("my_seq")
        ]
    )

    print(render_python_code(upgrade_ops))

Which produces::

    ### commands auto generated by Alembic - please adjust! ###
    op.create_sequence('my_seq', **{})
    ### end Alembic commands ###
38 docs/build/api/commands.rst vendored Normal file
@@ -0,0 +1,38 @@
.. _alembic.command.toplevel:

=========
Commands
=========

Alembic commands are all represented by functions in the :ref:`alembic.command.toplevel`
package.  They all accept the same style of usage, being sent
the :class:`.Config` object as the first argument.

Commands can be run programmatically, by first constructing a :class:`.Config`
object, as in::

    from alembic.config import Config
    from alembic import command
    alembic_cfg = Config("/path/to/yourapp/alembic.ini")
    command.upgrade(alembic_cfg, "head")

In many cases, and perhaps more often than not, an application will wish
to call upon a series of Alembic commands and/or other features.  It is
usually a good idea to link multiple commands along a single connection
and transaction, if feasible.  This can be achieved using the
:attr:`.Config.attributes` dictionary in order to share a connection::

    with engine.begin() as connection:
        alembic_cfg.attributes['connection'] = connection
        command.upgrade(alembic_cfg, "head")

This recipe requires that ``env.py`` consumes this connection argument;
see the example in :ref:`connection_sharing` for details.

To write small API functions that make direct use of database and script directory
information, rather than just running one of the built-in commands,
use the :class:`.ScriptDirectory` and :class:`.MigrationContext`
classes directly.

.. automodule:: alembic.command
    :members:
26 docs/build/api/config.rst vendored Normal file
@@ -0,0 +1,26 @@
.. _alembic.config.toplevel:

==============
Configuration
==============

The :class:`.Config` object represents the configuration
passed to the Alembic environment.  From an API usage perspective,
it is needed for the following use cases:

* to create a :class:`.ScriptDirectory`, which allows you to work
  with the actual script files in a migration environment
* to create an :class:`.EnvironmentContext`, which allows you to
  actually run the ``env.py`` module within the migration environment
* to programmatically run any of the commands in the :ref:`alembic.command.toplevel`
  module.

The :class:`.Config` is *not* needed for these cases:

* to instantiate a :class:`.MigrationContext` directly - this object
  only needs a SQLAlchemy connection or dialect name.
* to instantiate a :class:`.Operations` object - this object only
  needs a :class:`.MigrationContext`.

.. automodule:: alembic.config
    :members:
56 docs/build/api/ddl.rst vendored Normal file
@@ -0,0 +1,56 @@
.. _alembic.ddl.toplevel:

=============
DDL Internals
=============

These are some of the constructs used to generate migration
instructions.  The APIs here build off of the :class:`sqlalchemy.schema.DDLElement`
and :ref:`sqlalchemy.ext.compiler_toplevel` systems.

For programmatic usage of Alembic's migration directives, the easiest
route is to use the higher level functions given by :ref:`alembic.operations.toplevel`.

.. automodule:: alembic.ddl
    :members:
    :undoc-members:

.. automodule:: alembic.ddl.base
    :members:
    :undoc-members:

.. automodule:: alembic.ddl.impl
    :members:
    :undoc-members:

MySQL
=============

.. automodule:: alembic.ddl.mysql
    :members:
    :undoc-members:
    :show-inheritance:

MS-SQL
=============

.. automodule:: alembic.ddl.mssql
    :members:
    :undoc-members:
    :show-inheritance:

Postgresql
=============

.. automodule:: alembic.ddl.postgresql
    :members:
    :undoc-members:
    :show-inheritance:

SQLite
=============

.. automodule:: alembic.ddl.sqlite
    :members:
    :undoc-members:
    :show-inheritance:
19 docs/build/api/environment.rst vendored Normal file
@@ -0,0 +1,19 @@
.. _alembic.runtime.environment.toplevel:

=======================
The Environment Context
=======================

The :class:`.EnvironmentContext` class provides most of the
API used within an ``env.py`` script.  Within ``env.py``,
the instantiated :class:`.EnvironmentContext` is made available
via a special *proxy module* called ``alembic.context``.  That is,
you can import ``alembic.context`` like a regular Python module,
and each name you call upon it is ultimately routed towards the
current :class:`.EnvironmentContext` in use.

In particular, the key method used within ``env.py`` is :meth:`.EnvironmentContext.configure`,
which establishes all the details about how the database will be accessed.

.. automodule:: alembic.runtime.environment
    :members: EnvironmentContext
33 docs/build/api/index.rst vendored Normal file
@@ -0,0 +1,33 @@
.. _api:

===========
API Details
===========

Alembic's internal API has many public integration points that can be used
to extend Alembic's functionality as well as to re-use its functionality
in new ways.  As the project has grown, more APIs are created and exposed
for this purpose.

Direct use of the vast majority of API details discussed here is not needed
for rudimentary use of Alembic; the only APIs normally used by end users are
the methods provided by the :class:`.Operations` class, which is discussed
outside of this subsection, and the parameters that can be passed to
the :meth:`.EnvironmentContext.configure` method, used when configuring
one's ``env.py`` environment.  However, real-world applications will
usually end up using more of the internal API, in particular being able
to run commands programmatically, as discussed in the section :doc:`/api/commands`.

.. toctree::
    :maxdepth: 2

    overview
    environment
    migration
    config
    commands
    operations
    autogenerate
    script
    ddl
8 docs/build/api/migration.rst vendored Normal file
@@ -0,0 +1,8 @@
.. _alembic.runtime.migration.toplevel:

=====================
The Migration Context
=====================

.. automodule:: alembic.runtime.migration
    :members: MigrationContext
123 docs/build/api/operations.rst vendored Normal file
@@ -0,0 +1,123 @@
.. _alembic.operations.toplevel:

=====================
The Operations Object
=====================

Within migration scripts, actual database migration operations are handled
via an instance of :class:`.Operations`.  The :class:`.Operations` class
lists out available migration operations that are linked to a
:class:`.MigrationContext`, which communicates instructions originated
by the :class:`.Operations` object into SQL that is sent to a database or SQL
output stream.

Most methods on the :class:`.Operations` class are generated dynamically
using a "plugin" system, described in the next section
:ref:`operation_plugins`.  Additionally, when Alembic migration scripts
actually run, the methods on the current :class:`.Operations` object are
proxied out to the ``alembic.op`` module, so that they are available
using module-style access.

For an overview of how to use an :class:`.Operations` object directly
in programs, as well as for reference to the standard operation methods
as well as "batch" methods, see :ref:`ops`.

.. _operation_plugins:

Operation Plugins
=====================

The Operations object is extensible using a plugin system.  This system
allows one to add new ``op.<some_operation>`` methods at runtime.  The
steps to use this system are to first create a subclass of
:class:`.MigrateOperation`, register it using the :meth:`.Operations.register_operation`
class decorator, then build a default "implementation" function which is
established using the :meth:`.Operations.implementation_for` decorator.

.. versionadded:: 0.8.0 - the :class:`.Operations` class is now an
   open namespace that is extensible via the creation of new
   :class:`.MigrateOperation` subclasses.

Below we illustrate a very simple operation ``CreateSequenceOp`` which
will implement a new method ``op.create_sequence()`` for use in
migration scripts::

    from alembic.operations import Operations, MigrateOperation

    @Operations.register_operation("create_sequence")
    class CreateSequenceOp(MigrateOperation):
        """Create a SEQUENCE."""

        def __init__(self, sequence_name, **kw):
            self.sequence_name = sequence_name
            self.kw = kw

        @classmethod
        def create_sequence(cls, operations, sequence_name, **kw):
            """Issue a "CREATE SEQUENCE" instruction."""

            op = CreateSequenceOp(sequence_name, **kw)
            return operations.invoke(op)

Above, the ``CreateSequenceOp`` class represents a new operation that will
be available as ``op.create_sequence()``.  The reason the operation
is represented as a stateful class is so that an operation and a specific
set of arguments can be represented generically; the state can then correspond
to different kinds of operations, such as invoking the instruction against
a database, or autogenerating Python code for the operation into a
script.

In order to establish the migrate-script behavior of the new operation,
we use the :meth:`.Operations.implementation_for` decorator::

    @Operations.implementation_for(CreateSequenceOp)
    def create_sequence(operations, operation):
        operations.execute("CREATE SEQUENCE %s" % operation.sequence_name)

Above, we use the simplest possible technique of invoking our DDL, which
is just to call :meth:`.Operations.execute` with literal SQL.  If this is
all a custom operation needs, then this is fine.  However, options for
more comprehensive support include building out a custom SQL construct,
as documented at :ref:`sqlalchemy.ext.compiler_toplevel`.

With the above two steps, a migration script can now use a new method
``op.create_sequence()`` that will proxy to our object as a classmethod::

    def upgrade():
        op.create_sequence("my_sequence")

The registration of new operations only needs to occur in time for the
``env.py`` script to invoke :meth:`.MigrationContext.run_migrations`;
within the module level of the ``env.py`` script is sufficient.
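
Because the new directive is ultimately just a method on :class:`.Operations`,
it can also be invoked programmatically outside of a migration file; a
sketch, assuming a SQLAlchemy ``connection`` is already available::

    from alembic.migration import MigrationContext
    from alembic.operations import Operations

    ctx = MigrationContext.configure(connection)
    op = Operations(ctx)
    op.create_sequence("my_sequence")   # routes through CreateSequenceOp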

.. versionadded:: 0.8 - the migration operations available via the
   :class:`.Operations` class as well as the ``alembic.op`` namespace
   are now extensible using a plugin system.


.. _operation_objects:

Built-in Operation Objects
==============================

The migration operations present on :class:`.Operations` are themselves
delivered via operation objects that represent an operation and its
arguments.  All operations descend from the :class:`.MigrateOperation`
class, and are registered with the :class:`.Operations` class using
the :meth:`.Operations.register_operation` class decorator.  The
:class:`.MigrateOperation` objects also serve as the basis for how the
autogenerate system renders new migration scripts.

.. seealso::

    :ref:`operation_plugins`

    :ref:`customizing_revision`

The built-in operation objects are listed below.

.. _alembic.operations.ops.toplevel:

.. automodule:: alembic.operations.ops
    :members:
47 docs/build/api/overview.rst vendored Normal file
@@ -0,0 +1,47 @@
========
Overview
========

A visualization of the primary features of Alembic's internals is presented
in the following figure.  The module and class boxes do not list out
all the operations provided by each unit; only a small set of representative
elements intended to convey the primary purpose of each system.

.. image:: api_overview.png

The script runner for Alembic is present in the :ref:`alembic.config.toplevel` module.
This module produces a :class:`.Config` object and passes it to the
appropriate function in :ref:`alembic.command.toplevel`.  Functions within
:ref:`alembic.command.toplevel` will typically instantiate an
:class:`.ScriptDirectory` instance, which represents the collection of
version files, and an :class:`.EnvironmentContext`, which represents a
configurational object passed to the environment's ``env.py`` script.

Within the execution of ``env.py``, a :class:`.MigrationContext`
object is produced when the :meth:`.EnvironmentContext.configure`
method is called.  :class:`.MigrationContext` is the gateway to the database
for other parts of the application, and produces a :class:`.DefaultImpl`
object which does the actual database communication, and knows how to
create the specific SQL text of the various DDL directives such as
ALTER TABLE; :class:`.DefaultImpl` has subclasses that are per-database-backend.
In "offline" mode (e.g. ``--sql``), the :class:`.MigrationContext` will
produce SQL to a file output stream instead of a database.

During an upgrade or downgrade operation, a specific series of migration
scripts are invoked starting with the :class:`.MigrationContext` in conjunction
with the :class:`.ScriptDirectory`; the actual scripts themselves make use
of the :class:`.Operations` object, which provides the end-user interface to
specific database operations.  The :class:`.Operations` object is generated
based on a series of "operation directive" objects that are user-extensible,
and start out in the :ref:`alembic.operations.ops.toplevel` module.

Another prominent feature of Alembic is the "autogenerate" feature, which
produces new migration scripts that contain Python code.  The autogenerate
feature starts in :ref:`alembic.autogenerate.toplevel`, and is used exclusively
by the :func:`.alembic.command.revision` command when the ``--autogenerate``
flag is passed.  Autogenerate refers to the :class:`.MigrationContext`
and :class:`.DefaultImpl` in order to access database connectivity and
access per-backend rules for autogenerate comparisons.  It also makes use
of :ref:`alembic.operations.ops.toplevel` in order to represent the operations that
it will render into scripts.
20 docs/build/api/script.rst vendored Normal file
@@ -0,0 +1,20 @@
.. _alembic.script.toplevel:

================
Script Directory
================

The :class:`.ScriptDirectory` object provides programmatic access
to the Alembic version files present in the filesystem.

.. automodule:: alembic.script
    :members:

Revision
========

The :class:`.RevisionMap` object serves as the basis for revision
management, used exclusively by :class:`.ScriptDirectory`.

.. automodule:: alembic.script.revision
    :members:
BIN docs/build/api_overview.png vendored
Binary file not shown. Before: 63 KiB.
2652 docs/build/assets/api_overview.graffle vendored
File diff suppressed because it is too large.
38 docs/build/changelog.rst vendored
@@ -3,6 +3,44 @@
Changelog
==========

.. changelog::
    :version: 0.8.0

    .. change::
        :tags: feature, operations
        :tickets: 302

        The internal system for Alembic operations has been reworked to now
        build upon an extensible system of operation objects.  New operations
        can be added to the ``op.`` namespace, including that they are
        available in custom autogenerate schemes.

        .. seealso::

            :ref:`operation_plugins`

    .. change::
        :tags: feature, autogenerate
        :tickets: 301

        The internal system for autogenerate has been reworked to build upon
        the extensible system of operation objects present in
        :ticket:`302`.  As part of this change, autogenerate now produces
        a full object graph representing a list of migration scripts to
        be written as well as operation objects that will render all the
        Python code within them; a new hook
        :paramref:`.EnvironmentContext.configure.process_revision_directives`
        allows end-user code to fully customize what autogenerate will do,
        including not just full manipulation of the Python steps to take
        but also what file or files will be written and where.  It is also
        possible to write a system that reads an autogenerate stream and
        invokes it directly against a database without writing any files.

        .. seealso::

            :ref:`alembic.autogenerate.toplevel`


.. changelog::
    :version: 0.7.7
2 docs/build/cookbook.rst vendored
@@ -193,7 +193,7 @@ Sharing a Connection with a Series of Migration Commands and Environments
 =========================================================================

 It is often the case that an application will need to call upon a series
-of commands within :mod:`alembic.command`, where it would be advantageous
+of commands within :ref:`alembic.command.toplevel`, where it would be advantageous
 for all operations to proceed along a single transaction.  The connectivity
 for a migration is typically solely determined within the ``env.py`` script
 of a migration environment, which is called within the scope of a command.
19 docs/build/front.rst vendored
@@ -49,25 +49,19 @@ then proceed through the usage of this command.
 Dependencies
 ------------

-Alembic's install process will ensure that `SQLAlchemy <http://www.sqlalchemy.org>`_
+Alembic's install process will ensure that SQLAlchemy_
 is installed, in addition to other dependencies.  Alembic will work with
-SQLAlchemy as of version **0.7.3**.  The latest version of SQLAlchemy within
-the **0.7**, **0.8**, or more recent series is strongly recommended.
+SQLAlchemy as of version **0.7.3**, however more features are available with
+newer versions such as the 0.9 or 1.0 series.

 Alembic supports Python versions 2.6 and above.

-.. versionchanged:: 0.5.0
-    Support for SQLAlchemy 0.6 has been dropped.
-
-.. versionchanged:: 0.6.0
-    Now supporting Python 2.6 and above.
-
 Community
 =========

 Alembic is developed by `Mike Bayer <http://techspot.zzzeek.org>`_, and is
-loosely associated with the `SQLAlchemy <http://www.sqlalchemy.org/>`_ and `Pylons <http://www.pylonsproject.org>`_
-projects.
+loosely associated with the SQLAlchemy_, `Pylons <http://www.pylonsproject.org>`_,
+and `Openstack <http://www.openstack.org>`_ projects.

 User issues, discussion of potential bugs and features should be posted
 to the Alembic Google Group at `sqlalchemy-alembic <https://groups.google.com/group/sqlalchemy-alembic>`_.
@@ -78,3 +72,6 @@ Bugs
 ====
 Bugs and feature enhancements to Alembic should be reported on the `Bitbucket
 issue tracker <https://bitbucket.org/zzzeek/alembic/issues?status=new&status=open>`_.
+
+
+.. _SQLAlchemy: http://www.sqlalchemy.org
4
docs/build/index.rst
vendored
@ -6,7 +6,7 @@ Welcome to Alembic's documentation!
with the `SQLAlchemy <http://www.sqlalchemy.org>`_ Database Toolkit for Python.

.. toctree::
    :maxdepth: 2
    :maxdepth: 3

    front
    tutorial
@ -17,7 +17,7 @@ with the `SQLAlchemy <http://www.sqlalchemy.org>`_ Database Toolkit for Python.
    branches
    ops
    cookbook
    api
    api/index
    changelog

Indices and tables
8
docs/build/ops.rst
vendored
@ -18,7 +18,10 @@ Currently, ``alembic.op`` is a real Python module, populated
with individual proxies for each method on :class:`.Operations`,
so symbols can be imported safely from the ``alembic.op`` namespace.

A key design philosophy to the :mod:`alembic.operations` methods is that
The :class:`.Operations` system is also fully extensible.  See
:ref:`operation_plugins` for details on this.

A key design philosophy to the :ref:`alembic.operations.toplevel` methods is that
to the greatest degree possible, they internally generate the
appropriate SQLAlchemy metadata, typically involving
:class:`~sqlalchemy.schema.Table` and :class:`~sqlalchemy.schema.Constraint`
@ -36,6 +39,5 @@ circumstances they are called from an actual migration script, which
itself would be invoked by the :meth:`.EnvironmentContext.run_migrations`
method.

.. automodule:: alembic.operations
    :members:
    :members: Operations, BatchOperations
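The proxying described above is what allows a migration script to use the
module-level namespace directly; a minimal, illustrative script body (the
revision identifiers and table name are placeholders)::

    from alembic import op
    import sqlalchemy as sa

    # revision identifiers, used by Alembic (placeholders)
    revision = '1975ea83b712'
    down_revision = None

    def upgrade():
        op.add_column('account', sa.Column('last_seen', sa.DateTime(), nullable=True))

    def downgrade():
        op.drop_column('account', 'last_seen')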
251
tests/_autogen_fixtures.py
Normal file
@ -0,0 +1,251 @@
from sqlalchemy import MetaData, Column, Table, Integer, String, Text, \
    Numeric, CHAR, ForeignKey, Index, UniqueConstraint, CheckConstraint, text
from sqlalchemy.engine.reflection import Inspector

from alembic import autogenerate
from alembic.migration import MigrationContext
from alembic.testing import config
from alembic.testing.env import staging_env, clear_staging_env
from alembic.testing import eq_
from alembic.ddl.base import _fk_spec

names_in_this_test = set()

from sqlalchemy import event


@event.listens_for(Table, "after_parent_attach")
def new_table(table, parent):
    names_in_this_test.add(table.name)


def _default_include_object(obj, name, type_, reflected, compare_to):
    if type_ == "table":
        return name in names_in_this_test
    else:
        return True

_default_object_filters = [
    _default_include_object
]


class ModelOne(object):
    __requires__ = ('unique_constraint_reflection', )

    schema = None

    @classmethod
    def _get_db_schema(cls):
        schema = cls.schema

        m = MetaData(schema=schema)

        Table('user', m,
              Column('id', Integer, primary_key=True),
              Column('name', String(50)),
              Column('a1', Text),
              Column("pw", String(50)),
              Index('pw_idx', 'pw')
              )

        Table('address', m,
              Column('id', Integer, primary_key=True),
              Column('email_address', String(100), nullable=False),
              )

        Table('order', m,
              Column('order_id', Integer, primary_key=True),
              Column("amount", Numeric(8, 2), nullable=False,
                     server_default=text("0")),
              CheckConstraint('amount >= 0', name='ck_order_amount')
              )

        Table('extra', m,
              Column("x", CHAR),
              Column('uid', Integer, ForeignKey('user.id'))
              )

        return m

    @classmethod
    def _get_model_schema(cls):
        schema = cls.schema

        m = MetaData(schema=schema)

        Table('user', m,
              Column('id', Integer, primary_key=True),
              Column('name', String(50), nullable=False),
              Column('a1', Text, server_default="x")
              )

        Table('address', m,
              Column('id', Integer, primary_key=True),
              Column('email_address', String(100), nullable=False),
              Column('street', String(50)),
              UniqueConstraint('email_address', name="uq_email")
              )

        Table('order', m,
              Column('order_id', Integer, primary_key=True),
              Column('amount', Numeric(10, 2), nullable=True,
                     server_default=text("0")),
              Column('user_id', Integer, ForeignKey('user.id')),
              CheckConstraint('amount > -1', name='ck_order_amount'),
              )

        Table('item', m,
              Column('id', Integer, primary_key=True),
              Column('description', String(100)),
              Column('order_id', Integer, ForeignKey('order.order_id')),
              CheckConstraint('len(description) > 5')
              )
        return m


class _ComparesFKs(object):
    def _assert_fk_diff(
            self, diff, type_, source_table, source_columns,
            target_table, target_columns, name=None, conditional_name=None,
            source_schema=None):
        # the public API for ForeignKeyConstraint was not very rich
        # in 0.7, 0.8, so here we use the well-known but slightly
        # private API to get at its elements
        (fk_source_schema, fk_source_table,
         fk_source_columns, fk_target_schema, fk_target_table,
         fk_target_columns) = _fk_spec(diff[1])

        eq_(diff[0], type_)
        eq_(fk_source_table, source_table)
        eq_(fk_source_columns, source_columns)
        eq_(fk_target_table, target_table)
        eq_(fk_source_schema, source_schema)

        eq_([elem.column.name for elem in diff[1].elements],
            target_columns)
        if conditional_name is not None:
            if config.requirements.no_fk_names.enabled:
                eq_(diff[1].name, None)
            elif conditional_name == 'servergenerated':
                fks = Inspector.from_engine(self.bind).\
                    get_foreign_keys(source_table)
                server_fk_name = fks[0]['name']
                eq_(diff[1].name, server_fk_name)
            else:
                eq_(diff[1].name, conditional_name)
        else:
            eq_(diff[1].name, name)


class AutogenTest(_ComparesFKs):

    def _flatten_diffs(self, diffs):
        for d in diffs:
            if isinstance(d, list):
                for fd in self._flatten_diffs(d):
                    yield fd
            else:
                yield d

    @classmethod
    def _get_bind(cls):
        return config.db

    configure_opts = {}

    @classmethod
    def setup_class(cls):
        staging_env()
        cls.bind = cls._get_bind()
        cls.m1 = cls._get_db_schema()
        cls.m1.create_all(cls.bind)
        cls.m2 = cls._get_model_schema()

    @classmethod
    def teardown_class(cls):
        cls.m1.drop_all(cls.bind)
        clear_staging_env()

    def setUp(self):
        self.conn = conn = self.bind.connect()
        ctx_opts = {
            'compare_type': True,
            'compare_server_default': True,
            'target_metadata': self.m2,
            'upgrade_token': "upgrades",
            'downgrade_token': "downgrades",
            'alembic_module_prefix': 'op.',
            'sqlalchemy_module_prefix': 'sa.',
        }
        if self.configure_opts:
            ctx_opts.update(self.configure_opts)
        self.context = context = MigrationContext.configure(
            connection=conn,
            opts=ctx_opts
        )

        connection = context.bind
        self.autogen_context = {
            'imports': set(),
            'connection': connection,
            'dialect': connection.dialect,
            'context': context
        }

    def tearDown(self):
        self.conn.close()


class AutogenFixtureTest(_ComparesFKs):

    def _fixture(
            self, m1, m2, include_schemas=False,
            opts=None, object_filters=_default_object_filters):
        self.metadata, model_metadata = m1, m2
        self.metadata.create_all(self.bind)

        with self.bind.connect() as conn:
            ctx_opts = {
                'compare_type': True,
                'compare_server_default': True,
                'target_metadata': model_metadata,
                'upgrade_token': "upgrades",
                'downgrade_token': "downgrades",
                'alembic_module_prefix': 'op.',
                'sqlalchemy_module_prefix': 'sa.',
            }
            if opts:
                ctx_opts.update(opts)
            self.context = context = MigrationContext.configure(
                connection=conn,
                opts=ctx_opts
            )

            connection = context.bind
            autogen_context = {
                'imports': set(),
                'connection': connection,
                'dialect': connection.dialect,
                'context': context,
                'metadata': model_metadata,
                'object_filters': object_filters,
                'include_schemas': include_schemas
            }
            diffs = []
            autogenerate._produce_net_changes(
                autogen_context, diffs
            )
            return diffs

    reports_unnamed_constraints = False

    def setUp(self):
        staging_env()
        self.bind = config.db

    def tearDown(self):
        if hasattr(self, 'metadata'):
            self.metadata.drop_all(self.bind)
        clear_staging_env()
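The fixture above exercises the same comparison flow that the public
``compare_metadata()`` function exposes; a standalone sketch (the engine
URL and empty metadata are placeholders)::

    from sqlalchemy import create_engine, MetaData
    from alembic.migration import MigrationContext
    from alembic import autogenerate

    engine = create_engine("sqlite://")
    model_metadata = MetaData()

    with engine.connect() as conn:
        mc = MigrationContext.configure(conn)
        # a list of diff tuples such as ('add_table', Table(...))
        diffs = autogenerate.compare_metadata(mc, model_metadata)
        print(diffs)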
328
tests/test_autogen_composition.py
Normal file
@ -0,0 +1,328 @@
import re

from alembic import autogenerate
from alembic.migration import MigrationContext
from alembic.testing import TestBase
from alembic.testing import eq_

from ._autogen_fixtures import AutogenTest, ModelOne, _default_include_object


class AutogenerateDiffTest(ModelOne, AutogenTest, TestBase):
    __only_on__ = 'sqlite'

    def test_render_nothing(self):
        context = MigrationContext.configure(
            connection=self.bind.connect(),
            opts={
                'compare_type': True,
                'compare_server_default': True,
                'target_metadata': self.m1,
                'upgrade_token': "upgrades",
                'downgrade_token': "downgrades",
            }
        )
        template_args = {}
        autogenerate._render_migration_diffs(context, template_args, set())

        eq_(re.sub(r"u'", "'", template_args['upgrades']),
            """### commands auto generated by Alembic - please adjust! ###
    pass
    ### end Alembic commands ###""")
        eq_(re.sub(r"u'", "'", template_args['downgrades']),
            """### commands auto generated by Alembic - please adjust! ###
    pass
    ### end Alembic commands ###""")

    def test_render_nothing_batch(self):
        context = MigrationContext.configure(
            connection=self.bind.connect(),
            opts={
                'compare_type': True,
                'compare_server_default': True,
                'target_metadata': self.m1,
                'upgrade_token': "upgrades",
                'downgrade_token': "downgrades",
                'alembic_module_prefix': 'op.',
                'sqlalchemy_module_prefix': 'sa.',
                'render_as_batch': True,
                'include_symbol': lambda name, schema: False
            }
        )
        template_args = {}
        autogenerate._render_migration_diffs(
            context, template_args, set(),

        )
        eq_(re.sub(r"u'", "'", template_args['upgrades']),
            """### commands auto generated by Alembic - please adjust! ###
    pass
    ### end Alembic commands ###""")
        eq_(re.sub(r"u'", "'", template_args['downgrades']),
            """### commands auto generated by Alembic - please adjust! ###
    pass
    ### end Alembic commands ###""")

    def test_render_diffs_standard(self):
        """test a full render including indentation"""

        template_args = {}
        autogenerate._render_migration_diffs(
            self.context, template_args, set())
        eq_(re.sub(r"u'", "'", template_args['upgrades']),
            """### commands auto generated by Alembic - please adjust! ###
    op.create_table('item',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('description', sa.String(length=100), nullable=True),
    sa.Column('order_id', sa.Integer(), nullable=True),
    sa.CheckConstraint('len(description) > 5'),
    sa.ForeignKeyConstraint(['order_id'], ['order.order_id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.drop_table('extra')
    op.add_column('address', sa.Column('street', sa.String(length=50), \
nullable=True))
    op.create_unique_constraint('uq_email', 'address', ['email_address'])
    op.add_column('order', sa.Column('user_id', sa.Integer(), nullable=True))
    op.alter_column('order', 'amount',
               existing_type=sa.NUMERIC(precision=8, scale=2),
               type_=sa.Numeric(precision=10, scale=2),
               nullable=True,
               existing_server_default=sa.text('0'))
    op.create_foreign_key(None, 'order', 'user', ['user_id'], ['id'])
    op.alter_column('user', 'a1',
               existing_type=sa.TEXT(),
               server_default='x',
               existing_nullable=True)
    op.alter_column('user', 'name',
               existing_type=sa.VARCHAR(length=50),
               nullable=False)
    op.drop_index('pw_idx', table_name='user')
    op.drop_column('user', 'pw')
    ### end Alembic commands ###""")

        eq_(re.sub(r"u'", "'", template_args['downgrades']),
            """### commands auto generated by Alembic - please adjust! ###
    op.add_column('user', sa.Column('pw', sa.VARCHAR(length=50), \
nullable=True))
    op.create_index('pw_idx', 'user', ['pw'], unique=False)
    op.alter_column('user', 'name',
               existing_type=sa.VARCHAR(length=50),
               nullable=True)
    op.alter_column('user', 'a1',
               existing_type=sa.TEXT(),
               server_default=None,
               existing_nullable=True)
    op.drop_constraint(None, 'order', type_='foreignkey')
    op.alter_column('order', 'amount',
               existing_type=sa.Numeric(precision=10, scale=2),
               type_=sa.NUMERIC(precision=8, scale=2),
               nullable=False,
               existing_server_default=sa.text('0'))
    op.drop_column('order', 'user_id')
    op.drop_constraint('uq_email', 'address', type_='unique')
    op.drop_column('address', 'street')
    op.create_table('extra',
    sa.Column('x', sa.CHAR(), nullable=True),
    sa.Column('uid', sa.INTEGER(), nullable=True),
    sa.ForeignKeyConstraint(['uid'], ['user.id'], )
    )
    op.drop_table('item')
    ### end Alembic commands ###""")

    def test_render_diffs_batch(self):
        """test a full render in batch mode including indentation"""

        template_args = {}
        self.context.opts['render_as_batch'] = True
        autogenerate._render_migration_diffs(
            self.context, template_args, set())

        eq_(re.sub(r"u'", "'", template_args['upgrades']),
            """### commands auto generated by Alembic - please adjust! ###
    op.create_table('item',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('description', sa.String(length=100), nullable=True),
    sa.Column('order_id', sa.Integer(), nullable=True),
    sa.CheckConstraint('len(description) > 5'),
    sa.ForeignKeyConstraint(['order_id'], ['order.order_id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.drop_table('extra')
    with op.batch_alter_table('address', schema=None) as batch_op:
        batch_op.add_column(sa.Column('street', sa.String(length=50), nullable=True))
        batch_op.create_unique_constraint('uq_email', ['email_address'])

    with op.batch_alter_table('order', schema=None) as batch_op:
        batch_op.add_column(sa.Column('user_id', sa.Integer(), nullable=True))
        batch_op.alter_column('amount',
               existing_type=sa.NUMERIC(precision=8, scale=2),
               type_=sa.Numeric(precision=10, scale=2),
               nullable=True,
               existing_server_default=sa.text('0'))
        batch_op.create_foreign_key(None, 'order', 'user', ['user_id'], ['id'])

    with op.batch_alter_table('user', schema=None) as batch_op:
        batch_op.alter_column('a1',
               existing_type=sa.TEXT(),
               server_default='x',
               existing_nullable=True)
        batch_op.alter_column('name',
               existing_type=sa.VARCHAR(length=50),
               nullable=False)
        batch_op.drop_index('pw_idx')
        batch_op.drop_column('pw')

    ### end Alembic commands ###""")

        eq_(re.sub(r"u'", "'", template_args['downgrades']),
            """### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('user', schema=None) as batch_op:
        batch_op.add_column(sa.Column('pw', sa.VARCHAR(length=50), nullable=True))
        batch_op.create_index('pw_idx', ['pw'], unique=False)
        batch_op.alter_column('name',
               existing_type=sa.VARCHAR(length=50),
               nullable=True)
        batch_op.alter_column('a1',
               existing_type=sa.TEXT(),
               server_default=None,
               existing_nullable=True)

    with op.batch_alter_table('order', schema=None) as batch_op:
        batch_op.drop_constraint(None, type_='foreignkey')
        batch_op.alter_column('amount',
               existing_type=sa.Numeric(precision=10, scale=2),
               type_=sa.NUMERIC(precision=8, scale=2),
               nullable=False,
               existing_server_default=sa.text('0'))
        batch_op.drop_column('user_id')

    with op.batch_alter_table('address', schema=None) as batch_op:
        batch_op.drop_constraint('uq_email', type_='unique')
        batch_op.drop_column('street')

    op.create_table('extra',
    sa.Column('x', sa.CHAR(), nullable=True),
    sa.Column('uid', sa.INTEGER(), nullable=True),
    sa.ForeignKeyConstraint(['uid'], ['user.id'], )
    )
    op.drop_table('item')
    ### end Alembic commands ###""")


class AutogenerateDiffTestWSchema(ModelOne, AutogenTest, TestBase):
    __only_on__ = 'postgresql'
    schema = "test_schema"

    def test_render_nothing(self):
        context = MigrationContext.configure(
            connection=self.bind.connect(),
            opts={
                'compare_type': True,
                'compare_server_default': True,
                'target_metadata': self.m1,
                'upgrade_token': "upgrades",
                'downgrade_token': "downgrades",
                'alembic_module_prefix': 'op.',
                'sqlalchemy_module_prefix': 'sa.',
                'include_symbol': lambda name, schema: False
            }
        )
        template_args = {}
        autogenerate._render_migration_diffs(
            context, template_args, set(),

        )
        eq_(re.sub(r"u'", "'", template_args['upgrades']),
            """### commands auto generated by Alembic - please adjust! ###
    pass
    ### end Alembic commands ###""")
        eq_(re.sub(r"u'", "'", template_args['downgrades']),
            """### commands auto generated by Alembic - please adjust! ###
    pass
    ### end Alembic commands ###""")

    def test_render_diffs_extras(self):
        """test a full render including indentation (include and schema)"""

        template_args = {}
        self.context.opts.update({
            'include_object': _default_include_object,
            'include_schemas': True
        })
        autogenerate._render_migration_diffs(
            self.context, template_args, set()
        )

        eq_(re.sub(r"u'", "'", template_args['upgrades']),
            """### commands auto generated by Alembic - please adjust! ###
    op.create_table('item',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('description', sa.String(length=100), nullable=True),
    sa.Column('order_id', sa.Integer(), nullable=True),
    sa.CheckConstraint('len(description) > 5'),
    sa.ForeignKeyConstraint(['order_id'], ['%(schema)s.order.order_id'], ),
    sa.PrimaryKeyConstraint('id'),
    schema='%(schema)s'
    )
    op.drop_table('extra', schema='%(schema)s')
    op.add_column('address', sa.Column('street', sa.String(length=50), \
nullable=True), schema='%(schema)s')
    op.create_unique_constraint('uq_email', 'address', ['email_address'], \
schema='test_schema')
    op.add_column('order', sa.Column('user_id', sa.Integer(), nullable=True), \
schema='%(schema)s')
    op.alter_column('order', 'amount',
               existing_type=sa.NUMERIC(precision=8, scale=2),
               type_=sa.Numeric(precision=10, scale=2),
               nullable=True,
               existing_server_default=sa.text('0'),
               schema='%(schema)s')
    op.create_foreign_key(None, 'order', 'user', ['user_id'], ['id'], \
source_schema='%(schema)s', referent_schema='%(schema)s')
    op.alter_column('user', 'a1',
               existing_type=sa.TEXT(),
               server_default='x',
               existing_nullable=True,
               schema='%(schema)s')
    op.alter_column('user', 'name',
               existing_type=sa.VARCHAR(length=50),
               nullable=False,
               schema='%(schema)s')
    op.drop_index('pw_idx', table_name='user', schema='test_schema')
    op.drop_column('user', 'pw', schema='%(schema)s')
    ### end Alembic commands ###""" % {"schema": self.schema})

        eq_(re.sub(r"u'", "'", template_args['downgrades']),
            """### commands auto generated by Alembic - please adjust! ###
    op.add_column('user', sa.Column('pw', sa.VARCHAR(length=50), \
autoincrement=False, nullable=True), schema='%(schema)s')
    op.create_index('pw_idx', 'user', ['pw'], unique=False, schema='%(schema)s')
    op.alter_column('user', 'name',
               existing_type=sa.VARCHAR(length=50),
               nullable=True,
               schema='%(schema)s')
    op.alter_column('user', 'a1',
               existing_type=sa.TEXT(),
               server_default=None,
               existing_nullable=True,
               schema='%(schema)s')
    op.drop_constraint(None, 'order', schema='%(schema)s', type_='foreignkey')
    op.alter_column('order', 'amount',
               existing_type=sa.Numeric(precision=10, scale=2),
               type_=sa.NUMERIC(precision=8, scale=2),
               nullable=False,
               existing_server_default=sa.text('0'),
               schema='%(schema)s')
    op.drop_column('order', 'user_id', schema='%(schema)s')
    op.drop_constraint('uq_email', 'address', schema='test_schema', type_='unique')
    op.drop_column('address', 'street', schema='%(schema)s')
    op.create_table('extra',
    sa.Column('x', sa.CHAR(length=1), autoincrement=False, nullable=True),
    sa.Column('uid', sa.INTEGER(), autoincrement=False, nullable=True),
    sa.ForeignKeyConstraint(['uid'], ['%(schema)s.user.id'], \
name='extra_uid_fkey'),
    schema='%(schema)s'
    )
    op.drop_table('item', schema='%(schema)s')
    ### end Alembic commands ###""" % {"schema": self.schema})
@ -1,4 +1,3 @@
import re
import sys

from sqlalchemy import MetaData, Column, Table, Integer, String, Text, \
@ -13,170 +12,13 @@ from alembic.testing import TestBase
from alembic.testing import config
from alembic.testing import assert_raises_message
from alembic.testing.mock import Mock
from alembic.testing.env import staging_env, clear_staging_env
from alembic.testing import eq_
from alembic.ddl.base import _fk_spec
from alembic.util import CommandError
from ._autogen_fixtures import \
    AutogenTest, AutogenFixtureTest, _default_object_filters

py3k = sys.version_info >= (3, )

names_in_this_test = set()


def _default_include_object(obj, name, type_, reflected, compare_to):
    if type_ == "table":
        return name in names_in_this_test
    else:
        return True

_default_object_filters = [
    _default_include_object
]
from sqlalchemy import event


@event.listens_for(Table, "after_parent_attach")
def new_table(table, parent):
    names_in_this_test.add(table.name)


class _ComparesFKs(object):
    def _assert_fk_diff(
            self, diff, type_, source_table, source_columns,
            target_table, target_columns, name=None, conditional_name=None,
            source_schema=None):
        # the public API for ForeignKeyConstraint was not very rich
        # in 0.7, 0.8, so here we use the well-known but slightly
        # private API to get at its elements
        (fk_source_schema, fk_source_table,
         fk_source_columns, fk_target_schema, fk_target_table,
         fk_target_columns) = _fk_spec(diff[1])

        eq_(diff[0], type_)
        eq_(fk_source_table, source_table)
        eq_(fk_source_columns, source_columns)
        eq_(fk_target_table, target_table)
        eq_(fk_source_schema, source_schema)

        eq_([elem.column.name for elem in diff[1].elements],
            target_columns)
        if conditional_name is not None:
            if config.requirements.no_fk_names.enabled:
                eq_(diff[1].name, None)
            elif conditional_name == 'servergenerated':
                fks = Inspector.from_engine(self.bind).\
                    get_foreign_keys(source_table)
                server_fk_name = fks[0]['name']
                eq_(diff[1].name, server_fk_name)
            else:
                eq_(diff[1].name, conditional_name)
        else:
            eq_(diff[1].name, name)


class AutogenTest(_ComparesFKs):

    @classmethod
    def _get_bind(cls):
        return config.db

    configure_opts = {}

    @classmethod
    def setup_class(cls):
        staging_env()
        cls.bind = cls._get_bind()
        cls.m1 = cls._get_db_schema()
        cls.m1.create_all(cls.bind)
        cls.m2 = cls._get_model_schema()

    @classmethod
    def teardown_class(cls):
        cls.m1.drop_all(cls.bind)
        clear_staging_env()

    def setUp(self):
        self.conn = conn = self.bind.connect()
        ctx_opts = {
            'compare_type': True,
            'compare_server_default': True,
            'target_metadata': self.m2,
            'upgrade_token': "upgrades",
            'downgrade_token': "downgrades",
            'alembic_module_prefix': 'op.',
            'sqlalchemy_module_prefix': 'sa.',
        }
        if self.configure_opts:
            ctx_opts.update(self.configure_opts)
        self.context = context = MigrationContext.configure(
            connection=conn,
            opts=ctx_opts
        )

        connection = context.bind
        self.autogen_context = {
            'imports': set(),
            'connection': connection,
            'dialect': connection.dialect,
            'context': context
        }

    def tearDown(self):
        self.conn.close()


class AutogenFixtureTest(_ComparesFKs):

    def _fixture(
            self, m1, m2, include_schemas=False,
            opts=None, object_filters=_default_object_filters):
        self.metadata, model_metadata = m1, m2
        self.metadata.create_all(self.bind)

        with self.bind.connect() as conn:
            ctx_opts = {
                'compare_type': True,
                'compare_server_default': True,
                'target_metadata': model_metadata,
                'upgrade_token': "upgrades",
                'downgrade_token': "downgrades",
                'alembic_module_prefix': 'op.',
                'sqlalchemy_module_prefix': 'sa.',
            }
            if opts:
                ctx_opts.update(opts)
            self.context = context = MigrationContext.configure(
                connection=conn,
                opts=ctx_opts
            )

            connection = context.bind
            autogen_context = {
                'imports': set(),
                'connection': connection,
                'dialect': connection.dialect,
                'context': context
            }
            diffs = []
            autogenerate._produce_net_changes(
                connection, model_metadata, diffs,
                autogen_context,
                object_filters=object_filters,
                include_schemas=include_schemas
            )
            return diffs

    reports_unnamed_constraints = False

    def setUp(self):
        staging_env()
        self.bind = config.db

    def tearDown(self):
        if hasattr(self, 'metadata'):
            self.metadata.drop_all(self.bind)
        clear_staging_env()


class AutogenCrossSchemaTest(AutogenTest, TestBase):
    __only_on__ = 'postgresql'
@ -221,8 +63,6 @@ class AutogenCrossSchemaTest(AutogenTest, TestBase):
        return m

    def test_default_schema_omitted_upgrade(self):
        metadata = self.m2
        connection = self.context.bind
        diffs = []

        def include_object(obj, name, type_, reflected, compare_to):
@ -230,17 +70,17 @@ class AutogenCrossSchemaTest(AutogenTest, TestBase):
                return name == "t3"
            else:
                return True
        autogenerate._produce_net_changes(connection, metadata, diffs,
                                          self.autogen_context,
                                          object_filters=[include_object],
                                          include_schemas=True
                                          )
        self.autogen_context.update({
            'object_filters': [include_object],
            'include_schemas': True,
            'metadata': self.m2
        })
        autogenerate._produce_net_changes(self.autogen_context, diffs)

        eq_(diffs[0][0], "add_table")
        eq_(diffs[0][1].schema, None)

    def test_alt_schema_included_upgrade(self):
        metadata = self.m2
        connection = self.context.bind
        diffs = []

        def include_object(obj, name, type_, reflected, compare_to):
@ -248,17 +88,18 @@ class AutogenCrossSchemaTest(AutogenTest, TestBase):
                return name == "t4"
            else:
                return True
        autogenerate._produce_net_changes(connection, metadata, diffs,
                                          self.autogen_context,
                                          object_filters=[include_object],
                                          include_schemas=True
                                          )

        self.autogen_context.update({
            'object_filters': [include_object],
            'include_schemas': True,
            'metadata': self.m2
        })
        autogenerate._produce_net_changes(self.autogen_context, diffs)

        eq_(diffs[0][0], "add_table")
        eq_(diffs[0][1].schema, config.test_schema)

    def test_default_schema_omitted_downgrade(self):
        metadata = self.m2
        connection = self.context.bind
        diffs = []

        def include_object(obj, name, type_, reflected, compare_to):
@ -266,17 +107,17 @@ class AutogenCrossSchemaTest(AutogenTest, TestBase):
                return name == "t1"
            else:
                return True
        autogenerate._produce_net_changes(connection, metadata, diffs,
                                          self.autogen_context,
                                          object_filters=[include_object],
                                          include_schemas=True
                                          )
        self.autogen_context.update({
            'object_filters': [include_object],
            'include_schemas': True,
            'metadata': self.m2
        })
        autogenerate._produce_net_changes(self.autogen_context, diffs)

        eq_(diffs[0][0], "remove_table")
        eq_(diffs[0][1].schema, None)

    def test_alt_schema_included_downgrade(self):
        metadata = self.m2
        connection = self.context.bind
        diffs = []

        def include_object(obj, name, type_, reflected, compare_to):
@ -284,11 +125,12 @@ class AutogenCrossSchemaTest(AutogenTest, TestBase):
                return name == "t2"
            else:
                return True
        autogenerate._produce_net_changes(connection, metadata, diffs,
                                          self.autogen_context,
                                          object_filters=[include_object],
                                          include_schemas=True
                                          )
        self.autogen_context.update({
            'object_filters': [include_object],
            'include_schemas': True,
            'metadata': self.m2
        })
        autogenerate._produce_net_changes(self.autogen_context, diffs)
        eq_(diffs[0][0], "remove_table")
        eq_(diffs[0][1].schema, config.test_schema)
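The pattern repeated throughout these hunks is the mechanical part of the
refactor: parameters formerly passed positionally to the private
``_produce_net_changes()`` now travel inside the ``autogen_context``
dictionary itself, e.g.::

    # new private-API calling convention, exactly as exercised above
    autogen_context.update({
        'metadata': model_metadata,
        'object_filters': [include_object],
        'include_schemas': True,
    })
    diffs = []
    autogenerate._produce_net_changes(autogen_context, diffs)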
@ -426,12 +268,12 @@ class AutogenerateDiffTest(ModelOne, AutogenTest, TestBase):
        """test generation of diff rules"""

        metadata = self.m2
        connection = self.context.bind
        diffs = []
        ctx = self.autogen_context.copy()
        ctx['metadata'] = self.m2
        ctx['object_filters'] = _default_object_filters
        autogenerate._produce_net_changes(
            connection, metadata, diffs,
            self.autogen_context,
            object_filters=_default_object_filters,
            ctx, diffs
        )

        eq_(
@ -484,228 +326,31 @@ class AutogenerateDiffTest(ModelOne, AutogenTest, TestBase):
        eq_(diffs[10][0], 'remove_column')
        eq_(diffs[10][3].name, 'pw')

    def test_render_nothing(self):
        context = MigrationContext.configure(
            connection=self.bind.connect(),
            opts={
                'compare_type': True,
                'compare_server_default': True,
                'target_metadata': self.m1,
                'upgrade_token': "upgrades",
                'downgrade_token': "downgrades",
            }
        )
        template_args = {}
        autogenerate._produce_migration_diffs(context, template_args, set())

        eq_(re.sub(r"u'", "'", template_args['upgrades']),
            """### commands auto generated by Alembic - please adjust! ###
    pass
    ### end Alembic commands ###""")
        eq_(re.sub(r"u'", "'", template_args['downgrades']),
            """### commands auto generated by Alembic - please adjust! ###
    pass
    ### end Alembic commands ###""")

    def test_render_nothing_batch(self):
        context = MigrationContext.configure(
            connection=self.bind.connect(),
            opts={
                'compare_type': True,
                'compare_server_default': True,
                'target_metadata': self.m1,
                'upgrade_token': "upgrades",
                'downgrade_token': "downgrades",
                'alembic_module_prefix': 'op.',
                'sqlalchemy_module_prefix': 'sa.',
                'render_as_batch': True
            }
        )
        template_args = {}
        autogenerate._produce_migration_diffs(
            context, template_args, set(),
            include_symbol=lambda name, schema: False
        )
        eq_(re.sub(r"u'", "'", template_args['upgrades']),
            """### commands auto generated by Alembic - please adjust! ###
    pass
    ### end Alembic commands ###""")
        eq_(re.sub(r"u'", "'", template_args['downgrades']),
            """### commands auto generated by Alembic - please adjust! ###
    pass
    ### end Alembic commands ###""")

    def test_render_diffs_standard(self):
        """test a full render including indentation"""

        template_args = {}
        autogenerate._produce_migration_diffs(
            self.context, template_args, set())
        eq_(re.sub(r"u'", "'", template_args['upgrades']),
            """### commands auto generated by Alembic - please adjust! ###
    op.create_table('item',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('description', sa.String(length=100), nullable=True),
    sa.Column('order_id', sa.Integer(), nullable=True),
    sa.CheckConstraint('len(description) > 5'),
    sa.ForeignKeyConstraint(['order_id'], ['order.order_id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.drop_table('extra')
    op.add_column('address', sa.Column('street', sa.String(length=50), \
nullable=True))
    op.create_unique_constraint('uq_email', 'address', ['email_address'])
    op.add_column('order', sa.Column('user_id', sa.Integer(), nullable=True))
    op.alter_column('order', 'amount',
               existing_type=sa.NUMERIC(precision=8, scale=2),
               type_=sa.Numeric(precision=10, scale=2),
               nullable=True,
               existing_server_default=sa.text('0'))
    op.create_foreign_key(None, 'order', 'user', ['user_id'], ['id'])
    op.alter_column('user', 'a1',
               existing_type=sa.TEXT(),
               server_default='x',
               existing_nullable=True)
    op.alter_column('user', 'name',
               existing_type=sa.VARCHAR(length=50),
               nullable=False)
    op.drop_index('pw_idx', table_name='user')
    op.drop_column('user', 'pw')
    ### end Alembic commands ###""")

        eq_(re.sub(r"u'", "'", template_args['downgrades']),
            """### commands auto generated by Alembic - please adjust! ###
    op.add_column('user', sa.Column('pw', sa.VARCHAR(length=50), \
nullable=True))
    op.create_index('pw_idx', 'user', ['pw'], unique=False)
    op.alter_column('user', 'name',
               existing_type=sa.VARCHAR(length=50),
               nullable=True)
    op.alter_column('user', 'a1',
               existing_type=sa.TEXT(),
               server_default=None,
               existing_nullable=True)
    op.drop_constraint(None, 'order', type_='foreignkey')
    op.alter_column('order', 'amount',
               existing_type=sa.Numeric(precision=10, scale=2),
               type_=sa.NUMERIC(precision=8, scale=2),
               nullable=False,
               existing_server_default=sa.text('0'))
    op.drop_column('order', 'user_id')
    op.drop_constraint('uq_email', 'address', type_='unique')
    op.drop_column('address', 'street')
    op.create_table('extra',
    sa.Column('x', sa.CHAR(), nullable=True),
    sa.Column('uid', sa.INTEGER(), nullable=True),
    sa.ForeignKeyConstraint(['uid'], ['user.id'], )
    )
    op.drop_table('item')
    ### end Alembic commands ###""")

    def test_render_diffs_batch(self):
        """test a full render in batch mode including indentation"""

        template_args = {}
        self.context.opts['render_as_batch'] = True
        autogenerate._produce_migration_diffs(
            self.context, template_args, set())

        eq_(re.sub(r"u'", "'", template_args['upgrades']),
            """### commands auto generated by Alembic - please adjust! ###
    op.create_table('item',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('description', sa.String(length=100), nullable=True),
    sa.Column('order_id', sa.Integer(), nullable=True),
    sa.CheckConstraint('len(description) > 5'),
    sa.ForeignKeyConstraint(['order_id'], ['order.order_id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.drop_table('extra')
    with op.batch_alter_table('address', schema=None) as batch_op:
        batch_op.add_column(sa.Column('street', sa.String(length=50), nullable=True))
        batch_op.create_unique_constraint('uq_email', ['email_address'])

    with op.batch_alter_table('order', schema=None) as batch_op:
        batch_op.add_column(sa.Column('user_id', sa.Integer(), nullable=True))
        batch_op.alter_column('amount',
               existing_type=sa.NUMERIC(precision=8, scale=2),
               type_=sa.Numeric(precision=10, scale=2),
               nullable=True,
               existing_server_default=sa.text('0'))
        batch_op.create_foreign_key(None, 'order', 'user', ['user_id'], ['id'])

    with op.batch_alter_table('user', schema=None) as batch_op:
        batch_op.alter_column('a1',
               existing_type=sa.TEXT(),
               server_default='x',
               existing_nullable=True)
        batch_op.alter_column('name',
               existing_type=sa.VARCHAR(length=50),
               nullable=False)
        batch_op.drop_index('pw_idx')
        batch_op.drop_column('pw')

    ### end Alembic commands ###""")

        eq_(re.sub(r"u'", "'", template_args['downgrades']),
            """### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('user', schema=None) as batch_op:
        batch_op.add_column(sa.Column('pw', sa.VARCHAR(length=50), nullable=True))
        batch_op.create_index('pw_idx', ['pw'], unique=False)
        batch_op.alter_column('name',
               existing_type=sa.VARCHAR(length=50),
               nullable=True)
        batch_op.alter_column('a1',
               existing_type=sa.TEXT(),
               server_default=None,
               existing_nullable=True)

    with op.batch_alter_table('order', schema=None) as batch_op:
        batch_op.drop_constraint(None, type_='foreignkey')
        batch_op.alter_column('amount',
               existing_type=sa.Numeric(precision=10, scale=2),
               type_=sa.NUMERIC(precision=8, scale=2),
               nullable=False,
               existing_server_default=sa.text('0'))
        batch_op.drop_column('user_id')

    with op.batch_alter_table('address', schema=None) as batch_op:
        batch_op.drop_constraint('uq_email', type_='unique')
        batch_op.drop_column('street')

    op.create_table('extra',
    sa.Column('x', sa.CHAR(), nullable=True),
    sa.Column('uid', sa.INTEGER(), nullable=True),
    sa.ForeignKeyConstraint(['uid'], ['user.id'], )
    )
    op.drop_table('item')
    ### end Alembic commands ###""")

    def test_include_symbol(self):

        diffs = []

        def include_symbol(name, schema=None):
            return name in ('address', 'order')

        context = MigrationContext.configure(
            connection=self.bind.connect(),
            opts={
                'compare_type': True,
                'compare_server_default': True,
                'target_metadata': self.m2,
                'include_symbol': lambda name, schema=None:
                    name in ('address', 'order'),
                'upgrade_token': "upgrades",
                'downgrade_token': "downgrades",
                'alembic_module_prefix': 'op.',
                'sqlalchemy_module_prefix': 'sa.',
                'include_symbol': include_symbol,
            }
        )
        template_args = {}
        autogenerate._produce_migration_diffs(context, template_args, set())
        template_args['upgrades'] = \
            template_args['upgrades'].replace("u'", "'")
        template_args['downgrades'] = template_args['downgrades'].\
            replace("u'", "'")
        assert "alter_column('user'" not in template_args['upgrades']
        assert "alter_column('user'" not in template_args['downgrades']
        assert "alter_column('order'" in template_args['upgrades']
        assert "alter_column('order'" in template_args['downgrades']

        diffs = autogenerate.compare_metadata(
            context, context.opts['target_metadata'])

        alter_cols = set([
            d[2] for d in self._flatten_diffs(diffs)
            if d[0].startswith('modify')
        ])
        eq_(alter_cols, set(['order']))

    def test_include_object(self):
        def include_object(obj, name, type_, reflected, compare_to):
@ -732,28 +377,23 @@ nullable=True))
                'compare_server_default': True,
                'target_metadata': self.m2,
                'include_object': include_object,
                'upgrade_token': "upgrades",
                'downgrade_token': "downgrades",
                'alembic_module_prefix': 'op.',
                'sqlalchemy_module_prefix': 'sa.',
            }
        )
        template_args = {}
        autogenerate._produce_migration_diffs(context, template_args, set())

        template_args['upgrades'] = \
            template_args['upgrades'].replace("u'", "'")
        template_args['downgrades'] = template_args['downgrades'].\
            replace("u'", "'")
        assert "op.create_table('item'" not in template_args['upgrades']
        assert "op.create_table('item'" not in template_args['downgrades']
        diffs = autogenerate.compare_metadata(
            context, context.opts['target_metadata'])

        assert "alter_column('user'" in template_args['upgrades']
        assert "alter_column('user'" in template_args['downgrades']
        assert "'street'" not in template_args['upgrades']
        assert "'street'" not in template_args['downgrades']
        assert "alter_column('order'" in template_args['upgrades']
        assert "alter_column('order'" in template_args['downgrades']
        alter_cols = set([
            d[2] for d in self._flatten_diffs(diffs)
            if d[0].startswith('modify')
        ]).union(
            d[3].name for d in self._flatten_diffs(diffs)
            if d[0] == 'add_column'
        ).union(
            d[1].name for d in self._flatten_diffs(diffs)
            if d[0] == 'add_table'
        )
        eq_(alter_cols, set(['user_id', 'order', 'user']))

    def test_skip_null_type_comparison_reflected(self):
        diff = []
@ -841,14 +481,14 @@ class AutogenerateDiffTestWSchema(ModelOne, AutogenTest, TestBase):
        """test generation of diff rules"""

        metadata = self.m2
        connection = self.context.bind
        diffs = []
        autogenerate._produce_net_changes(
            connection, metadata, diffs,
            self.autogen_context,
            object_filters=_default_object_filters,
            include_schemas=True
        )

        self.autogen_context.update({
            'object_filters': _default_object_filters,
            'include_schemas': True,
            'metadata': self.m2
        })
        autogenerate._produce_net_changes(self.autogen_context, diffs)

        eq_(
            diffs[0],
@ -901,116 +541,6 @@ class AutogenerateDiffTestWSchema(ModelOne, AutogenTest, TestBase):
        eq_(diffs[10][0], 'remove_column')
        eq_(diffs[10][3].name, 'pw')

    def test_render_nothing(self):
        context = MigrationContext.configure(
            connection=self.bind.connect(),
            opts={
                'compare_type': True,
                'compare_server_default': True,
                'target_metadata': self.m1,
                'upgrade_token': "upgrades",
                'downgrade_token': "downgrades",
                'alembic_module_prefix': 'op.',
                'sqlalchemy_module_prefix': 'sa.',
            }
        )
        template_args = {}
        autogenerate._produce_migration_diffs(
            context, template_args, set(),
            include_symbol=lambda name, schema: False
        )
        eq_(re.sub(r"u'", "'", template_args['upgrades']),
            """### commands auto generated by Alembic - please adjust! ###
    pass
    ### end Alembic commands ###""")
        eq_(re.sub(r"u'", "'", template_args['downgrades']),
            """### commands auto generated by Alembic - please adjust! ###
    pass
    ### end Alembic commands ###""")

    def test_render_diffs_extras(self):
        """test a full render including indentation (include and schema)"""

        template_args = {}
        autogenerate._produce_migration_diffs(
            self.context, template_args, set(),
            include_object=_default_include_object,
            include_schemas=True
        )

        eq_(re.sub(r"u'", "'", template_args['upgrades']),
            """### commands auto generated by Alembic - please adjust! ###
    op.create_table('item',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('description', sa.String(length=100), nullable=True),
    sa.Column('order_id', sa.Integer(), nullable=True),
    sa.CheckConstraint('len(description) > 5'),
    sa.ForeignKeyConstraint(['order_id'], ['%(schema)s.order.order_id'], ),
    sa.PrimaryKeyConstraint('id'),
    schema='%(schema)s'
    )
    op.drop_table('extra', schema='%(schema)s')
    op.add_column('address', sa.Column('street', sa.String(length=50), \
nullable=True), schema='%(schema)s')
    op.create_unique_constraint('uq_email', 'address', ['email_address'], \
schema='test_schema')
    op.add_column('order', sa.Column('user_id', sa.Integer(), nullable=True), \
schema='%(schema)s')
    op.alter_column('order', 'amount',
               existing_type=sa.NUMERIC(precision=8, scale=2),
               type_=sa.Numeric(precision=10, scale=2),
               nullable=True,
               existing_server_default=sa.text('0'),
               schema='%(schema)s')
    op.create_foreign_key(None, 'order', 'user', ['user_id'], ['id'], \
source_schema='%(schema)s', referent_schema='%(schema)s')
    op.alter_column('user', 'a1',
               existing_type=sa.TEXT(),
               server_default='x',
               existing_nullable=True,
               schema='%(schema)s')
    op.alter_column('user', 'name',
               existing_type=sa.VARCHAR(length=50),
               nullable=False,
               schema='%(schema)s')
    op.drop_index('pw_idx', table_name='user', schema='test_schema')
    op.drop_column('user', 'pw', schema='%(schema)s')
    ### end Alembic commands ###""" % {"schema": self.schema})

        eq_(re.sub(r"u'", "'", template_args['downgrades']),
            """### commands auto generated by Alembic - please adjust! ###
    op.add_column('user', sa.Column('pw', sa.VARCHAR(length=50), \
autoincrement=False, nullable=True), schema='%(schema)s')
    op.create_index('pw_idx', 'user', ['pw'], unique=False, schema='%(schema)s')
    op.alter_column('user', 'name',
               existing_type=sa.VARCHAR(length=50),
               nullable=True,
               schema='%(schema)s')
    op.alter_column('user', 'a1',
               existing_type=sa.TEXT(),
               server_default=None,
               existing_nullable=True,
               schema='%(schema)s')
    op.drop_constraint(None, 'order', schema='%(schema)s', type_='foreignkey')
    op.alter_column('order', 'amount',
               existing_type=sa.Numeric(precision=10, scale=2),
               type_=sa.NUMERIC(precision=8, scale=2),
               nullable=False,
               existing_server_default=sa.text('0'),
               schema='%(schema)s')
    op.drop_column('order', 'user_id', schema='%(schema)s')
    op.drop_constraint('uq_email', 'address', schema='test_schema', type_='unique')
    op.drop_column('address', 'street', schema='%(schema)s')
    op.create_table('extra',
    sa.Column('x', sa.CHAR(length=1), autoincrement=False, nullable=True),
    sa.Column('uid', sa.INTEGER(), autoincrement=False, nullable=True),
    sa.ForeignKeyConstraint(['uid'], ['%(schema)s.user.id'], \
name='extra_uid_fkey'),
    schema='%(schema)s'
    )
    op.drop_table('item', schema='%(schema)s')
    ### end Alembic commands ###""" % {"schema": self.schema})


class AutogenerateCustomCompareTypeTest(AutogenTest, TestBase):
    __only_on__ = 'sqlite'
@ -1038,8 +568,9 @@ class AutogenerateCustomCompareTypeTest(AutogenTest, TestBase):
        self.context._user_compare_type = my_compare_type

        diffs = []
        autogenerate._produce_net_changes(self.context.bind, self.m2,
                                          diffs, self.autogen_context)
        ctx = self.autogen_context.copy()
        ctx['metadata'] = self.m2
        autogenerate._produce_net_changes(ctx, diffs)

        first_table = self.m2.tables['sometable']
        first_column = first_table.columns['id']
@ -1062,8 +593,10 @@ class AutogenerateCustomCompareTypeTest(AutogenTest, TestBase):
        self.context._user_compare_type = my_compare_type

        diffs = []
        autogenerate._produce_net_changes(self.context.bind, self.m2,
                                          diffs, self.autogen_context)
        ctx = self.autogen_context.copy()
        ctx['metadata'] = self.m2
        diffs = []
        autogenerate._produce_net_changes(ctx, diffs)

        eq_(diffs, [])

@ -1072,9 +605,10 @@ class AutogenerateCustomCompareTypeTest(AutogenTest, TestBase):
        my_compare_type.return_value = True
        self.context._user_compare_type = my_compare_type

        ctx = self.autogen_context.copy()
        ctx['metadata'] = self.m2
        diffs = []
        autogenerate._produce_net_changes(self.context.bind, self.m2,
                                          diffs, self.autogen_context)
        autogenerate._produce_net_changes(ctx, diffs)

        eq_(diffs[0][0][0], 'modify_type')
        eq_(diffs[1][0][0], 'modify_type')
@ -1101,14 +635,10 @@ class PKConstraintUpgradesIgnoresNullableTest(AutogenTest, TestBase):
        return cls._get_db_schema()

    def test_no_change(self):
        metadata = self.m2
        connection = self.context.bind

        diffs = []

        autogenerate._produce_net_changes(connection, metadata, diffs,
                                          self.autogen_context
                                          )
        ctx = self.autogen_context.copy()
        ctx['metadata'] = self.m2
        autogenerate._produce_net_changes(ctx, diffs)
        eq_(diffs, [])


@ -1143,15 +673,12 @@ class AutogenKeyTest(AutogenTest, TestBase):
    symbols = ['someothertable', 'sometable']

    def test_autogen(self):
        metadata = self.m2
        connection = self.context.bind

        diffs = []

        autogenerate._produce_net_changes(connection, metadata, diffs,
                                          self.autogen_context,
                                          include_schemas=False
                                          )
        ctx = self.autogen_context.copy()
        ctx['metadata'] = self.m2
        autogenerate._produce_net_changes(ctx, diffs)
        eq_(diffs[0][0], "add_table")
        eq_(diffs[0][1].name, "sometable")
        eq_(diffs[1][0], "add_column")
@ -1178,8 +705,10 @@ class AutogenVersionTableTest(AutogenTest, TestBase):

    def test_no_version_table(self):
        diffs = []
        autogenerate._produce_net_changes(self.context.bind, self.m2,
                                          diffs, self.autogen_context)
        ctx = self.autogen_context.copy()
        ctx['metadata'] = self.m2

        autogenerate._produce_net_changes(ctx, diffs)
        eq_(diffs, [])

    def test_version_table_in_target(self):
@ -1188,8 +717,9 @@ class AutogenVersionTableTest(AutogenTest, TestBase):
            self.version_table_name,
            self.m2, Column('x', Integer), schema=self.version_table_schema)

        autogenerate._produce_net_changes(self.context.bind, self.m2,
                                          diffs, self.autogen_context)
        ctx = self.autogen_context.copy()
        ctx['metadata'] = self.m2
        autogenerate._produce_net_changes(ctx, diffs)
        eq_(diffs, [])


@ -1239,13 +769,10 @@ class AutogenerateDiffOrderTest(AutogenTest, TestBase):
        before their parent tables
        """

        metadata = self.m2
        connection = self.context.bind
        ctx = self.autogen_context.copy()
        ctx['metadata'] = self.m2
        diffs = []

        autogenerate._produce_net_changes(connection, metadata, diffs,
                                          self.autogen_context
                                          )
        autogenerate._produce_net_changes(ctx, diffs)

        eq_(diffs[0][0], 'add_table')
        eq_(diffs[0][1].name, "parent")
@ -1,5 +1,5 @@
import sys
from alembic.testing import TestBase, config
from alembic.testing import TestBase

from sqlalchemy import MetaData, Column, Table, Integer, String, \
    ForeignKeyConstraint
@ -7,7 +7,7 @@ from alembic.testing import eq_

py3k = sys.version_info >= (3, )

from .test_autogenerate import AutogenFixtureTest
from ._autogen_fixtures import AutogenFixtureTest


class AutogenerateForeignKeysTest(AutogenFixtureTest, TestBase):

@ -12,7 +12,7 @@ from alembic.testing.env import staging_env

py3k = sys.version_info >= (3, )

from .test_autogenerate import AutogenFixtureTest
from ._autogen_fixtures import AutogenFixtureTest


class NoUqReflection(object):

@ -2,6 +2,7 @@ import re
|
||||
import sys
|
||||
from alembic.testing import TestBase, exclusions
|
||||
|
||||
from alembic.operations import ops
|
||||
from sqlalchemy import MetaData, Column, Table, String, \
|
||||
Numeric, CHAR, ForeignKey, DATETIME, Integer, \
|
||||
CheckConstraint, Unicode, Enum, cast,\
|
||||
@ -16,7 +17,8 @@ from sqlalchemy.sql import and_, column, literal_column, false
|
||||
|
||||
from alembic.testing.mock import patch
|
||||
|
||||
from alembic import autogenerate, util, compat
|
||||
from alembic import autogenerate, util
|
||||
from alembic.util import compat
|
||||
from alembic.testing import eq_, eq_ignore_whitespace, config
|
||||
|
||||
from alembic.testing.fixtures import op_fixture
|
||||
@ -58,8 +60,9 @@ class AutogenRenderTest(TestBase):
|
||||
Column('code', String(255)),
|
||||
)
|
||||
idx = Index('test_active_code_idx', t.c.active, t.c.code)
|
||||
op_obj = ops.CreateIndexOp.from_index(idx)
|
||||
eq_ignore_whitespace(
|
||||
autogenerate.render._add_index(idx, self.autogen_context),
|
||||
autogenerate.render_op_text(self.autogen_context, op_obj),
|
||||
"op.create_index('test_active_code_idx', 'test', "
|
||||
"['active', 'code'], unique=False)"
|
||||
)
|
||||
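Note: from here on, every render test follows the same two-step migration: the old one-shot helpers (`render._add_index()`, `render._add_table()`, `render._drop_constraint()`, ...) give way to constructing an operation object and handing it to one generic renderer. A sketch of the pattern, using the names from the hunk above:

    # sketch only: the op-object render pattern repeated below
    idx = Index('test_active_code_idx', t.c.active, t.c.code)
    op_obj = ops.CreateIndexOp.from_index(idx)    # wrap the SQLAlchemy construct
    source = autogenerate.render_op_text(autogen_context, op_obj)
    # -> "op.create_index('test_active_code_idx', 'test', ['active', 'code'], unique=False)"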
@@ -76,8 +79,9 @@ class AutogenRenderTest(TestBase):
             schema='CamelSchema'
         )
         idx = Index('test_active_code_idx', t.c.active, t.c.code)
+        op_obj = ops.CreateIndexOp.from_index(idx)
         eq_ignore_whitespace(
-            autogenerate.render._add_index(idx, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.create_index('test_active_code_idx', 'test', "
             "['active', 'code'], unique=False, schema='CamelSchema')"
         )
@@ -94,16 +98,18 @@ class AutogenRenderTest(TestBase):
         idx = Index('foo_idx', t.c.x, t.c.y,
                     postgresql_where=(t.c.y == 'something'))

+        op_obj = ops.CreateIndexOp.from_index(idx)
+
         if compat.sqla_08:
             eq_ignore_whitespace(
-                autogenerate.render._add_index(idx, autogen_context),
+                autogenerate.render_op_text(autogen_context, op_obj),
                 """op.create_index('foo_idx', 't', \
['x', 'y'], unique=False, """
                 """postgresql_where=sa.text(!U"t.y = 'something'"))"""
             )
         else:
             eq_ignore_whitespace(
-                autogenerate.render._add_index(idx, autogen_context),
+                autogenerate.render_op_text(autogen_context, op_obj),
                 """op.create_index('foo_idx', 't', ['x', 'y'], \
unique=False, """
                 """postgresql_where=sa.text(!U't.y = %(y_1)s'))"""
@@ -118,8 +124,10 @@ unique=False, """
             Column('code', String(255))
         )
         idx = Index('test_lower_code_idx', func.lower(t.c.code))
+        op_obj = ops.CreateIndexOp.from_index(idx)
+
         eq_ignore_whitespace(
-            autogenerate.render._add_index(idx, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.create_index('test_lower_code_idx', 'test', "
             "[sa.text(!U'lower(test.code)')], unique=False)"
         )
@@ -133,8 +141,9 @@ unique=False, """
             Column('code', String(255))
         )
         idx = Index('test_lower_code_idx', cast(t.c.code, String))
+        op_obj = ops.CreateIndexOp.from_index(idx)
         eq_ignore_whitespace(
-            autogenerate.render._add_index(idx, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.create_index('test_lower_code_idx', 'test', "
             "[sa.text(!U'CAST(test.code AS CHAR)')], unique=False)"
         )
@@ -148,8 +157,9 @@ unique=False, """
             Column('code', String(255))
         )
         idx = Index('test_desc_code_idx', t.c.code.desc())
+        op_obj = ops.CreateIndexOp.from_index(idx)
         eq_ignore_whitespace(
-            autogenerate.render._add_index(idx, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.create_index('test_desc_code_idx', 'test', "
             "[sa.text(!U'test.code DESC')], unique=False)"
         )
@@ -165,8 +175,9 @@ unique=False, """
             Column('code', String(255)),
         )
         idx = Index('test_active_code_idx', t.c.active, t.c.code)
+        op_obj = ops.DropIndexOp.from_index(idx)
         eq_ignore_whitespace(
-            autogenerate.render._drop_index(idx, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.drop_index('test_active_code_idx', table_name='test')"
         )

@@ -182,8 +193,9 @@ unique=False, """
             schema='CamelSchema'
         )
         idx = Index('test_active_code_idx', t.c.active, t.c.code)
+        op_obj = ops.DropIndexOp.from_index(idx)
         eq_ignore_whitespace(
-            autogenerate.render._drop_index(idx, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.drop_index('test_active_code_idx', " +
             "table_name='test', schema='CamelSchema')"
         )
@@ -199,9 +211,9 @@ unique=False, """
             Column('code', String(255)),
         )
         uq = UniqueConstraint(t.c.code, name='uq_test_code')
+        op_obj = ops.AddConstraintOp.from_constraint(uq)
         eq_ignore_whitespace(
-            autogenerate.render._add_unique_constraint(
-                uq, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.create_unique_constraint('uq_test_code', 'test', ['code'])"
         )

@@ -217,9 +229,9 @@ unique=False, """
             schema='CamelSchema'
         )
         uq = UniqueConstraint(t.c.code, name='uq_test_code')
+        op_obj = ops.AddConstraintOp.from_constraint(uq)
         eq_ignore_whitespace(
-            autogenerate.render._add_unique_constraint(
-                uq, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.create_unique_constraint('uq_test_code', 'test', "
             "['code'], schema='CamelSchema')"
         )
@@ -235,8 +247,9 @@ unique=False, """
             Column('code', String(255)),
         )
         uq = UniqueConstraint(t.c.code, name='uq_test_code')
+        op_obj = ops.DropConstraintOp.from_constraint(uq)
         eq_ignore_whitespace(
-            autogenerate.render._drop_constraint(uq, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.drop_constraint('uq_test_code', 'test', type_='unique')"
         )

@@ -252,8 +265,9 @@ unique=False, """
             schema='CamelSchema'
         )
         uq = UniqueConstraint(t.c.code, name='uq_test_code')
+        op_obj = ops.DropConstraintOp.from_constraint(uq)
         eq_ignore_whitespace(
-            autogenerate.render._drop_constraint(uq, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.drop_constraint('uq_test_code', 'test', "
             "schema='CamelSchema', type_='unique')"
         )
@@ -264,8 +278,9 @@ unique=False, """
         b = Table('b', m, Column('a_id', Integer, ForeignKey('a.id')))
         fk = ForeignKeyConstraint(['a_id'], ['a.id'], name='fk_a_id')
         b.append_constraint(fk)
+        op_obj = ops.AddConstraintOp.from_constraint(fk)
         eq_ignore_whitespace(
-            autogenerate.render._add_fk_constraint(fk, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.create_foreign_key('fk_a_id', 'b', 'a', ['a_id'], ['id'])"
         )

@@ -281,11 +296,12 @@ unique=False, """
         # SQLA 0.9 generates a u'' here for remote cols while 0.8 does not,
         # so just whack out "'u" here from the generated

+        op_obj = ops.AddConstraintOp.from_constraint(fk)
         eq_ignore_whitespace(
             re.sub(
                 r"u'", "'",
-                autogenerate.render._add_fk_constraint(
-                    fk, self.autogen_context)),
+                autogenerate.render_op_text(self.autogen_context, op_obj),
+            ),
             "op.create_foreign_key(None, 't', 't2', ['c'], ['c_rem'], "
             "onupdate='CASCADE')"
         )
@@ -294,11 +310,12 @@ unique=False, """
         if not util.sqla_08:
             t1.append_constraint(fk)

+        op_obj = ops.AddConstraintOp.from_constraint(fk)
         eq_ignore_whitespace(
             re.sub(
                 r"u'", "'",
-                autogenerate.render._add_fk_constraint(
-                    fk, self.autogen_context)),
+                autogenerate.render_op_text(self.autogen_context, op_obj)
+            ),
             "op.create_foreign_key(None, 't', 't2', ['c'], ['c_rem'], "
             "ondelete='CASCADE')"
         )
@@ -306,11 +323,11 @@ unique=False, """
         fk = ForeignKeyConstraint([t1.c.c], [t2.c.c_rem], deferrable=True)
         if not util.sqla_08:
             t1.append_constraint(fk)
+        op_obj = ops.AddConstraintOp.from_constraint(fk)
         eq_ignore_whitespace(
             re.sub(
                 r"u'", "'",
-                autogenerate.render._add_fk_constraint(
-                    fk, self.autogen_context),
+                autogenerate.render_op_text(self.autogen_context, op_obj)
             ),
             "op.create_foreign_key(None, 't', 't2', ['c'], ['c_rem'], "
             "deferrable=True)"
@@ -319,11 +336,11 @@ unique=False, """
         fk = ForeignKeyConstraint([t1.c.c], [t2.c.c_rem], initially="XYZ")
         if not util.sqla_08:
             t1.append_constraint(fk)
+        op_obj = ops.AddConstraintOp.from_constraint(fk)
         eq_ignore_whitespace(
             re.sub(
                 r"u'", "'",
-                autogenerate.render._add_fk_constraint(
-                    fk, self.autogen_context)
+                autogenerate.render_op_text(self.autogen_context, op_obj),
             ),
             "op.create_foreign_key(None, 't', 't2', ['c'], ['c_rem'], "
             "initially='XYZ')"
@@ -334,11 +351,11 @@ unique=False, """
             initially="XYZ", ondelete="CASCADE", deferrable=True)
         if not util.sqla_08:
             t1.append_constraint(fk)
+        op_obj = ops.AddConstraintOp.from_constraint(fk)
         eq_ignore_whitespace(
             re.sub(
                 r"u'", "'",
-                autogenerate.render._add_fk_constraint(
-                    fk, self.autogen_context)
+                autogenerate.render_op_text(self.autogen_context, op_obj)
             ),
             "op.create_foreign_key(None, 't', 't2', ['c'], ['c_rem'], "
             "ondelete='CASCADE', initially='XYZ', deferrable=True)"
@@ -351,7 +368,8 @@ unique=False, """
             'b', m,
             Column('a_id', Integer, ForeignKey('a.aid'), key='baid'))

-        py_code = autogenerate.render._add_table(b, self.autogen_context)
+        op_obj = ops.CreateTableOp.from_table(b)
+        py_code = autogenerate.render_op_text(self.autogen_context, op_obj)

         eq_ignore_whitespace(
             py_code,
@@ -373,7 +391,8 @@ unique=False, """
         fk = ForeignKeyConstraint(['baid'], ['a.aid'], name='fk_a_id')
         b.append_constraint(fk)

-        py_code = autogenerate.render._add_table(b, self.autogen_context)
+        op_obj = ops.CreateTableOp.from_table(b)
+        py_code = autogenerate.render_op_text(self.autogen_context, op_obj)

         eq_ignore_whitespace(
             py_code,
@@ -389,14 +408,16 @@ unique=False, """
             "fk_a_id FOREIGN KEY(a_id) REFERENCES a (id))")

         context = op_fixture()
-        py_code = autogenerate.render._add_fk_constraint(
-            fk, self.autogen_context)
+
+        op_obj = ops.AddConstraintOp.from_constraint(fk)
+
         eq_ignore_whitespace(
-            autogenerate.render._add_fk_constraint(fk, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.create_foreign_key('fk_a_id', 'b', 'a', ['a_id'], ['id'])"
         )

+        py_code = autogenerate.render_op_text(self.autogen_context, op_obj)
+
         eval(py_code)
         context.assert_(
             "ALTER TABLE b ADD CONSTRAINT fk_a_id "
@@ -414,8 +435,9 @@ unique=False, """
             ["a_id"],
             ["CamelSchemaTwo.a.id"], name='fk_a_id')
         b.append_constraint(fk)
+        op_obj = ops.AddConstraintOp.from_constraint(fk)
         eq_ignore_whitespace(
-            autogenerate.render._add_fk_constraint(fk, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.create_foreign_key('fk_a_id', 'b', 'a', ['a_id'], ['id'],"
             " source_schema='CamelSchemaOne', "
             "referent_schema='CamelSchemaTwo')"
@@ -427,8 +449,9 @@ unique=False, """
         b = Table('b', m, Column('a_id', Integer, ForeignKey('a.id')))
         fk = ForeignKeyConstraint(['a_id'], ['a.id'], name='fk_a_id')
         b.append_constraint(fk)
+        op_obj = ops.DropConstraintOp.from_constraint(fk)
         eq_ignore_whitespace(
-            autogenerate.render._drop_constraint(fk, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.drop_constraint('fk_a_id', 'b', type_='foreignkey')"
         )

@@ -444,9 +467,10 @@ unique=False, """
             ["a_id"],
             ["CamelSchemaTwo.a.id"], name='fk_a_id')
         b.append_constraint(fk)
+        op_obj = ops.DropConstraintOp.from_constraint(fk)
+
         eq_ignore_whitespace(
-            autogenerate.render._drop_constraint(fk, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.drop_constraint('fk_a_id', 'b', schema='CamelSchemaOne', "
             "type_='foreignkey')"
         )
@@ -462,8 +486,10 @@ unique=False, """
             UniqueConstraint("name", name="uq_name"),
             UniqueConstraint("timestamp"),
         )

+        op_obj = ops.CreateTableOp.from_table(t)
         eq_ignore_whitespace(
-            autogenerate.render._add_table(t, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.create_table('test',"
             "sa.Column('id', sa.Integer(), nullable=False),"
             "sa.Column('name', sa.Unicode(length=255), nullable=True),"
@@ -487,8 +513,9 @@ unique=False, """
             Column('q', Integer, ForeignKey('address.id')),
             schema='foo'
         )
+        op_obj = ops.CreateTableOp.from_table(t)
         eq_ignore_whitespace(
-            autogenerate.render._add_table(t, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.create_table('test',"
             "sa.Column('id', sa.Integer(), nullable=False),"
             "sa.Column('q', sa.Integer(), nullable=True),"
@@ -503,8 +530,9 @@ unique=False, """
         t = Table(compat.ue('\u0411\u0435\u0437'), m,
                   Column('id', Integer, primary_key=True),
                   )
+        op_obj = ops.CreateTableOp.from_table(t)
         eq_ignore_whitespace(
-            autogenerate.render._add_table(t, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.create_table(%r,"
             "sa.Column('id', sa.Integer(), nullable=False),"
             "sa.PrimaryKeyConstraint('id'))" % compat.ue('\u0411\u0435\u0437')
@@ -516,8 +544,9 @@ unique=False, """
             Column('id', Integer, primary_key=True),
             schema=compat.ue('\u0411\u0435\u0437')
         )
+        op_obj = ops.CreateTableOp.from_table(t)
         eq_ignore_whitespace(
-            autogenerate.render._add_table(t, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.create_table('test',"
             "sa.Column('id', sa.Integer(), nullable=False),"
             "sa.PrimaryKeyConstraint('id'),"
@@ -534,8 +563,9 @@ unique=False, """
             Column('c', Integer),
             Column('d', Integer),
         )
+        op_obj = ops.CreateTableOp.from_table(t)
         eq_ignore_whitespace(
-            autogenerate.render._add_table(t, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.create_table('test',"
             "*[sa.Column('a', sa.Integer(), nullable=True),"
             "sa.Column('b', sa.Integer(), nullable=True),"
@@ -549,9 +579,10 @@ unique=False, """
             Column('b', Integer),
             Column('c', Integer),
         )
+        op_obj = ops.CreateTableOp.from_table(t2)
+
         eq_ignore_whitespace(
-            autogenerate.render._add_table(t2, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.create_table('test2',"
             "sa.Column('a', sa.Integer(), nullable=True),"
             "sa.Column('b', sa.Integer(), nullable=True),"
@@ -564,8 +595,9 @@ unique=False, """
             Column('id', Integer, primary_key=True),
             Column('q', Integer, ForeignKey('foo.address.id')),
         )
+        op_obj = ops.CreateTableOp.from_table(t)
         eq_ignore_whitespace(
-            autogenerate.render._add_table(t, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.create_table('test',"
             "sa.Column('id', sa.Integer(), nullable=False),"
             "sa.Column('q', sa.Integer(), nullable=True),"
@@ -580,10 +612,11 @@ unique=False, """
             Column('id', Integer, primary_key=True),
             Column('q', Integer, ForeignKey('address.id')),
         )
+        op_obj = ops.CreateTableOp.from_table(t)
         eq_ignore_whitespace(
             re.sub(
                 r"u'", "'",
-                autogenerate.render._add_table(t, self.autogen_context)
+                autogenerate.render_op_text(self.autogen_context, op_obj)
             ),
             "op.create_table('test',"
             "sa.Column('id', sa.Integer(), nullable=False),"
@@ -600,8 +633,9 @@ unique=False, """
             Column('id', Integer, primary_key=True),
             Column('q', Integer, ForeignKey('bar.address.id')),
         )
+        op_obj = ops.CreateTableOp.from_table(t)
         eq_ignore_whitespace(
-            autogenerate.render._add_table(t, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.create_table('test',"
             "sa.Column('id', sa.Integer(), nullable=False),"
             "sa.Column('q', sa.Integer(), nullable=True),"
@@ -618,8 +652,9 @@ unique=False, """
             Column('q', Integer, ForeignKey('bar.address.id')),
             sqlite_autoincrement=True, mysql_engine="InnoDB"
         )
+        op_obj = ops.CreateTableOp.from_table(t)
         eq_ignore_whitespace(
-            autogenerate.render._add_table(t, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.create_table('test',"
             "sa.Column('id', sa.Integer(), nullable=False),"
             "sa.Column('q', sa.Integer(), nullable=True),"
@@ -629,17 +664,20 @@ unique=False, """
         )

     def test_render_drop_table(self):
+        op_obj = ops.DropTableOp.from_table(
+            Table("sometable", MetaData())
+        )
         eq_ignore_whitespace(
-            autogenerate.render._drop_table(Table("sometable", MetaData()),
-                                            self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.drop_table('sometable')"
         )

     def test_render_drop_table_w_schema(self):
+        op_obj = ops.DropTableOp.from_table(
+            Table("sometable", MetaData(), schema='foo')
+        )
         eq_ignore_whitespace(
-            autogenerate.render._drop_table(
-                Table("sometable", MetaData(), schema='foo'),
-                self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.drop_table('sometable', schema='foo')"
         )

@@ -647,8 +685,9 @@ unique=False, """
         m = MetaData()
         t = Table('test', m, Column('x', Boolean()))

+        op_obj = ops.CreateTableOp.from_table(t)
         eq_ignore_whitespace(
-            autogenerate.render._add_table(t, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.create_table('test',"
             "sa.Column('x', sa.Boolean(), nullable=True))"
         )
@@ -658,52 +697,53 @@ unique=False, """
         t1 = Table('t1', m, Column('x', Integer))
         t2 = Table('t2', m, Column('x', Integer, primary_key=True))

+        op_obj = ops.CreateTableOp.from_table(t1)
         eq_ignore_whitespace(
-            autogenerate.render._add_table(t1, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.create_table('t1',"
             "sa.Column('x', sa.Integer(), nullable=True))"
         )

+        op_obj = ops.CreateTableOp.from_table(t2)
         eq_ignore_whitespace(
-            autogenerate.render._add_table(t2, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.create_table('t2',"
             "sa.Column('x', sa.Integer(), nullable=False),"
             "sa.PrimaryKeyConstraint('x'))"
         )

     def test_render_add_column(self):
+        op_obj = ops.AddColumnOp(
+            "foo", Column("x", Integer, server_default="5"))
         eq_ignore_whitespace(
-            autogenerate.render._add_column(
-                None, "foo", Column("x", Integer, server_default="5"),
-                self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.add_column('foo', sa.Column('x', sa.Integer(), "
             "server_default='5', nullable=True))"
         )

     def test_render_add_column_w_schema(self):
+        op_obj = ops.AddColumnOp(
+            "bar", Column("x", Integer, server_default="5"),
+            schema="foo")
         eq_ignore_whitespace(
-            autogenerate.render._add_column(
-                "foo", "bar", Column("x", Integer, server_default="5"),
-                self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.add_column('bar', sa.Column('x', sa.Integer(), "
             "server_default='5', nullable=True), schema='foo')"
         )

     def test_render_drop_column(self):
+        op_obj = ops.DropColumnOp.from_column_and_tablename(
+            None, "foo", Column("x", Integer, server_default="5"))
         eq_ignore_whitespace(
-            autogenerate.render._drop_column(
-                None, "foo", Column("x", Integer, server_default="5"),
-                self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.drop_column('foo', 'x')"
         )

     def test_render_drop_column_w_schema(self):
+        op_obj = ops.DropColumnOp.from_column_and_tablename(
+            "foo", "bar", Column("x", Integer, server_default="5"))
         eq_ignore_whitespace(
-            autogenerate.render._drop_column(
-                "foo", "bar", Column("x", Integer, server_default="5"),
-                self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.drop_column('bar', 'x', schema='foo')"
         )
@@ -783,9 +823,8 @@ unique=False, """
             PrimaryKeyConstraint('x'),
             ForeignKeyConstraint(['x'], ['y'])
         )
-        result = autogenerate.render._add_table(
-            t, autogen_context
-        )
+        op_obj = ops.CreateTableOp.from_table(t)
+        result = autogenerate.render_op_text(autogen_context, op_obj)
         eq_ignore_whitespace(
             result,
             "sa.create_table('t',"
@@ -794,45 +833,50 @@ unique=False, """
         )

     def test_render_modify_type(self):
-        eq_ignore_whitespace(
-            autogenerate.render._modify_col(
+        op_obj = ops.AlterColumnOp(
             "sometable", "somecolumn",
-            self.autogen_context,
-            type_=CHAR(10), existing_type=CHAR(20)),
+            modify_type=CHAR(10), existing_type=CHAR(20)
+        )
+        eq_ignore_whitespace(
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.alter_column('sometable', 'somecolumn', "
             "existing_type=sa.CHAR(length=20), type_=sa.CHAR(length=10))"
         )

     def test_render_modify_type_w_schema(self):
-        eq_ignore_whitespace(
-            autogenerate.render._modify_col(
+        op_obj = ops.AlterColumnOp(
             "sometable", "somecolumn",
-            self.autogen_context,
-            type_=CHAR(10), existing_type=CHAR(20),
-            schema='foo'),
+            modify_type=CHAR(10), existing_type=CHAR(20),
+            schema='foo'
+        )
+        eq_ignore_whitespace(
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.alter_column('sometable', 'somecolumn', "
             "existing_type=sa.CHAR(length=20), type_=sa.CHAR(length=10), "
             "schema='foo')"
         )

     def test_render_modify_nullable(self):
-        eq_ignore_whitespace(
-            autogenerate.render._modify_col(
+        op_obj = ops.AlterColumnOp(
             "sometable", "somecolumn",
-            self.autogen_context,
             existing_type=Integer(),
-            nullable=True),
+            modify_nullable=True
+        )
+        eq_ignore_whitespace(
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.alter_column('sometable', 'somecolumn', "
             "existing_type=sa.Integer(), nullable=True)"
         )

     def test_render_modify_nullable_w_schema(self):
-        eq_ignore_whitespace(
-            autogenerate.render._modify_col(
+        op_obj = ops.AlterColumnOp(
             "sometable", "somecolumn",
-            self.autogen_context,
             existing_type=Integer(),
-            nullable=True, schema='foo'),
+            modify_nullable=True, schema='foo'
+        )
+
+        eq_ignore_whitespace(
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.alter_column('sometable', 'somecolumn', "
             "existing_type=sa.Integer(), nullable=True, schema='foo')"
         )
@@ -993,23 +1037,22 @@ unique=False, """
             't', m, Column('c', Integer),
             schema=compat.ue('\u0411\u0435\u0437')
         )
+        op_obj = ops.AddConstraintOp.from_constraint(UniqueConstraint(t.c.c))
         eq_ignore_whitespace(
-            autogenerate.render._add_unique_constraint(
-                UniqueConstraint(t.c.c),
-                self.autogen_context
-            ),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.create_unique_constraint(None, 't', ['c'], "
             "schema=%r)" % compat.ue('\u0411\u0435\u0437')
         )

     def test_render_modify_nullable_w_default(self):
-        eq_ignore_whitespace(
-            autogenerate.render._modify_col(
+        op_obj = ops.AlterColumnOp(
             "sometable", "somecolumn",
-            self.autogen_context,
             existing_type=Integer(),
             existing_server_default="5",
-            nullable=True),
+            modify_nullable=True
+        )
+        eq_ignore_whitespace(
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.alter_column('sometable', 'somecolumn', "
             "existing_type=sa.Integer(), nullable=True, "
             "existing_server_default='5')"
@@ -1236,13 +1279,14 @@ unique=False, """
         )

     def test_render_modify_reflected_int_server_default(self):
-        eq_ignore_whitespace(
-            autogenerate.render._modify_col(
+        op_obj = ops.AlterColumnOp(
             "sometable", "somecolumn",
-            self.autogen_context,
             existing_type=Integer(),
             existing_server_default=DefaultClause(text("5")),
-            nullable=True),
+            modify_nullable=True
+        )
+        eq_ignore_whitespace(
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.alter_column('sometable', 'somecolumn', "
             "existing_type=sa.Integer(), nullable=True, "
             "existing_server_default=sa.text(!U'5'))"
@@ -1280,10 +1324,9 @@ class RenderNamingConventionTest(TestBase):

     def test_schema_type_boolean(self):
         t = Table('t', self.metadata, Column('c', Boolean(name='xyz')))
+        op_obj = ops.AddColumnOp.from_column(t.c.c)
         eq_ignore_whitespace(
-            autogenerate.render._add_column(
-                None, "t", t.c.c,
-                self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.add_column('t', "
             "sa.Column('c', sa.Boolean(name='xyz'), nullable=True))"
         )
@@ -1316,8 +1359,9 @@ class RenderNamingConventionTest(TestBase):
             Column('code', String(255)),
         )
         idx = Index(None, t.c.active, t.c.code)
+        op_obj = ops.CreateIndexOp.from_index(idx)
         eq_ignore_whitespace(
-            autogenerate.render._add_index(idx, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.create_index(op.f('ix_ct_test_active'), 'test', "
             "['active', 'code'], unique=False)"
         )
@@ -1329,8 +1373,9 @@ class RenderNamingConventionTest(TestBase):
             Column('code', String(255)),
         )
         idx = Index(None, t.c.active, t.c.code)
+        op_obj = ops.DropIndexOp.from_index(idx)
         eq_ignore_whitespace(
-            autogenerate.render._drop_index(idx, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.drop_index(op.f('ix_ct_test_active'), table_name='test')"
         )

@@ -1342,8 +1387,9 @@ class RenderNamingConventionTest(TestBase):
             schema='CamelSchema'
         )
         idx = Index(None, t.c.active, t.c.code)
+        op_obj = ops.CreateIndexOp.from_index(idx)
         eq_ignore_whitespace(
-            autogenerate.render._add_index(idx, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.create_index(op.f('ix_ct_CamelSchema_test_active'), 'test', "
             "['active', 'code'], unique=False, schema='CamelSchema')"
         )
@@ -1360,8 +1406,9 @@ class RenderNamingConventionTest(TestBase):

     def test_inline_pk_constraint(self):
         t = Table('t', self.metadata, Column('c', Integer, primary_key=True))
+        op_obj = ops.CreateTableOp.from_table(t)
         eq_ignore_whitespace(
-            autogenerate.render._add_table(t, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.create_table('t',sa.Column('c', sa.Integer(), nullable=False),"
             "sa.PrimaryKeyConstraint('c', name=op.f('pk_ct_t')))"
         )
@@ -1369,16 +1416,18 @@ class RenderNamingConventionTest(TestBase):
     def test_inline_ck_constraint(self):
         t = Table(
             't', self.metadata, Column('c', Integer), CheckConstraint("c > 5"))
+        op_obj = ops.CreateTableOp.from_table(t)
         eq_ignore_whitespace(
-            autogenerate.render._add_table(t, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.create_table('t',sa.Column('c', sa.Integer(), nullable=True),"
             "sa.CheckConstraint(!U'c > 5', name=op.f('ck_ct_t')))"
         )

     def test_inline_fk(self):
         t = Table('t', self.metadata, Column('c', Integer, ForeignKey('q.id')))
+        op_obj = ops.CreateTableOp.from_table(t)
         eq_ignore_whitespace(
-            autogenerate.render._add_table(t, self.autogen_context),
+            autogenerate.render_op_text(self.autogen_context, op_obj),
             "op.create_table('t',sa.Column('c', sa.Integer(), nullable=True),"
             "sa.ForeignKeyConstraint(['c'], ['q.id'], "
             "name=op.f('fk_ct_t_c_q')))"
@@ -1,15 +1,13 @@
 from contextlib import contextmanager
 import re

 import io

 from alembic.testing import exclusions
 from alembic.testing import TestBase, eq_, config
 from alembic.testing.fixtures import op_fixture
 from alembic.testing import mock
 from alembic.operations import Operations
-from alembic.batch import ApplyBatchImpl
-from alembic.migration import MigrationContext
+from alembic.operations.batch import ApplyBatchImpl
+from alembic.runtime.migration import MigrationContext


 from sqlalchemy import Integer, Table, Column, String, MetaData, ForeignKey, \
@@ -330,7 +328,7 @@ class BatchApplyTest(TestBase):
         impl = self._simple_fixture()
         col = Column('g', Integer)
         # operations.add_column produces a table
-        t = self.op._table('tname', col)  # noqa
+        t = self.op.schema_obj.table('tname', col)  # noqa
         impl.add_column('tname', col)
         new_table = self._assert_impl(impl, colnames=['id', 'x', 'y', 'g'])
         eq_(new_table.c.g.name, 'g')
@@ -420,7 +418,7 @@ class BatchApplyTest(TestBase):
     def test_add_fk(self):
         impl = self._simple_fixture()
         impl.add_column('tname', Column('user_id', Integer))
-        fk = self.op._foreign_key_constraint(
+        fk = self.op.schema_obj.foreign_key_constraint(
             'fk1', 'tname', 'user',
             ['user_id'], ['id'])
         impl.add_constraint(fk)
@@ -447,7 +445,7 @@ class BatchApplyTest(TestBase):

     def test_add_uq(self):
         impl = self._simple_fixture()
-        uq = self.op._unique_constraint(
+        uq = self.op.schema_obj.unique_constraint(
             'uq1', 'tname', ['y']
         )

@@ -459,7 +457,7 @@ class BatchApplyTest(TestBase):
     def test_drop_uq(self):
         impl = self._uq_fixture()

-        uq = self.op._unique_constraint(
+        uq = self.op.schema_obj.unique_constraint(
             'uq1', 'tname', ['y']
         )
         impl.drop_constraint(uq)
@@ -469,7 +467,7 @@ class BatchApplyTest(TestBase):

     def test_create_index(self):
         impl = self._simple_fixture()
-        ix = self.op._index('ix1', 'tname', ['y'])
+        ix = self.op.schema_obj.index('ix1', 'tname', ['y'])

         impl.create_index(ix)
         self._assert_impl(
@@ -479,7 +477,7 @@ class BatchApplyTest(TestBase):
     def test_drop_index(self):
         impl = self._ix_fixture()

-        ix = self.op._index('ix1', 'tname', ['y'])
+        ix = self.op.schema_obj.index('ix1', 'tname', ['y'])
         impl.drop_index(ix)
         self._assert_impl(
             impl, colnames=['id', 'x', 'y'],
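Note: the rename in these hunks is uniform — the private schema-construct helpers on Operations (`_table()`, `_index()`, `_unique_constraint()`, `_foreign_key_constraint()`) now live on a `schema_obj` attribute. A sketch of the new spelling, taken directly from the tests above:

    # sketch only: schema constructs now come from op.schema_obj
    ix = self.op.schema_obj.index('ix1', 'tname', ['y'])        # was self.op._index(...)
    uq = self.op.schema_obj.unique_constraint('uq1', 'tname', ['y'])
    fk = self.op.schema_obj.foreign_key_constraint(
        'fk1', 'tname', 'user', ['user_id'], ['id'])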
@@ -498,12 +496,14 @@ class BatchAPITest(TestBase):

     @contextmanager
     def _fixture(self, schema=None):
-        migration_context = mock.Mock(opts={})
+        migration_context = mock.Mock(
+            opts={}, impl=mock.MagicMock(__dialect__='sqlite'))
         op = Operations(migration_context)
         batch = op.batch_alter_table(
             'tname', recreate='never', schema=schema).__enter__()

-        with mock.patch("alembic.operations.sa_schema") as mock_schema:
+        mock_schema = mock.MagicMock()
+        with mock.patch("alembic.operations.schemaobj.sa_schema", mock_schema):
             yield batch
         batch.impl.flush()
         self.mock_schema = mock_schema

@@ -1,9 +1,8 @@
 #!coding: utf-8

 import os
 import tempfile

-from alembic import config, util, compat
+from alembic import config, util
+from alembic.util import compat
 from alembic.migration import MigrationContext
 from alembic.operations import Operations
 from alembic.script import ScriptDirectory

@@ -524,7 +524,8 @@ class OpTest(TestBase):
     def test_add_foreign_key_dialect_kw(self):
         op_fixture()
         with mock.patch(
-                "alembic.operations.sa_schema.ForeignKeyConstraint") as fkc:
+            "sqlalchemy.schema.ForeignKeyConstraint"
+        ) as fkc:
             op.create_foreign_key('fk_test', 't1', 't2',
                                   ['foo', 'bar'], ['bat', 'hoho'],
                                   foobar_arg='xyz')
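Note: the patch target changes because the operations code no longer reaches `sa_schema` through the `alembic.operations` module namespace, so the test now patches SQLAlchemy's class where it actually lives. A sketch of the pattern (standard `mock.patch` usage; nothing beyond what the hunk shows is assumed):

    # sketch only: patch the symbol at its real import location
    with mock.patch("sqlalchemy.schema.ForeignKeyConstraint") as fkc:
        op.create_foreign_key('fk_test', 't1', 't2', ['foo'], ['bat'])
        assert fkc.called   # the op constructed its FK through the patched class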
@@ -808,12 +809,6 @@ class OpTest(TestBase):
         op.drop_constraint("f1", "t1", type_="foreignkey")
         context.assert_("ALTER TABLE t1 DROP FOREIGN KEY f1")

-        assert_raises_message(
-            TypeError,
-            r"Unknown arguments: badarg\d, badarg\d",
-            op.alter_column, "t", "c", badarg1="x", badarg2="y"
-        )
-
     @config.requirements.fail_before_sqla_084
     def test_naming_changes_drop_idx(self):
         context = op_fixture('mssql')
@@ -857,3 +852,31 @@ class SQLModeOpTest(TestBase):
             "CREATE TABLE some_table (id INTEGER NOT NULL, st_id INTEGER, "
             "PRIMARY KEY (id), FOREIGN KEY(st_id) REFERENCES some_table (id))"
         )
+
+
+class CustomOpTest(TestBase):
+    def test_custom_op(self):
+        from alembic.operations import Operations, MigrateOperation
+
+        @Operations.register_operation("create_sequence")
+        class CreateSequenceOp(MigrateOperation):
+            """Create a SEQUENCE."""
+
+            def __init__(self, sequence_name, **kw):
+                self.sequence_name = sequence_name
+                self.kw = kw
+
+            @classmethod
+            def create_sequence(cls, operations, sequence_name, **kw):
+                """Issue a "CREATE SEQUENCE" instruction."""
+
+                op = CreateSequenceOp(sequence_name, **kw)
+                return operations.invoke(op)
+
+        @Operations.implementation_for(CreateSequenceOp)
+        def create_sequence(operations, operation):
+            operations.execute("CREATE SEQUENCE %s" % operation.sequence_name)
+
+        context = op_fixture()
+        op.create_sequence('foob')
+        context.assert_("CREATE SEQUENCE foob")
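Note: this new test is the canonical shape of the #302 extensibility hook — `register_operation` grafts a `create_sequence()` method onto the `op.` namespace, and `implementation_for` binds the operation object to its execution. A sketch of what a migration script can then do once the op is registered (the sequence name here is a hypothetical example):

    # sketch only: inside a migration's upgrade(), after registration
    def upgrade():
        op.create_sequence('order_id_seq')   # hypothetical name
        # emits: CREATE SEQUENCE order_id_seq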
@@ -1,6 +1,6 @@
 from alembic.testing.fixtures import TestBase
 from alembic.testing import eq_, assert_raises_message
-from alembic.revision import RevisionMap, Revision, MultipleHeads, \
+from alembic.script.revision import RevisionMap, Revision, MultipleHeads, \
     RevisionError


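Note: the import churn across these test files tracks the package reorganization in this commit rather than any behavioral change. The moves visible in the hunks, summarized side by side:

    # sketch only: old import path -> new import path, per the hunks here
    from alembic.script.revision import RevisionMap          # was alembic.revision
    from alembic.runtime.migration import MigrationContext   # was alembic.migration
    from alembic.operations.batch import ApplyBatchImpl      # was alembic.batch
    from alembic.util import compat                          # was "from alembic import ..., compat"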
@@ -3,7 +3,8 @@
 import os
 import re

-from alembic import command, util, compat
+from alembic import command, util
+from alembic.util import compat
 from alembic.script import ScriptDirectory, Script
 from alembic.testing.env import clear_staging_env, staging_env, \
     _sqlite_testing_config, write_script, _sqlite_file_db, \

@@ -1,15 +1,20 @@
 from alembic.testing.fixtures import TestBase
-from alembic.testing import eq_, ne_, is_, assert_raises_message
+from alembic.testing import eq_, ne_, assert_raises_message
 from alembic.testing.env import clear_staging_env, staging_env, \
     _get_staging_directory, _no_sql_testing_config, env_file_fixture, \
     script_file_fixture, _testing_config, _sqlite_testing_config, \
-    three_rev_fixture, _multi_dir_testing_config
+    three_rev_fixture, _multi_dir_testing_config, write_script,\
+    _sqlite_file_db
 from alembic import command
 from alembic.script import ScriptDirectory
 from alembic.environment import EnvironmentContext
 from alembic.testing import mock
 from alembic import util
+from alembic.operations import ops
 import os
 import datetime
+import sqlalchemy as sa
+from sqlalchemy.engine.reflection import Inspector

 env, abc, def_ = None, None, None

@@ -214,6 +219,174 @@ class RevisionCommandTest(TestBase):
         )


+class CustomizeRevisionTest(TestBase):
+    def setUp(self):
+        self.env = staging_env()
+        self.cfg = _multi_dir_testing_config()
+        self.cfg.set_main_option("revision_environment", "true")
+
+        script = ScriptDirectory.from_config(self.cfg)
+        # MARKMARK
+        self.model1 = util.rev_id()
+        self.model2 = util.rev_id()
+        self.model3 = util.rev_id()
+        for model, name in [
+                (self.model1, "model1"),
+                (self.model2, "model2"),
+                (self.model3, "model3"),
+        ]:
+            script.generate_revision(
+                model, name, refresh=True,
+                version_path=os.path.join(_get_staging_directory(), name),
+                head="base")
+
+            write_script(script, model, """\
+"%s"
+revision = '%s'
+down_revision = None
+branch_labels = ['%s']
+
+from alembic import op
+
+def upgrade():
+    pass
+
+def downgrade():
+    pass
+
+""" % (name, model, name))
+
+    def tearDown(self):
+        clear_staging_env()
+
+    def _env_fixture(self, fn, target_metadata):
+        self.engine = engine = _sqlite_file_db()
+
+        def run_env(self):
+            from alembic import context
+
+            with engine.connect() as connection:
+                context.configure(
+                    connection=connection,
+                    target_metadata=target_metadata,
+                    process_revision_directives=fn)
+                with context.begin_transaction():
+                    context.run_migrations()
+
+        return mock.patch(
+            "alembic.script.base.ScriptDirectory.run_env",
+            run_env
+        )
+
+    def test_new_locations_no_autogen(self):
+        m = sa.MetaData()
+
+        def process_revision_directives(context, rev, generate_revisions):
+            generate_revisions[:] = [
+                ops.MigrationScript(
+                    util.rev_id(),
+                    ops.UpgradeOps(),
+                    ops.DowngradeOps(),
+                    version_path=os.path.join(
+                        _get_staging_directory(), "model1"),
+                    head="model1@head"
+                ),
+                ops.MigrationScript(
+                    util.rev_id(),
+                    ops.UpgradeOps(),
+                    ops.DowngradeOps(),
+                    version_path=os.path.join(
+                        _get_staging_directory(), "model2"),
+                    head="model2@head"
+                ),
+                ops.MigrationScript(
+                    util.rev_id(),
+                    ops.UpgradeOps(),
+                    ops.DowngradeOps(),
+                    version_path=os.path.join(
+                        _get_staging_directory(), "model3"),
+                    head="model3@head"
+                ),
+            ]
+
+        with self._env_fixture(process_revision_directives, m):
+            revs = command.revision(self.cfg, message="some message")
+
+        script = ScriptDirectory.from_config(self.cfg)
+
+        for rev, model in [
+            (revs[0], "model1"),
+            (revs[1], "model2"),
+            (revs[2], "model3"),
+        ]:
+            rev_script = script.get_revision(rev.revision)
+            eq_(
+                rev_script.path,
+                os.path.abspath(os.path.join(
+                    _get_staging_directory(), model,
+                    "%s_.py" % (rev_script.revision, )
+                ))
+            )
+            assert os.path.exists(rev_script.path)
+
+    def test_autogen(self):
+        m = sa.MetaData()
+        sa.Table('t', m, sa.Column('x', sa.Integer))
+
+        def process_revision_directives(context, rev, generate_revisions):
+            existing_upgrades = generate_revisions[0].upgrade_ops
+            existing_downgrades = generate_revisions[0].downgrade_ops
+
+            # model1 will run the upgrades, e.g. create the table,
+            # model2 will run the downgrades as upgrades, e.g. drop
+            # the table again
+
+            generate_revisions[:] = [
+                ops.MigrationScript(
+                    util.rev_id(),
+                    existing_upgrades,
+                    ops.DowngradeOps(),
+                    version_path=os.path.join(
+                        _get_staging_directory(), "model1"),
+                    head="model1@head"
+                ),
+                ops.MigrationScript(
+                    util.rev_id(),
+                    existing_downgrades,
+                    ops.DowngradeOps(),
+                    version_path=os.path.join(
+                        _get_staging_directory(), "model2"),
+                    head="model2@head"
+                )
+            ]
+
+        with self._env_fixture(process_revision_directives, m):
+            command.upgrade(self.cfg, "heads")
+
+            eq_(
+                Inspector.from_engine(self.engine).get_table_names(),
+                ["alembic_version"]
+            )
+
+            command.revision(
+                self.cfg, message="some message",
+                autogenerate=True)
+
+            command.upgrade(self.cfg, "model1@head")
+
+            eq_(
+                Inspector.from_engine(self.engine).get_table_names(),
+                ["alembic_version", "t"]
+            )
+
+            command.upgrade(self.cfg, "model2@head")
+
+            eq_(
+                Inspector.from_engine(self.engine).get_table_names(),
+                ["alembic_version"]
+            )
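Note: CustomizeRevisionTest exercises the #301 hook end to end — env.py passes `process_revision_directives` to `context.configure()`, and the callable receives the list of `ops.MigrationScript` directives, which it may rewrite in place before any revision file is written (test_autogen uses this to split one autogen stream across two version directories). A sketch of the hook in a project's env.py, modeled on `_env_fixture` above (assumes the usual online-mode boilerplate; `engine` and `target_metadata` come from the project):

    # sketch only: wiring the new hook into env.py
    from alembic import context

    def process_revision_directives(context_, revision, directives):
        # `directives` is a mutable list of ops.MigrationScript objects;
        # replace, split, or edit them here to steer what gets generated
        pass

    with engine.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            process_revision_directives=process_revision_directives)
        with context.begin_transaction():
            context.run_migrations()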
|
||||
|
||||
class MultiDirRevisionCommandTest(TestBase):
|
||||
def setUp(self):
|
||||
self.env = staging_env()
|
||||
|