commit 411ad5b16f
Open Media Library Platform

5849 changed files with 1778641 additions and 0 deletions
Darwin/lib/python2.7/site-packages/alembic/__init__.py  (new file, 11 lines)
@@ -0,0 +1,11 @@
from os import path

__version__ = '0.6.5'

package_dir = path.abspath(path.dirname(__file__))


from . import op
from . import context
Darwin/lib/python2.7/site-packages/alembic/autogenerate/__init__.py  (new file, 1 line)
@@ -0,0 +1 @@
from .api import compare_metadata, _produce_migration_diffs, _produce_net_changes
Darwin/lib/python2.7/site-packages/alembic/autogenerate/api.py  (new file, 301 lines)
@@ -0,0 +1,301 @@
"""Provide the 'autogenerate' feature which can produce migration operations
|
||||
automatically."""
|
||||
|
||||
import logging
|
||||
import re
|
||||
|
||||
from sqlalchemy.engine.reflection import Inspector
|
||||
from sqlalchemy.util import OrderedSet
|
||||
from .compare import _compare_tables
|
||||
from .render import _drop_table, _drop_column, _drop_index, _drop_constraint, \
|
||||
_add_table, _add_column, _add_index, _add_constraint, _modify_col
|
||||
from .. import util
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
###################################################
|
||||
# public
|
||||
def compare_metadata(context, metadata):
|
||||
"""Compare a database schema to that given in a
|
||||
:class:`~sqlalchemy.schema.MetaData` instance.
|
||||
|
||||
The database connection is presented in the context
|
||||
of a :class:`.MigrationContext` object, which
|
||||
provides database connectivity as well as optional
|
||||
comparison functions to use for datatypes and
|
||||
server defaults - see the "autogenerate" arguments
|
||||
at :meth:`.EnvironmentContext.configure`
|
||||
for details on these.
|
||||
|
||||
The return format is a list of "diff" directives,
|
||||
each representing individual differences::
|
||||
|
||||
from alembic.migration import MigrationContext
|
||||
from alembic.autogenerate import compare_metadata
|
||||
from sqlalchemy.schema import SchemaItem
|
||||
from sqlalchemy.types import TypeEngine
|
||||
from sqlalchemy import (create_engine, MetaData, Column,
|
||||
Integer, String, Table)
|
||||
import pprint
|
||||
|
||||
engine = create_engine("sqlite://")
|
||||
|
||||
engine.execute('''
|
||||
create table foo (
|
||||
id integer not null primary key,
|
||||
old_data varchar,
|
||||
x integer
|
||||
)''')
|
||||
|
||||
engine.execute('''
|
||||
create table bar (
|
||||
data varchar
|
||||
)''')
|
||||
|
||||
metadata = MetaData()
|
||||
Table('foo', metadata,
|
||||
Column('id', Integer, primary_key=True),
|
||||
Column('data', Integer),
|
||||
Column('x', Integer, nullable=False)
|
||||
)
|
||||
Table('bat', metadata,
|
||||
Column('info', String)
|
||||
)
|
||||
|
||||
mc = MigrationContext.configure(engine.connect())
|
||||
|
||||
diff = compare_metadata(mc, metadata)
|
||||
pprint.pprint(diff, indent=2, width=20)
|
||||
|
||||
Output::
|
||||
|
||||
[ ( 'add_table',
|
||||
Table('bat', MetaData(bind=None),
|
||||
Column('info', String(), table=<bat>), schema=None)),
|
||||
( 'remove_table',
|
||||
Table(u'bar', MetaData(bind=None),
|
||||
Column(u'data', VARCHAR(), table=<bar>), schema=None)),
|
||||
( 'add_column',
|
||||
None,
|
||||
'foo',
|
||||
Column('data', Integer(), table=<foo>)),
|
||||
( 'remove_column',
|
||||
None,
|
||||
'foo',
|
||||
Column(u'old_data', VARCHAR(), table=None)),
|
||||
[ ( 'modify_nullable',
|
||||
None,
|
||||
'foo',
|
||||
u'x',
|
||||
{ 'existing_server_default': None,
|
||||
'existing_type': INTEGER()},
|
||||
True,
|
||||
False)]]
|
||||
|
||||
|
||||
:param context: a :class:`.MigrationContext`
|
||||
instance.
|
||||
:param metadata: a :class:`~sqlalchemy.schema.MetaData`
|
||||
instance.
|
||||
|
||||
"""
|
||||
autogen_context, connection = _autogen_context(context, None)
|
||||
diffs = []
|
||||
|
||||
object_filters = _get_object_filters(context.opts)
|
||||
include_schemas = context.opts.get('include_schemas', False)
|
||||
|
||||
_produce_net_changes(connection, metadata, diffs, autogen_context,
|
||||
object_filters, include_schemas)
|
||||
|
||||
return diffs
|
||||
|
||||
###################################################
|
||||
# top level
|
||||
|
||||
def _produce_migration_diffs(context, template_args,
|
||||
imports, include_symbol=None,
|
||||
include_object=None,
|
||||
include_schemas=False):
|
||||
opts = context.opts
|
||||
metadata = opts['target_metadata']
|
||||
include_schemas = opts.get('include_schemas', include_schemas)
|
||||
|
||||
object_filters = _get_object_filters(opts, include_symbol, include_object)
|
||||
|
||||
if metadata is None:
|
||||
raise util.CommandError(
|
||||
"Can't proceed with --autogenerate option; environment "
|
||||
"script %s does not provide "
|
||||
"a MetaData object to the context." % (
|
||||
context.script.env_py_location
|
||||
))
|
||||
autogen_context, connection = _autogen_context(context, imports)
|
||||
|
||||
diffs = []
|
||||
_produce_net_changes(connection, metadata, diffs,
|
||||
autogen_context, object_filters, include_schemas)
|
||||
template_args[opts['upgrade_token']] = \
|
||||
_indent(_produce_upgrade_commands(diffs, autogen_context))
|
||||
template_args[opts['downgrade_token']] = \
|
||||
_indent(_produce_downgrade_commands(diffs, autogen_context))
|
||||
template_args['imports'] = "\n".join(sorted(imports))
|
||||
|
||||
|
||||
def _get_object_filters(context_opts, include_symbol=None, include_object=None):
|
||||
include_symbol = context_opts.get('include_symbol', include_symbol)
|
||||
include_object = context_opts.get('include_object', include_object)
|
||||
|
||||
object_filters = []
|
||||
if include_symbol:
|
||||
def include_symbol_filter(object, name, type_, reflected, compare_to):
|
||||
if type_ == "table":
|
||||
return include_symbol(name, object.schema)
|
||||
else:
|
||||
return True
|
||||
object_filters.append(include_symbol_filter)
|
||||
if include_object:
|
||||
object_filters.append(include_object)
|
||||
|
||||
return object_filters
|
||||
|
||||
|
||||
def _autogen_context(context, imports):
|
||||
opts = context.opts
|
||||
connection = context.bind
|
||||
return {
|
||||
'imports': imports,
|
||||
'connection': connection,
|
||||
'dialect': connection.dialect,
|
||||
'context': context,
|
||||
'opts': opts
|
||||
}, connection
|
||||
|
||||
def _indent(text):
|
||||
text = "### commands auto generated by Alembic - "\
|
||||
"please adjust! ###\n" + text
|
||||
text += "\n### end Alembic commands ###"
|
||||
text = re.compile(r'^', re.M).sub(" ", text).strip()
|
||||
return text
|
||||
|
||||
###################################################
|
||||
# walk structures
|
||||
|
||||
|
||||
def _produce_net_changes(connection, metadata, diffs, autogen_context,
|
||||
object_filters=(),
|
||||
include_schemas=False):
|
||||
inspector = Inspector.from_engine(connection)
|
||||
# TODO: not hardcode alembic_version here ?
|
||||
conn_table_names = set()
|
||||
|
||||
default_schema = connection.dialect.default_schema_name
|
||||
if include_schemas:
|
||||
schemas = set(inspector.get_schema_names())
|
||||
# replace default schema name with None
|
||||
schemas.discard("information_schema")
|
||||
# replace the "default" schema with None
|
||||
schemas.add(None)
|
||||
schemas.discard(default_schema)
|
||||
else:
|
||||
schemas = [None]
|
||||
|
||||
for s in schemas:
|
||||
tables = set(inspector.get_table_names(schema=s)).\
|
||||
difference(['alembic_version'])
|
||||
conn_table_names.update(zip([s] * len(tables), tables))
|
||||
|
||||
metadata_table_names = OrderedSet([(table.schema, table.name)
|
||||
for table in metadata.sorted_tables])
|
||||
|
||||
_compare_tables(conn_table_names, metadata_table_names,
|
||||
object_filters,
|
||||
inspector, metadata, diffs, autogen_context)
|
||||
|
||||
|
||||
###################################################
|
||||
# element comparison
|
||||
|
||||
|
||||
###################################################
|
||||
# render python
|
||||
|
||||
|
||||
###################################################
|
||||
# produce command structure
|
||||
|
||||
def _produce_upgrade_commands(diffs, autogen_context):
|
||||
buf = []
|
||||
for diff in diffs:
|
||||
buf.append(_invoke_command("upgrade", diff, autogen_context))
|
||||
if not buf:
|
||||
buf = ["pass"]
|
||||
return "\n".join(buf)
|
||||
|
||||
def _produce_downgrade_commands(diffs, autogen_context):
|
||||
buf = []
|
||||
for diff in reversed(diffs):
|
||||
buf.append(_invoke_command("downgrade", diff, autogen_context))
|
||||
if not buf:
|
||||
buf = ["pass"]
|
||||
return "\n".join(buf)
|
||||
|
||||
def _invoke_command(updown, args, autogen_context):
|
||||
if isinstance(args, tuple):
|
||||
return _invoke_adddrop_command(updown, args, autogen_context)
|
||||
else:
|
||||
return _invoke_modify_command(updown, args, autogen_context)
|
||||
|
||||
def _invoke_adddrop_command(updown, args, autogen_context):
|
||||
cmd_type = args[0]
|
||||
adddrop, cmd_type = cmd_type.split("_")
|
||||
|
||||
cmd_args = args[1:] + (autogen_context,)
|
||||
|
||||
_commands = {
|
||||
"table": (_drop_table, _add_table),
|
||||
"column": (_drop_column, _add_column),
|
||||
"index": (_drop_index, _add_index),
|
||||
"constraint": (_drop_constraint, _add_constraint),
|
||||
}
|
||||
|
||||
cmd_callables = _commands[cmd_type]
|
||||
|
||||
if (
|
||||
updown == "upgrade" and adddrop == "add"
|
||||
) or (
|
||||
updown == "downgrade" and adddrop == "remove"
|
||||
):
|
||||
return cmd_callables[1](*cmd_args)
|
||||
else:
|
||||
return cmd_callables[0](*cmd_args)
|
||||
|
||||
def _invoke_modify_command(updown, args, autogen_context):
|
||||
sname, tname, cname = args[0][1:4]
|
||||
kw = {}
|
||||
|
||||
_arg_struct = {
|
||||
"modify_type": ("existing_type", "type_"),
|
||||
"modify_nullable": ("existing_nullable", "nullable"),
|
||||
"modify_default": ("existing_server_default", "server_default"),
|
||||
}
|
||||
for diff in args:
|
||||
diff_kw = diff[4]
|
||||
for arg in ("existing_type", \
|
||||
"existing_nullable", \
|
||||
"existing_server_default"):
|
||||
if arg in diff_kw:
|
||||
kw.setdefault(arg, diff_kw[arg])
|
||||
old_kw, new_kw = _arg_struct[diff[0]]
|
||||
if updown == "upgrade":
|
||||
kw[new_kw] = diff[-1]
|
||||
kw[old_kw] = diff[-2]
|
||||
else:
|
||||
kw[new_kw] = diff[-2]
|
||||
kw[old_kw] = diff[-1]
|
||||
|
||||
if "nullable" in kw:
|
||||
kw.pop("existing_nullable", None)
|
||||
if "server_default" in kw:
|
||||
kw.pop("existing_server_default", None)
|
||||
return _modify_col(tname, cname, autogen_context, schema=sname, **kw)
|
||||
|
|
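
A minimal sketch of how the filter hooks above are consumed, assuming a hypothetical convention that scratch tables carry a "tmp_" prefix; the include_symbol callable is wrapped by _get_object_filters() into a table-level filter:

from sqlalchemy import create_engine, MetaData
from alembic.migration import MigrationContext
from alembic.autogenerate import compare_metadata

def include_symbol(tablename, schema):
    # returning False excludes a reflected table from the comparison
    return not tablename.startswith("tmp_")   # hypothetical naming convention

engine = create_engine("sqlite://")
mc = MigrationContext.configure(
    engine.connect(),
    opts={'include_symbol': include_symbol})
diffs = compare_metadata(mc, MetaData())   # tmp_* tables never appear in diffs
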
Darwin/lib/python2.7/site-packages/alembic/autogenerate/compare.py  (new file, 490 lines)
@@ -0,0 +1,490 @@
from sqlalchemy.exc import NoSuchTableError
from sqlalchemy import schema as sa_schema, types as sqltypes
import logging
from .. import compat
from .render import _render_server_default
from sqlalchemy.util import OrderedSet


log = logging.getLogger(__name__)

def _run_filters(object_, name, type_, reflected, compare_to, object_filters):
    for fn in object_filters:
        if not fn(object_, name, type_, reflected, compare_to):
            return False
    else:
        return True

def _compare_tables(conn_table_names, metadata_table_names,
                    object_filters,
                    inspector, metadata, diffs, autogen_context):

    default_schema = inspector.bind.dialect.default_schema_name

    # tables coming from the connection will not have "schema"
    # set if it matches default_schema_name; so we need a list
    # of table names from local metadata that also have "None" if schema
    # == default_schema_name.  Most setups will be like this anyway but
    # some are not (see #170)
    metadata_table_names_no_dflt_schema = OrderedSet([
        (schema if schema != default_schema else None, tname)
        for schema, tname in metadata_table_names
    ])

    # to adjust for the MetaData collection storing the tables either
    # as "schemaname.tablename" or just "tablename", create a new lookup
    # which will match the "non-default-schema" keys to the Table object.
    tname_to_table = dict(
        (
            no_dflt_schema,
            metadata.tables[sa_schema._get_table_key(tname, schema)]
        )
        for no_dflt_schema, (schema, tname) in zip(
            metadata_table_names_no_dflt_schema,
            metadata_table_names)
    )
    metadata_table_names = metadata_table_names_no_dflt_schema

    for s, tname in metadata_table_names.difference(conn_table_names):
        name = '%s.%s' % (s, tname) if s else tname
        metadata_table = tname_to_table[(s, tname)]
        if _run_filters(metadata_table, tname, "table", False, None, object_filters):
            diffs.append(("add_table", metadata_table))
            log.info("Detected added table %r", name)
            _compare_indexes_and_uniques(s, tname, object_filters,
                    None,
                    metadata_table,
                    diffs, autogen_context, inspector)

    removal_metadata = sa_schema.MetaData()
    for s, tname in conn_table_names.difference(metadata_table_names):
        name = sa_schema._get_table_key(tname, s)
        exists = name in removal_metadata.tables
        t = sa_schema.Table(tname, removal_metadata, schema=s)
        if not exists:
            inspector.reflecttable(t, None)
        if _run_filters(t, tname, "table", True, None, object_filters):
            diffs.append(("remove_table", t))
            log.info("Detected removed table %r", name)

    existing_tables = conn_table_names.intersection(metadata_table_names)

    existing_metadata = sa_schema.MetaData()
    conn_column_info = {}
    for s, tname in existing_tables:
        name = sa_schema._get_table_key(tname, s)
        exists = name in existing_metadata.tables
        t = sa_schema.Table(tname, existing_metadata, schema=s)
        if not exists:
            inspector.reflecttable(t, None)
        conn_column_info[(s, tname)] = t

    for s, tname in sorted(existing_tables):
        name = '%s.%s' % (s, tname) if s else tname
        metadata_table = tname_to_table[(s, tname)]
        conn_table = existing_metadata.tables[name]

        if _run_filters(metadata_table, tname, "table", False, conn_table, object_filters):
            _compare_columns(s, tname, object_filters,
                    conn_table,
                    metadata_table,
                    diffs, autogen_context, inspector)
            _compare_indexes_and_uniques(s, tname, object_filters,
                    conn_table,
                    metadata_table,
                    diffs, autogen_context, inspector)

    # TODO:
    # table constraints
    # sequences

def _make_index(params, conn_table):
    return sa_schema.Index(
        params['name'],
        *[conn_table.c[cname] for cname in params['column_names']],
        unique=params['unique']
    )

def _make_unique_constraint(params, conn_table):
    return sa_schema.UniqueConstraint(
        *[conn_table.c[cname] for cname in params['column_names']],
        name=params['name']
    )

def _compare_columns(schema, tname, object_filters, conn_table, metadata_table,
                     diffs, autogen_context, inspector):
    name = '%s.%s' % (schema, tname) if schema else tname
    metadata_cols_by_name = dict((c.name, c) for c in metadata_table.c)
    conn_col_names = dict((c.name, c) for c in conn_table.c)
    metadata_col_names = OrderedSet(sorted(metadata_cols_by_name))

    for cname in metadata_col_names.difference(conn_col_names):
        if _run_filters(metadata_cols_by_name[cname], cname,
                        "column", False, None, object_filters):
            diffs.append(
                ("add_column", schema, tname, metadata_cols_by_name[cname])
            )
            log.info("Detected added column '%s.%s'", name, cname)

    for cname in set(conn_col_names).difference(metadata_col_names):
        if _run_filters(conn_table.c[cname], cname,
                        "column", True, None, object_filters):
            diffs.append(
                ("remove_column", schema, tname, conn_table.c[cname])
            )
            log.info("Detected removed column '%s.%s'", name, cname)

    for colname in metadata_col_names.intersection(conn_col_names):
        metadata_col = metadata_cols_by_name[colname]
        conn_col = conn_table.c[colname]
        if not _run_filters(
                metadata_col, colname, "column", False, conn_col, object_filters):
            continue
        col_diff = []
        _compare_type(schema, tname, colname,
            conn_col,
            metadata_col,
            col_diff, autogen_context
        )
        _compare_nullable(schema, tname, colname,
            conn_col,
            metadata_col.nullable,
            col_diff, autogen_context
        )
        _compare_server_default(schema, tname, colname,
            conn_col,
            metadata_col,
            col_diff, autogen_context
        )
        if col_diff:
            diffs.append(col_diff)

class _constraint_sig(object):
    def __eq__(self, other):
        return self.const == other.const

    def __ne__(self, other):
        return self.const != other.const

    def __hash__(self):
        return hash(self.const)

class _uq_constraint_sig(_constraint_sig):
    is_index = False
    is_unique = True

    def __init__(self, const):
        self.const = const
        self.name = const.name
        self.sig = tuple(sorted([col.name for col in const.columns]))

    @property
    def column_names(self):
        return [col.name for col in self.const.columns]

class _ix_constraint_sig(_constraint_sig):
    is_index = True

    def __init__(self, const):
        self.const = const
        self.name = const.name
        self.sig = tuple(sorted([col.name for col in const.columns]))
        self.is_unique = bool(const.unique)

    @property
    def column_names(self):
        return _get_index_column_names(self.const)

def _get_index_column_names(idx):
    if compat.sqla_08:
        return [getattr(exp, "name", None) for exp in idx.expressions]
    else:
        return [getattr(col, "name", None) for col in idx.columns]

def _compare_indexes_and_uniques(schema, tname, object_filters, conn_table,
            metadata_table, diffs, autogen_context, inspector):

    is_create_table = conn_table is None

    # 1a. get raw indexes and unique constraints from metadata ...
    metadata_unique_constraints = set(uq for uq in metadata_table.constraints
                                      if isinstance(uq, sa_schema.UniqueConstraint)
                                      )
    metadata_indexes = set(metadata_table.indexes)

    conn_uniques = conn_indexes = frozenset()

    supports_unique_constraints = False

    if conn_table is not None:
        # 1b. ... and from connection, if the table exists
        if hasattr(inspector, "get_unique_constraints"):
            try:
                conn_uniques = inspector.get_unique_constraints(
                    tname, schema=schema)
                supports_unique_constraints = True
            except NotImplementedError:
                pass
        try:
            conn_indexes = inspector.get_indexes(tname, schema=schema)
        except NotImplementedError:
            pass

        # 2. convert conn-level objects from raw inspector records
        # into schema objects
        conn_uniques = set(_make_unique_constraint(uq_def, conn_table)
                           for uq_def in conn_uniques)
        conn_indexes = set(_make_index(ix, conn_table) for ix in conn_indexes)

    # 3. give the dialect a chance to omit indexes and constraints that
    # we know are either added implicitly by the DB or that the DB
    # can't accurately report on
    autogen_context['context'].impl.\
        correct_for_autogen_constraints(
            conn_uniques, conn_indexes,
            metadata_unique_constraints,
            metadata_indexes
        )

    # 4. organize the constraints into "signature" collections, the
    # _constraint_sig() objects provide a consistent facade over both
    # Index and UniqueConstraint so we can easily work with them
    # interchangeably
    metadata_unique_constraints = set(_uq_constraint_sig(uq)
                                      for uq in metadata_unique_constraints
                                      )

    metadata_indexes = set(_ix_constraint_sig(ix) for ix in metadata_indexes)

    conn_unique_constraints = set(_uq_constraint_sig(uq) for uq in conn_uniques)

    conn_indexes = set(_ix_constraint_sig(ix) for ix in conn_indexes)

    # 5. index things by name, for those objects that have names
    metadata_names = dict(
        (c.name, c) for c in
        metadata_unique_constraints.union(metadata_indexes)
        if c.name is not None)

    conn_uniques_by_name = dict((c.name, c) for c in conn_unique_constraints)
    conn_indexes_by_name = dict((c.name, c) for c in conn_indexes)

    conn_names = dict((c.name, c) for c in
                      conn_unique_constraints.union(conn_indexes)
                      if c.name is not None)

    doubled_constraints = dict(
        (name, (conn_uniques_by_name[name], conn_indexes_by_name[name]))
        for name in set(conn_uniques_by_name).intersection(conn_indexes_by_name)
    )

    # 6. index things by "column signature", to help with unnamed unique
    # constraints.
    conn_uniques_by_sig = dict((uq.sig, uq) for uq in conn_unique_constraints)
    metadata_uniques_by_sig = dict(
        (uq.sig, uq) for uq in metadata_unique_constraints)
    metadata_indexes_by_sig = dict(
        (ix.sig, ix) for ix in metadata_indexes)
    unnamed_metadata_uniques = dict((uq.sig, uq) for uq in
                                    metadata_unique_constraints if uq.name is None)

    # assumptions:
    # 1. a unique constraint or an index from the connection *always*
    #    has a name.
    # 2. an index on the metadata side *always* has a name.
    # 3. a unique constraint on the metadata side *might* have a name.
    # 4. The backend may double up indexes as unique constraints and
    #    vice versa (e.g. MySQL, Postgresql)

    def obj_added(obj):
        if obj.is_index:
            diffs.append(("add_index", obj.const))
            log.info("Detected added index '%s' on %s",
                obj.name, ', '.join([
                    "'%s'" % obj.column_names
                ])
            )
        else:
            if not supports_unique_constraints:
                # can't report unique indexes as added if we don't
                # detect them
                return
            if is_create_table:
                # unique constraints are created inline with table defs
                return
            diffs.append(("add_constraint", obj.const))
            log.info("Detected added unique constraint '%s' on %s",
                obj.name, ', '.join([
                    "'%s'" % obj.column_names
                ])
            )

    def obj_removed(obj):
        if obj.is_index:
            if obj.is_unique and not supports_unique_constraints:
                # many databases double up unique constraints
                # as unique indexes.  without that list we can't
                # be sure what we're doing here
                return

            diffs.append(("remove_index", obj.const))
            log.info("Detected removed index '%s' on '%s'", obj.name, tname)
        else:
            diffs.append(("remove_constraint", obj.const))
            log.info("Detected removed unique constraint '%s' on '%s'",
                obj.name, tname
            )

    def obj_changed(old, new, msg):
        if old.is_index:
            log.info("Detected changed index '%s' on '%s':%s",
                old.name, tname, ', '.join(msg)
            )
            diffs.append(("remove_index", old.const))
            diffs.append(("add_index", new.const))
        else:
            log.info("Detected changed unique constraint '%s' on '%s':%s",
                old.name, tname, ', '.join(msg)
            )
            diffs.append(("remove_constraint", old.const))
            diffs.append(("add_constraint", new.const))

    for added_name in sorted(set(metadata_names).difference(conn_names)):
        obj = metadata_names[added_name]
        obj_added(obj)


    for existing_name in sorted(set(metadata_names).intersection(conn_names)):
        metadata_obj = metadata_names[existing_name]

        if existing_name in doubled_constraints:
            conn_uq, conn_idx = doubled_constraints[existing_name]
            if metadata_obj.is_index:
                conn_obj = conn_idx
            else:
                conn_obj = conn_uq
        else:
            conn_obj = conn_names[existing_name]

        if conn_obj.is_index != metadata_obj.is_index:
            obj_removed(conn_obj)
            obj_added(metadata_obj)
        else:
            msg = []
            if conn_obj.is_unique != metadata_obj.is_unique:
                msg.append(' unique=%r to unique=%r' % (
                    conn_obj.is_unique, metadata_obj.is_unique
                ))
            if conn_obj.sig != metadata_obj.sig:
                msg.append(' columns %r to %r' % (
                    conn_obj.sig, metadata_obj.sig
                ))

            if msg:
                obj_changed(conn_obj, metadata_obj, msg)


    for removed_name in sorted(set(conn_names).difference(metadata_names)):
        conn_obj = conn_names[removed_name]
        if not conn_obj.is_index and conn_obj.sig in unnamed_metadata_uniques:
            continue
        elif removed_name in doubled_constraints:
            if conn_obj.sig not in metadata_indexes_by_sig and \
                    conn_obj.sig not in metadata_uniques_by_sig:
                conn_uq, conn_idx = doubled_constraints[removed_name]
                obj_removed(conn_uq)
                obj_removed(conn_idx)
        else:
            obj_removed(conn_obj)

    for uq_sig in unnamed_metadata_uniques:
        if uq_sig not in conn_uniques_by_sig:
            obj_added(unnamed_metadata_uniques[uq_sig])


def _compare_nullable(schema, tname, cname, conn_col,
                      metadata_col_nullable, diffs,
                      autogen_context):
    conn_col_nullable = conn_col.nullable
    if conn_col_nullable is not metadata_col_nullable:
        diffs.append(
            ("modify_nullable", schema, tname, cname,
                {
                    "existing_type": conn_col.type,
                    "existing_server_default": conn_col.server_default,
                },
                conn_col_nullable,
                metadata_col_nullable),
        )
        log.info("Detected %s on column '%s.%s'",
            "NULL" if metadata_col_nullable else "NOT NULL",
            tname,
            cname
        )

def _compare_type(schema, tname, cname, conn_col,
                  metadata_col, diffs,
                  autogen_context):

    conn_type = conn_col.type
    metadata_type = metadata_col.type
    if conn_type._type_affinity is sqltypes.NullType:
        log.info("Couldn't determine database type "
                 "for column '%s.%s'", tname, cname)
        return
    if metadata_type._type_affinity is sqltypes.NullType:
        log.info("Column '%s.%s' has no type within "
                 "the model; can't compare", tname, cname)
        return

    isdiff = autogen_context['context']._compare_type(conn_col, metadata_col)

    if isdiff:

        diffs.append(
            ("modify_type", schema, tname, cname,
                {
                    "existing_nullable": conn_col.nullable,
                    "existing_server_default": conn_col.server_default,
                },
                conn_type,
                metadata_type),
        )
        log.info("Detected type change from %r to %r on '%s.%s'",
            conn_type, metadata_type, tname, cname
        )

def _compare_server_default(schema, tname, cname, conn_col, metadata_col,
                            diffs, autogen_context):

    metadata_default = metadata_col.server_default
    conn_col_default = conn_col.server_default
    if conn_col_default is None and metadata_default is None:
        return False
    rendered_metadata_default = _render_server_default(
        metadata_default, autogen_context)
    rendered_conn_default = conn_col.server_default.arg.text \
        if conn_col.server_default else None
    isdiff = autogen_context['context']._compare_server_default(
        conn_col, metadata_col,
        rendered_metadata_default,
        rendered_conn_default
    )
    if isdiff:
        conn_col_default = rendered_conn_default
        diffs.append(
            ("modify_default", schema, tname, cname,
                {
                    "existing_nullable": conn_col.nullable,
                    "existing_type": conn_col.type,
                },
                conn_col_default,
                metadata_default),
        )
        log.info("Detected server default on column '%s.%s'",
            tname,
            cname
        )
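
The sig tuples defined above let an unnamed UniqueConstraint in the model match a reflected, named one by column set alone; a minimal sketch of that equivalence:

from sqlalchemy import MetaData, Table, Column, Integer, UniqueConstraint
from alembic.autogenerate.compare import _uq_constraint_sig

named = UniqueConstraint('a', 'b', name='uq_t_ab')
unnamed = UniqueConstraint('b', 'a')
t = Table('t', MetaData(),
          Column('a', Integer),
          Column('b', Integer),
          named, unnamed)

# both reduce to the same order-insensitive column signature
assert _uq_constraint_sig(named).sig == _uq_constraint_sig(unnamed).sig == ('a', 'b')
assert _uq_constraint_sig(unnamed).name is None
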
Darwin/lib/python2.7/site-packages/alembic/autogenerate/render.py  (new file, 457 lines)
@@ -0,0 +1,457 @@
from sqlalchemy import schema as sa_schema, types as sqltypes, sql
import logging
from .. import compat
import re
from ..compat import string_types

log = logging.getLogger(__name__)

try:
    from sqlalchemy.sql.naming import conv
    def _render_gen_name(autogen_context, name):
        if isinstance(name, conv):
            return _f_name(_alembic_autogenerate_prefix(autogen_context), name)
        else:
            return name
except ImportError:
    def _render_gen_name(autogen_context, name):
        return name

class _f_name(object):
    def __init__(self, prefix, name):
        self.prefix = prefix
        self.name = name

    def __repr__(self):
        return "%sf(%r)" % (self.prefix, self.name)

def _render_potential_expr(value, autogen_context):
    if isinstance(value, sql.ClauseElement):
        if compat.sqla_08:
            compile_kw = dict(compile_kwargs={'literal_binds': True})
        else:
            compile_kw = {}

        return "%(prefix)stext(%(sql)r)" % {
            "prefix": _sqlalchemy_autogenerate_prefix(autogen_context),
            "sql": str(
                value.compile(dialect=autogen_context['dialect'],
                              **compile_kw)
            )
        }

    else:
        return repr(value)

def _add_table(table, autogen_context):
    text = "%(prefix)screate_table(%(tablename)r,\n%(args)s" % {
        'tablename': table.name,
        'prefix': _alembic_autogenerate_prefix(autogen_context),
        'args': ',\n'.join(
            [col for col in
                [_render_column(col, autogen_context) for col in table.c]
                if col] +
            sorted([rcons for rcons in
                [_render_constraint(cons, autogen_context) for cons in
                    table.constraints]
                if rcons is not None
            ])
        )
    }
    if table.schema:
        text += ",\nschema=%r" % table.schema
    for k in sorted(table.kwargs):
        text += ",\n%s=%r" % (k.replace(" ", "_"), table.kwargs[k])
    text += "\n)"
    return text

def _drop_table(table, autogen_context):
    text = "%(prefix)sdrop_table(%(tname)r" % {
        "prefix": _alembic_autogenerate_prefix(autogen_context),
        "tname": table.name
    }
    if table.schema:
        text += ", schema=%r" % table.schema
    text += ")"
    return text

def _add_index(index, autogen_context):
    """
    Generate Alembic operations for the CREATE INDEX of an
    :class:`~sqlalchemy.schema.Index` instance.
    """
    from .compare import _get_index_column_names

    text = "%(prefix)screate_index(%(name)r, '%(table)s', %(columns)s, "\
        "unique=%(unique)r%(schema)s%(kwargs)s)" % {
            'prefix': _alembic_autogenerate_prefix(autogen_context),
            'name': _render_gen_name(autogen_context, index.name),
            'table': index.table.name,
            'columns': _get_index_column_names(index),
            'unique': index.unique or False,
            'schema': (", schema='%s'" % index.table.schema) if index.table.schema else '',
            'kwargs': (', ' + ', '.join(
                ["%s=%s" % (key, _render_potential_expr(val, autogen_context))
                 for key, val in index.kwargs.items()]))\
                if len(index.kwargs) else ''
        }
    return text

def _drop_index(index, autogen_context):
    """
    Generate Alembic operations for the DROP INDEX of an
    :class:`~sqlalchemy.schema.Index` instance.
    """
    text = "%(prefix)sdrop_index(%(name)r, "\
        "table_name='%(table_name)s'%(schema)s)" % {
            'prefix': _alembic_autogenerate_prefix(autogen_context),
            'name': _render_gen_name(autogen_context, index.name),
            'table_name': index.table.name,
            'schema': ((", schema='%s'" % index.table.schema)
                       if index.table.schema else '')
        }
    return text


def _render_unique_constraint(constraint, autogen_context):
    rendered = _user_defined_render("unique", constraint, autogen_context)
    if rendered is not False:
        return rendered

    return _uq_constraint(constraint, autogen_context, False)


def _add_unique_constraint(constraint, autogen_context):
    """
    Generate Alembic operations for the ALTER TABLE .. ADD CONSTRAINT ...
    UNIQUE of a :class:`~sqlalchemy.schema.UniqueConstraint` instance.
    """
    return _uq_constraint(constraint, autogen_context, True)

def _uq_constraint(constraint, autogen_context, alter):
    opts = []
    if constraint.deferrable:
        opts.append(("deferrable", str(constraint.deferrable)))
    if constraint.initially:
        opts.append(("initially", str(constraint.initially)))
    if alter and constraint.table.schema:
        opts.append(("schema", str(constraint.table.schema)))
    if not alter and constraint.name:
        opts.append(("name", _render_gen_name(autogen_context, constraint.name)))

    if alter:
        args = [repr(_render_gen_name(autogen_context, constraint.name)),
                repr(constraint.table.name)]
        args.append(repr([col.name for col in constraint.columns]))
        args.extend(["%s=%r" % (k, v) for k, v in opts])
        return "%(prefix)screate_unique_constraint(%(args)s)" % {
            'prefix': _alembic_autogenerate_prefix(autogen_context),
            'args': ", ".join(args)
        }
    else:
        args = [repr(col.name) for col in constraint.columns]
        args.extend(["%s=%r" % (k, v) for k, v in opts])
        return "%(prefix)sUniqueConstraint(%(args)s)" % {
            "prefix": _sqlalchemy_autogenerate_prefix(autogen_context),
            "args": ", ".join(args)
        }


def _add_fk_constraint(constraint, autogen_context):
    raise NotImplementedError()

def _add_pk_constraint(constraint, autogen_context):
    raise NotImplementedError()

def _add_check_constraint(constraint, autogen_context):
    raise NotImplementedError()

def _add_constraint(constraint, autogen_context):
    """
    Dispatcher for the different types of constraints.
    """
    funcs = {
        "unique_constraint": _add_unique_constraint,
        "foreign_key_constraint": _add_fk_constraint,
        "primary_key_constraint": _add_pk_constraint,
        "check_constraint": _add_check_constraint,
        "column_check_constraint": _add_check_constraint,
    }
    return funcs[constraint.__visit_name__](constraint, autogen_context)

def _drop_constraint(constraint, autogen_context):
    """
    Generate Alembic operations for the ALTER TABLE ... DROP CONSTRAINT
    of a :class:`~sqlalchemy.schema.UniqueConstraint` instance.
    """
    text = "%(prefix)sdrop_constraint(%(name)r, '%(table_name)s'%(schema)s)" % {
        'prefix': _alembic_autogenerate_prefix(autogen_context),
        'name': _render_gen_name(autogen_context, constraint.name),
        'table_name': constraint.table.name,
        'schema': (", schema='%s'" % constraint.table.schema)
                  if constraint.table.schema else '',
    }
    return text

def _add_column(schema, tname, column, autogen_context):
    text = "%(prefix)sadd_column(%(tname)r, %(column)s" % {
        "prefix": _alembic_autogenerate_prefix(autogen_context),
        "tname": tname,
        "column": _render_column(column, autogen_context)
    }
    if schema:
        text += ", schema=%r" % schema
    text += ")"
    return text

def _drop_column(schema, tname, column, autogen_context):
    text = "%(prefix)sdrop_column(%(tname)r, %(cname)r" % {
        "prefix": _alembic_autogenerate_prefix(autogen_context),
        "tname": tname,
        "cname": column.name
    }
    if schema:
        text += ", schema=%r" % schema
    text += ")"
    return text

def _modify_col(tname, cname,
                autogen_context,
                server_default=False,
                type_=None,
                nullable=None,
                existing_type=None,
                existing_nullable=None,
                existing_server_default=False,
                schema=None):
    indent = " " * 11
    text = "%(prefix)salter_column(%(tname)r, %(cname)r" % {
        'prefix': _alembic_autogenerate_prefix(
            autogen_context),
        'tname': tname,
        'cname': cname}
    text += ",\n%sexisting_type=%s" % (indent,
                                       _repr_type(existing_type, autogen_context))
    if server_default is not False:
        rendered = _render_server_default(
            server_default, autogen_context)
        text += ",\n%sserver_default=%s" % (indent, rendered)

    if type_ is not None:
        text += ",\n%stype_=%s" % (indent,
                                   _repr_type(type_, autogen_context))
    if nullable is not None:
        text += ",\n%snullable=%r" % (
            indent, nullable,)
    if existing_nullable is not None:
        text += ",\n%sexisting_nullable=%r" % (
            indent, existing_nullable)
    if existing_server_default:
        rendered = _render_server_default(
            existing_server_default,
            autogen_context)
        text += ",\n%sexisting_server_default=%s" % (
            indent, rendered)
    if schema:
        text += ",\n%sschema=%r" % (indent, schema)
    text += ")"
    return text

def _user_autogenerate_prefix(autogen_context):
    prefix = autogen_context['opts']['user_module_prefix']
    if prefix is None:
        return _sqlalchemy_autogenerate_prefix(autogen_context)
    else:
        return prefix

def _sqlalchemy_autogenerate_prefix(autogen_context):
    return autogen_context['opts']['sqlalchemy_module_prefix'] or ''

def _alembic_autogenerate_prefix(autogen_context):
    return autogen_context['opts']['alembic_module_prefix'] or ''

def _user_defined_render(type_, object_, autogen_context):
    if 'opts' in autogen_context and \
            'render_item' in autogen_context['opts']:
        render = autogen_context['opts']['render_item']
        if render:
            rendered = render(type_, object_, autogen_context)
            if rendered is not False:
                return rendered
    return False

def _render_column(column, autogen_context):
    rendered = _user_defined_render("column", column, autogen_context)
    if rendered is not False:
        return rendered

    opts = []
    if column.server_default:
        rendered = _render_server_default(
            column.server_default, autogen_context
        )
        if rendered:
            opts.append(("server_default", rendered))

    if not column.autoincrement:
        opts.append(("autoincrement", column.autoincrement))

    if column.nullable is not None:
        opts.append(("nullable", column.nullable))

    # TODO: for non-ascii colname, assign a "key"
    return "%(prefix)sColumn(%(name)r, %(type)s, %(kw)s)" % {
        'prefix': _sqlalchemy_autogenerate_prefix(autogen_context),
        'name': column.name,
        'type': _repr_type(column.type, autogen_context),
        'kw': ", ".join(["%s=%s" % (kwname, val) for kwname, val in opts])
    }

def _render_server_default(default, autogen_context):
    rendered = _user_defined_render("server_default", default, autogen_context)
    if rendered is not False:
        return rendered

    if isinstance(default, sa_schema.DefaultClause):
        if isinstance(default.arg, string_types):
            default = default.arg
        else:
            default = str(default.arg.compile(
                dialect=autogen_context['dialect']))
    if isinstance(default, string_types):
        # TODO: this is just a hack to get
        # tests to pass until we figure out
        # WTF sqlite is doing
        default = re.sub(r"^'|'$", "", default)
        return repr(default)
    else:
        return None

def _repr_type(type_, autogen_context):
    rendered = _user_defined_render("type", type_, autogen_context)
    if rendered is not False:
        return rendered

    mod = type(type_).__module__
    imports = autogen_context.get('imports', None)
    if mod.startswith("sqlalchemy.dialects"):
        dname = re.match(r"sqlalchemy\.dialects\.(\w+)", mod).group(1)
        if imports is not None:
            imports.add("from sqlalchemy.dialects import %s" % dname)
        return "%s.%r" % (dname, type_)
    elif mod.startswith("sqlalchemy"):
        prefix = _sqlalchemy_autogenerate_prefix(autogen_context)
        return "%s%r" % (prefix, type_)
    else:
        prefix = _user_autogenerate_prefix(autogen_context)
        return "%s%r" % (prefix, type_)

def _render_constraint(constraint, autogen_context):
    renderer = _constraint_renderers.get(type(constraint), None)
    if renderer:
        return renderer(constraint, autogen_context)
    else:
        return None

def _render_primary_key(constraint, autogen_context):
    rendered = _user_defined_render("primary_key", constraint, autogen_context)
    if rendered is not False:
        return rendered

    if not constraint.columns:
        return None

    opts = []
    if constraint.name:
        opts.append(("name", repr(_render_gen_name(autogen_context, constraint.name))))
    return "%(prefix)sPrimaryKeyConstraint(%(args)s)" % {
        "prefix": _sqlalchemy_autogenerate_prefix(autogen_context),
        "args": ", ".join(
            [repr(c.key) for c in constraint.columns] +
            ["%s=%s" % (kwname, val) for kwname, val in opts]
        ),
    }

def _fk_colspec(fk, metadata_schema):
    """Implement a 'safe' version of ForeignKey._get_colspec() that
    never tries to resolve the remote table.

    """
    if metadata_schema is None:
        return fk._get_colspec()
    else:
        # need to render schema breaking up tokens by hand, since the
        # ForeignKeyConstraint here may not actually have a remote
        # Table present
        tokens = fk._colspec.split(".")
        # no schema in the colspec, render it
        if len(tokens) == 2:
            return "%s.%s" % (metadata_schema, fk._colspec)
        else:
            return fk._colspec

def _render_foreign_key(constraint, autogen_context):
    rendered = _user_defined_render("foreign_key", constraint, autogen_context)
    if rendered is not False:
        return rendered

    opts = []
    if constraint.name:
        opts.append(("name", repr(_render_gen_name(autogen_context, constraint.name))))
    if constraint.onupdate:
        opts.append(("onupdate", repr(constraint.onupdate)))
    if constraint.ondelete:
        opts.append(("ondelete", repr(constraint.ondelete)))
    if constraint.initially:
        opts.append(("initially", repr(constraint.initially)))
    if constraint.deferrable:
        opts.append(("deferrable", repr(constraint.deferrable)))
    if constraint.use_alter:
        opts.append(("use_alter", repr(constraint.use_alter)))

    apply_metadata_schema = constraint.parent.metadata.schema
    return "%(prefix)sForeignKeyConstraint([%(cols)s], "\
        "[%(refcols)s], %(args)s)" % {
            "prefix": _sqlalchemy_autogenerate_prefix(autogen_context),
            "cols": ", ".join("'%s'" % f.parent.key for f in constraint.elements),
            "refcols": ", ".join(repr(_fk_colspec(f, apply_metadata_schema))
                                 for f in constraint.elements),
            "args": ", ".join(
                ["%s=%s" % (kwname, val) for kwname, val in opts]
            ),
        }

def _render_check_constraint(constraint, autogen_context):
    rendered = _user_defined_render("check", constraint, autogen_context)
    if rendered is not False:
        return rendered

    # detect the constraint being part of
    # a parent type which is probably in the Table already.
    # ideally SQLAlchemy would give us more of a first class
    # way to detect this.
    if constraint._create_rule and \
            hasattr(constraint._create_rule, 'target') and \
            isinstance(constraint._create_rule.target,
                       sqltypes.TypeEngine):
        return None
    opts = []
    if constraint.name:
        opts.append(("name", repr(_render_gen_name(autogen_context, constraint.name))))
    return "%(prefix)sCheckConstraint(%(sqltext)r%(opts)s)" % {
        "prefix": _sqlalchemy_autogenerate_prefix(autogen_context),
        "opts": ", " + (", ".join("%s=%s" % (k, v)
                                  for k, v in opts)) if opts else "",
        "sqltext": str(
            constraint.sqltext.compile(
                dialect=autogen_context['dialect']
            )
        )
    }

_constraint_renderers = {
    sa_schema.PrimaryKeyConstraint: _render_primary_key,
    sa_schema.ForeignKeyConstraint: _render_foreign_key,
    sa_schema.UniqueConstraint: _render_unique_constraint,
    sa_schema.CheckConstraint: _render_check_constraint
}
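
A rough sketch of what these renderers emit, assuming the default module prefixes ('op.' and 'sa.') that the environment normally places in the opts dictionary:

from sqlalchemy import MetaData, Table, Column, Integer, String
from alembic.autogenerate.render import _add_table

autogen_context = {
    'opts': {
        'sqlalchemy_module_prefix': 'sa.',
        'alembic_module_prefix': 'op.',
        'user_module_prefix': None,
    },
    'dialect': None,      # only consulted for server defaults / expressions
    'imports': set(),
}

account = Table('account', MetaData(),
                Column('id', Integer, primary_key=True),
                Column('name', String(50), nullable=False))

print(_add_table(account, autogen_context))
# roughly:
# op.create_table('account',
# sa.Column('id', sa.Integer(), nullable=False),
# sa.Column('name', sa.String(length=50), nullable=False),
# sa.PrimaryKeyConstraint('id')
# )
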
Darwin/lib/python2.7/site-packages/alembic/command.py  (new file, 266 lines)
@@ -0,0 +1,266 @@
import os

from .script import ScriptDirectory
from .environment import EnvironmentContext
from . import util, autogenerate as autogen

def list_templates(config):
    """List available templates"""

    config.print_stdout("Available templates:\n")
    for tempname in os.listdir(config.get_template_directory()):
        with open(os.path.join(
                config.get_template_directory(),
                tempname,
                'README')) as readme:
            synopsis = next(readme)
        config.print_stdout("%s - %s", tempname, synopsis)

    config.print_stdout("\nTemplates are used via the 'init' command, e.g.:")
    config.print_stdout("\n  alembic init --template pylons ./scripts")

def init(config, directory, template='generic'):
    """Initialize a new scripts directory."""

    if os.access(directory, os.F_OK):
        raise util.CommandError("Directory %s already exists" % directory)

    template_dir = os.path.join(config.get_template_directory(),
                                template)
    if not os.access(template_dir, os.F_OK):
        raise util.CommandError("No such template %r" % template)

    util.status("Creating directory %s" % os.path.abspath(directory),
                os.makedirs, directory)

    versions = os.path.join(directory, 'versions')
    util.status("Creating directory %s" % os.path.abspath(versions),
                os.makedirs, versions)

    script = ScriptDirectory(directory)

    for file_ in os.listdir(template_dir):
        file_path = os.path.join(template_dir, file_)
        if file_ == 'alembic.ini.mako':
            config_file = os.path.abspath(config.config_file_name)
            if os.access(config_file, os.F_OK):
                util.msg("File %s already exists, skipping" % config_file)
            else:
                script._generate_template(
                    file_path,
                    config_file,
                    script_location=directory
                )
        elif os.path.isfile(file_path):
            output_file = os.path.join(directory, file_)
            script._copy_file(
                file_path,
                output_file
            )

    util.msg("Please edit configuration/connection/logging "\
        "settings in %r before proceeding." % config_file)

def revision(config, message=None, autogenerate=False, sql=False):
    """Create a new revision file."""

    script = ScriptDirectory.from_config(config)
    template_args = {
        'config': config  # Let templates use config for
                          # e.g. multiple databases
    }
    imports = set()

    environment = util.asbool(
        config.get_main_option("revision_environment")
    )

    if autogenerate:
        environment = True
        def retrieve_migrations(rev, context):
            if script.get_revision(rev) is not script.get_revision("head"):
                raise util.CommandError("Target database is not up to date.")
            autogen._produce_migration_diffs(context, template_args, imports)
            return []
    elif environment:
        def retrieve_migrations(rev, context):
            return []

    if environment:
        with EnvironmentContext(
            config,
            script,
            fn=retrieve_migrations,
            as_sql=sql,
            template_args=template_args,
        ):
            script.run_env()
    return script.generate_revision(util.rev_id(), message, refresh=True,
                                    **template_args)


def upgrade(config, revision, sql=False, tag=None):
    """Upgrade to a later version."""

    script = ScriptDirectory.from_config(config)

    starting_rev = None
    if ":" in revision:
        if not sql:
            raise util.CommandError("Range revision not allowed")
        starting_rev, revision = revision.split(':', 2)

    def upgrade(rev, context):
        return script._upgrade_revs(revision, rev)

    with EnvironmentContext(
        config,
        script,
        fn=upgrade,
        as_sql=sql,
        starting_rev=starting_rev,
        destination_rev=revision,
        tag=tag
    ):
        script.run_env()

def downgrade(config, revision, sql=False, tag=None):
    """Revert to a previous version."""

    script = ScriptDirectory.from_config(config)
    starting_rev = None
    if ":" in revision:
        if not sql:
            raise util.CommandError("Range revision not allowed")
        starting_rev, revision = revision.split(':', 2)
    elif sql:
        raise util.CommandError("downgrade with --sql requires <fromrev>:<torev>")

    def downgrade(rev, context):
        return script._downgrade_revs(revision, rev)

    with EnvironmentContext(
        config,
        script,
        fn=downgrade,
        as_sql=sql,
        starting_rev=starting_rev,
        destination_rev=revision,
        tag=tag
    ):
        script.run_env()

def history(config, rev_range=None):
    """List changeset scripts in chronological order."""

    script = ScriptDirectory.from_config(config)
    if rev_range is not None:
        if ":" not in rev_range:
            raise util.CommandError(
                "History range requires [start]:[end], "
                "[start]:, or :[end]")
        base, head = rev_range.strip().split(":")
    else:
        base = head = None

    def _display_history(config, script, base, head):
        for sc in script.walk_revisions(
                base=base or "base",
                head=head or "head"):
            if sc.is_head:
                config.print_stdout("")
            config.print_stdout(sc.log_entry)

    def _display_history_w_current(config, script, base=None, head=None):
        def _display_current_history(rev, context):
            if head is None:
                _display_history(config, script, base, rev)
            elif base is None:
                _display_history(config, script, rev, head)
            return []

        with EnvironmentContext(
            config,
            script,
            fn=_display_current_history
        ):
            script.run_env()

    if base == "current":
        _display_history_w_current(config, script, head=head)
    elif head == "current":
        _display_history_w_current(config, script, base=base)
    else:
        _display_history(config, script, base, head)


def branches(config):
    """Show current un-spliced branch points"""
    script = ScriptDirectory.from_config(config)
    for sc in script.walk_revisions():
        if sc.is_branch_point:
            config.print_stdout(sc)
            for rev in sc.nextrev:
                config.print_stdout("%s -> %s",
                    " " * len(str(sc.down_revision)),
                    script.get_revision(rev)
                )

def current(config, head_only=False):
    """Display the current revision for each database."""

    script = ScriptDirectory.from_config(config)
    def display_version(rev, context):
        rev = script.get_revision(rev)

        if head_only:
            config.print_stdout("%s%s" % (
                rev.revision if rev else None,
                " (head)" if rev and rev.is_head else ""))

        else:
            config.print_stdout("Current revision for %s: %s",
                                util.obfuscate_url_pw(
                                    context.connection.engine.url),
                                rev)
        return []

    with EnvironmentContext(
        config,
        script,
        fn=display_version
    ):
        script.run_env()

def stamp(config, revision, sql=False, tag=None):
    """'stamp' the revision table with the given revision; don't
    run any migrations."""

    script = ScriptDirectory.from_config(config)
    def do_stamp(rev, context):
        if sql:
            current = False
        else:
            current = context._current_rev()
        dest = script.get_revision(revision)
        if dest is not None:
            dest = dest.revision
        context._update_current_rev(current, dest)
        return []
    with EnvironmentContext(
        config,
        script,
        fn=do_stamp,
        as_sql=sql,
        destination_rev=revision,
        tag=tag
    ):
        script.run_env()

def splice(config, parent, child):
    """'splice' two branches, creating a new revision file.

    this command isn't implemented right now.

    """
    raise NotImplementedError()
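
These command functions double as a programmatic API; a minimal sketch, assuming an alembic.ini and a migration environment already exist:

from alembic.config import Config
from alembic import command

cfg = Config("alembic.ini")      # hypothetical path to an existing config
command.revision(cfg, message="add account table", autogenerate=True)
command.upgrade(cfg, "head")     # apply all pending migrations
command.downgrade(cfg, "-1")     # step back one revision
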
Darwin/lib/python2.7/site-packages/alembic/compat.py  (new file, 130 lines)
@@ -0,0 +1,130 @@
import io
import sys
from sqlalchemy import __version__ as sa_version

if sys.version_info < (2, 6):
    raise NotImplementedError("Python 2.6 or greater is required.")

sqla_08 = sa_version >= '0.8.0'
sqla_09 = sa_version >= '0.9.0'

py2k = sys.version_info < (3, 0)
py3k = sys.version_info >= (3, 0)
py33 = sys.version_info >= (3, 3)

if py3k:
    import builtins as compat_builtins
    string_types = str,
    binary_type = bytes
    text_type = str

    def callable(fn):
        return hasattr(fn, '__call__')

    def u(s):
        return s

else:
    import __builtin__ as compat_builtins
    string_types = basestring,
    binary_type = str
    text_type = unicode
    callable = callable

    def u(s):
        return unicode(s, "utf-8")

if py3k:
    from configparser import ConfigParser as SafeConfigParser
    import configparser
else:
    from ConfigParser import SafeConfigParser
    import ConfigParser as configparser

if py2k:
    from mako.util import parse_encoding

if py33:
    from importlib import machinery

    def load_module_py(module_id, path):
        return machinery.SourceFileLoader(module_id, path).load_module(module_id)

    def load_module_pyc(module_id, path):
        return machinery.SourcelessFileLoader(module_id, path).load_module(module_id)

else:
    import imp

    def load_module_py(module_id, path):
        with open(path, 'rb') as fp:
            mod = imp.load_source(module_id, path, fp)
            if py2k:
                source_encoding = parse_encoding(fp)
                if source_encoding:
                    mod._alembic_source_encoding = source_encoding
            return mod

    def load_module_pyc(module_id, path):
        with open(path, 'rb') as fp:
            mod = imp.load_compiled(module_id, path, fp)
            # no source encoding here
            return mod

try:
    exec_ = getattr(compat_builtins, 'exec')
except AttributeError:
    # Python 2
    def exec_(func_text, globals_, lcl):
        exec('exec func_text in globals_, lcl')

################################################
# cross-compatible metaclass implementation
# Copyright (c) 2010-2012 Benjamin Peterson
def with_metaclass(meta, base=object):
    """Create a base class with a metaclass."""
    return meta("%sBase" % meta.__name__, (base,), {})
################################################


# produce a wrapper that allows encoded text to stream
# into a given buffer, but doesn't close it.
# not sure of a more idiomatic approach to this.
class EncodedIO(io.TextIOWrapper):
    def close(self):
        pass

if py2k:
    # in Py2K, the io.* package is awkward because it does not
    # easily wrap the file type (e.g. sys.stdout) and I can't
    # figure out at all how to wrap StringIO.StringIO (used by nosetests)
    # and also might be user specified too.  So create a full
    # adapter.

    class ActLikePy3kIO(object):
        """Produce an object capable of wrapping either
        sys.stdout (e.g. file) *or* StringIO.StringIO().

        """
        def _false(self):
            return False

        def _true(self):
            return True

        readable = seekable = _false
        writable = _true
        closed = False

        def __init__(self, file_):
            self.file_ = file_

        def write(self, text):
            return self.file_.write(text)

        def flush(self):
            return self.file_.flush()

    class EncodedIO(EncodedIO):
        def __init__(self, file_, encoding):
            super(EncodedIO, self).__init__(
                ActLikePy3kIO(file_), encoding=encoding)
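As an illustration of the with_metaclass() helper, here is a small registry sketch in the style of the ImplMeta metaclass used later in ddl/impl.py; the names below are hypothetical and not part of the vendored file:

    # Hypothetical example: a registry metaclass used through
    # with_metaclass(), so one class statement works on py2k and py3k.
    registry = {}

    class RegisterMeta(type):
        # record every class created with this metaclass, keyed by name
        def __init__(cls, name, bases, dict_):
            type.__init__(cls, name, bases, dict_)
            registry[name] = cls

    class Base(with_metaclass(RegisterMeta)):
        pass

    class Concrete(Base):
        pass

    assert registry['Concrete'] is Concrete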
301
Darwin/lib/python2.7/site-packages/alembic/config.py
Normal file
@@ -0,0 +1,301 @@
from argparse import ArgumentParser
from .compat import SafeConfigParser
import inspect
import os
import sys

from . import command, util, package_dir, compat

class Config(object):
    """Represent an Alembic configuration.

    Within an ``env.py`` script, this is available
    via the :attr:`.EnvironmentContext.config` attribute,
    which in turn is available at ``alembic.context``::

        from alembic import context

        some_param = context.config.get_main_option("my option")

    When invoking Alembic programmatically, a new
    :class:`.Config` can be created by passing
    the name of an .ini file to the constructor::

        from alembic.config import Config
        alembic_cfg = Config("/path/to/yourapp/alembic.ini")

    With a :class:`.Config` object, you can then
    run Alembic commands programmatically using the directives
    in :mod:`alembic.command`.

    The :class:`.Config` object can also be constructed without
    a filename.  Values can be set programmatically, and
    new sections will be created as needed::

        from alembic.config import Config
        alembic_cfg = Config()
        alembic_cfg.set_main_option("script_location", "myapp:migrations")
        alembic_cfg.set_main_option("url", "postgresql://foo/bar")
        alembic_cfg.set_section_option("mysection", "foo", "bar")

    :param file_: name of the .ini file to open.
    :param ini_section: name of the main Alembic section within the
     .ini file
    :param output_buffer: optional file-like buffer which
     will be passed to the :class:`.MigrationContext` - used to redirect
     the output of "offline generation" when using Alembic programmatically.
    :param stdout: buffer where the "print" output of commands will be sent.
     Defaults to ``sys.stdout``.

     .. versionadded:: 0.4

    """
    def __init__(self, file_=None, ini_section='alembic', output_buffer=None,
                 stdout=sys.stdout, cmd_opts=None):
        """Construct a new :class:`.Config`

        """
        self.config_file_name = file_
        self.config_ini_section = ini_section
        self.output_buffer = output_buffer
        self.stdout = stdout
        self.cmd_opts = cmd_opts

    cmd_opts = None
    """The command-line options passed to the ``alembic`` script.

    Within an ``env.py`` script this can be accessed via the
    :attr:`.EnvironmentContext.config` attribute.

    .. versionadded:: 0.6.0

    .. seealso::

        :meth:`.EnvironmentContext.get_x_argument`

    """

    config_file_name = None
    """Filesystem path to the .ini file in use."""

    config_ini_section = None
    """Name of the config file section to read basic configuration
    from.  Defaults to ``alembic``, that is the ``[alembic]`` section
    of the .ini file.  This value is modified using the ``-n/--name``
    option to the Alembic runner.

    """

    def print_stdout(self, text, *arg):
        """Render a message to standard out."""

        util.write_outstream(
            self.stdout,
            (compat.text_type(text) % arg),
            "\n"
        )

    @util.memoized_property
    def file_config(self):
        """Return the underlying :class:`ConfigParser` object.

        Direct access to the .ini file is available here,
        though the :meth:`.Config.get_section` and
        :meth:`.Config.get_main_option`
        methods provide a possibly simpler interface.

        """

        if self.config_file_name:
            here = os.path.abspath(os.path.dirname(self.config_file_name))
        else:
            here = ""
        file_config = SafeConfigParser({'here': here})
        if self.config_file_name:
            file_config.read([self.config_file_name])
        else:
            file_config.add_section(self.config_ini_section)
        return file_config

    def get_template_directory(self):
        """Return the directory where Alembic setup templates are found.

        This method is used by the alembic ``init`` and ``list_templates``
        commands.

        """
        return os.path.join(package_dir, 'templates')

    def get_section(self, name):
        """Return all the configuration options from a given .ini file section
        as a dictionary.

        """
        return dict(self.file_config.items(name))

    def set_main_option(self, name, value):
        """Set an option programmatically within the 'main' section.

        This overrides whatever was in the .ini file.

        """
        self.file_config.set(self.config_ini_section, name, value)

    def remove_main_option(self, name):
        self.file_config.remove_option(self.config_ini_section, name)

    def set_section_option(self, section, name, value):
        """Set an option programmatically within the given section.

        The section is created if it doesn't exist already.
        The value here will override whatever was in the .ini
        file.

        """
        if not self.file_config.has_section(section):
            self.file_config.add_section(section)
        self.file_config.set(section, name, value)

    def get_section_option(self, section, name, default=None):
        """Return an option from the given section of the .ini file.

        """
        if not self.file_config.has_section(section):
            raise util.CommandError("No config file %r found, or file has no "
                                    "'[%s]' section" %
                                    (self.config_file_name, section))
        if self.file_config.has_option(section, name):
            return self.file_config.get(section, name)
        else:
            return default

    def get_main_option(self, name, default=None):
        """Return an option from the 'main' section of the .ini file.

        This defaults to being a key from the ``[alembic]``
        section, unless the ``-n/--name`` flag were used to
        indicate a different section.

        """
        return self.get_section_option(self.config_ini_section, name, default)


class CommandLine(object):
    def __init__(self, prog=None):
        self._generate_args(prog)

    def _generate_args(self, prog):
        def add_options(parser, positional, kwargs):
            if 'template' in kwargs:
                parser.add_argument("-t", "--template",
                                    default='generic',
                                    type=str,
                                    help="Setup template for use with 'init'")
            if 'message' in kwargs:
                parser.add_argument("-m", "--message",
                                    type=str,
                                    help="Message string to use with 'revision'")
            if 'sql' in kwargs:
                parser.add_argument("--sql",
                                    action="store_true",
                                    help="Don't emit SQL to database - dump to "
                                         "standard output/file instead")
            if 'tag' in kwargs:
                parser.add_argument("--tag",
                                    type=str,
                                    help="Arbitrary 'tag' name - can be used by "
                                         "custom env.py scripts.")
            if 'autogenerate' in kwargs:
                parser.add_argument("--autogenerate",
                                    action="store_true",
                                    help="Populate revision script with candidate "
                                         "migration operations, based on comparison "
                                         "of database to model.")
            # "current" command
            if 'head_only' in kwargs:
                parser.add_argument("--head-only",
                                    action="store_true",
                                    help="Only show current version and "
                                         "whether or not this is the head revision.")

            if 'rev_range' in kwargs:
                parser.add_argument("-r", "--rev-range",
                                    action="store",
                                    help="Specify a revision range; "
                                         "format is [start]:[end]")

            positional_help = {
                'directory': "location of scripts directory",
                'revision': "revision identifier"
            }
            for arg in positional:
                parser.add_argument(arg, help=positional_help.get(arg))

        parser = ArgumentParser(prog=prog)
        parser.add_argument("-c", "--config",
                            type=str,
                            default="alembic.ini",
                            help="Alternate config file")
        parser.add_argument("-n", "--name",
                            type=str,
                            default="alembic",
                            help="Name of section in .ini file to "
                                 "use for Alembic config")
        parser.add_argument("-x", action="append",
                            help="Additional arguments consumed by "
                                 "custom env.py scripts, e.g. -x "
                                 "setting1=somesetting -x setting2=somesetting")

        subparsers = parser.add_subparsers()

        for fn in [getattr(command, n) for n in dir(command)]:
            if inspect.isfunction(fn) and \
                    fn.__name__[0] != '_' and \
                    fn.__module__ == 'alembic.command':

                spec = inspect.getargspec(fn)
                if spec[3]:
                    positional = spec[0][1:-len(spec[3])]
                    kwarg = spec[0][-len(spec[3]):]
                else:
                    positional = spec[0][1:]
                    kwarg = []

                subparser = subparsers.add_parser(
                    fn.__name__,
                    help=fn.__doc__)
                add_options(subparser, positional, kwarg)
                subparser.set_defaults(cmd=(fn, positional, kwarg))
        self.parser = parser

    def run_cmd(self, config, options):
        fn, positional, kwarg = options.cmd

        try:
            fn(config,
               *[getattr(options, k) for k in positional],
               **dict((k, getattr(options, k)) for k in kwarg)
               )
        except util.CommandError as e:
            util.err(str(e))

    def main(self, argv=None):
        options = self.parser.parse_args(argv)
        if not hasattr(options, "cmd"):
            # see http://bugs.python.org/issue9253, argparse
            # behavior changed incompatibly in py3.3
            self.parser.error("too few arguments")
        else:
            cfg = Config(file_=options.config,
                         ini_section=options.name, cmd_opts=options)
            self.run_cmd(cfg, options)

def main(argv=None, prog=None, **kwargs):
    """The console runner function for Alembic."""

    CommandLine(prog=prog).main(argv=argv)

if __name__ == '__main__':
    main()
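For reference, the two entry points above can be exercised like this; a sketch assuming a working alembic.ini (file name illustrative):

    # Hypothetical driver script -- not part of the vendored source.
    from alembic.config import Config, main
    from alembic import command

    cfg = Config("alembic.ini")
    command.upgrade(cfg, "head")      # call a command function directly

    main(argv=["upgrade", "head"])    # or go through the argparse front end

Either path ends in the same command function; main() simply adds argument parsing and CommandError reporting on top.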
6
Darwin/lib/python2.7/site-packages/alembic/context.py
Normal file
@@ -0,0 +1,6 @@
from .environment import EnvironmentContext
from . import util

# create proxy functions for
# each method on the EnvironmentContext class.
util.create_module_class_proxy(EnvironmentContext, globals(), locals())
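The module-class proxy above is what lets env.py scripts call context functions at module level; a sketch of the effect, only meaningful while an EnvironmentContext is active (i.e. inside an env.py run):

    # Inside an env.py executed by an alembic command:
    from alembic import context

    context.configure(url="sqlite://")   # module-level proxy call, forwarded
                                         # to the active EnvironmentContext
    context.run_migrations()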
2
Darwin/lib/python2.7/site-packages/alembic/ddl/__init__.py
Normal file
@@ -0,0 +1,2 @@
from . import postgresql, mysql, sqlite, mssql, oracle
from .impl import DefaultImpl
161
Darwin/lib/python2.7/site-packages/alembic/ddl/base.py
Normal file
@@ -0,0 +1,161 @@
import functools

from sqlalchemy.ext.compiler import compiles
from sqlalchemy.schema import DDLElement, Column
from sqlalchemy import Integer
from sqlalchemy import types as sqltypes

class AlterTable(DDLElement):
    """Represent an ALTER TABLE statement.

    Only the string name and optional schema name of the table
    are required, not a full Table object.

    """
    def __init__(self, table_name, schema=None):
        self.table_name = table_name
        self.schema = schema

class RenameTable(AlterTable):
    def __init__(self, old_table_name, new_table_name, schema=None):
        super(RenameTable, self).__init__(old_table_name, schema=schema)
        self.new_table_name = new_table_name

class AlterColumn(AlterTable):
    def __init__(self, name, column_name, schema=None,
                 existing_type=None,
                 existing_nullable=None,
                 existing_server_default=None):
        super(AlterColumn, self).__init__(name, schema=schema)
        self.column_name = column_name
        self.existing_type = sqltypes.to_instance(existing_type) \
            if existing_type is not None else None
        self.existing_nullable = existing_nullable
        self.existing_server_default = existing_server_default

class ColumnNullable(AlterColumn):
    def __init__(self, name, column_name, nullable, **kw):
        super(ColumnNullable, self).__init__(name, column_name,
                                             **kw)
        self.nullable = nullable

class ColumnType(AlterColumn):
    def __init__(self, name, column_name, type_, **kw):
        super(ColumnType, self).__init__(name, column_name,
                                         **kw)
        self.type_ = sqltypes.to_instance(type_)

class ColumnName(AlterColumn):
    def __init__(self, name, column_name, newname, **kw):
        super(ColumnName, self).__init__(name, column_name, **kw)
        self.newname = newname

class ColumnDefault(AlterColumn):
    def __init__(self, name, column_name, default, **kw):
        super(ColumnDefault, self).__init__(name, column_name, **kw)
        self.default = default

class AddColumn(AlterTable):
    def __init__(self, name, column, schema=None):
        super(AddColumn, self).__init__(name, schema=schema)
        self.column = column

class DropColumn(AlterTable):
    def __init__(self, name, column, schema=None):
        super(DropColumn, self).__init__(name, schema=schema)
        self.column = column


@compiles(RenameTable)
def visit_rename_table(element, compiler, **kw):
    return "%s RENAME TO %s" % (
        alter_table(compiler, element.table_name, element.schema),
        format_table_name(compiler, element.new_table_name, element.schema)
    )

@compiles(AddColumn)
def visit_add_column(element, compiler, **kw):
    return "%s %s" % (
        alter_table(compiler, element.table_name, element.schema),
        add_column(compiler, element.column, **kw)
    )

@compiles(DropColumn)
def visit_drop_column(element, compiler, **kw):
    return "%s %s" % (
        alter_table(compiler, element.table_name, element.schema),
        drop_column(compiler, element.column.name, **kw)
    )

@compiles(ColumnNullable)
def visit_column_nullable(element, compiler, **kw):
    return "%s %s %s" % (
        alter_table(compiler, element.table_name, element.schema),
        alter_column(compiler, element.column_name),
        "DROP NOT NULL" if element.nullable else "SET NOT NULL"
    )

@compiles(ColumnType)
def visit_column_type(element, compiler, **kw):
    return "%s %s %s" % (
        alter_table(compiler, element.table_name, element.schema),
        alter_column(compiler, element.column_name),
        "TYPE %s" % format_type(compiler, element.type_)
    )

@compiles(ColumnName)
def visit_column_name(element, compiler, **kw):
    return "%s RENAME %s TO %s" % (
        alter_table(compiler, element.table_name, element.schema),
        format_column_name(compiler, element.column_name),
        format_column_name(compiler, element.newname)
    )

@compiles(ColumnDefault)
def visit_column_default(element, compiler, **kw):
    return "%s %s %s" % (
        alter_table(compiler, element.table_name, element.schema),
        alter_column(compiler, element.column_name),
        "SET DEFAULT %s" %
        format_server_default(compiler, element.default)
        if element.default is not None
        else "DROP DEFAULT"
    )

def quote_dotted(name, quote):
    """quote the elements of a dotted name"""

    result = '.'.join([quote(x) for x in name.split('.')])
    return result

def format_table_name(compiler, name, schema):
    quote = functools.partial(compiler.preparer.quote, force=None)
    if schema:
        return quote_dotted(schema, quote) + "." + quote(name)
    else:
        return quote(name)

def format_column_name(compiler, name):
    return compiler.preparer.quote(name, None)

def format_server_default(compiler, default):
    return compiler.get_column_default_string(
        Column("x", Integer, server_default=default)
    )

def format_type(compiler, type_):
    return compiler.dialect.type_compiler.process(type_)

def alter_table(compiler, name, schema):
    return "ALTER TABLE %s" % format_table_name(compiler, name, schema)

def drop_column(compiler, name):
    return 'DROP COLUMN %s' % format_column_name(compiler, name)

def alter_column(compiler, name):
    return 'ALTER COLUMN %s' % format_column_name(compiler, name)

def add_column(compiler, column, **kw):
    return "ADD COLUMN %s" % compiler.get_column_specification(column, **kw)
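These elements render through SQLAlchemy's @compiles machinery, so they can be compiled to strings directly; a quick sketch:

    # Sketch: compile a DDL element against a chosen dialect.
    from sqlalchemy.dialects import sqlite
    from alembic.ddl.base import RenameTable

    stmt = RenameTable("old_name", "new_name")
    print(stmt.compile(dialect=sqlite.dialect()))
    # roughly: ALTER TABLE old_name RENAME TO new_name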
279
Darwin/lib/python2.7/site-packages/alembic/ddl/impl.py
Normal file
@@ -0,0 +1,279 @@
from sqlalchemy.sql.expression import _BindParamClause
from sqlalchemy.ext.compiler import compiles
from sqlalchemy import schema, text
from sqlalchemy import types as sqltypes

from ..compat import string_types, text_type, with_metaclass
from .. import util
from . import base

class ImplMeta(type):
    def __init__(cls, classname, bases, dict_):
        newtype = type.__init__(cls, classname, bases, dict_)
        if '__dialect__' in dict_:
            _impls[dict_['__dialect__']] = cls
        return newtype

_impls = {}

class DefaultImpl(with_metaclass(ImplMeta)):
    """Provide the entrypoint for major migration operations,
    including database-specific behavioral variances.

    While individual SQL/DDL constructs already provide
    for database-specific implementations, variances here
    allow for entirely different sequences of operations
    to take place for a particular migration, such as
    SQL Server's special 'IDENTITY INSERT' step for
    bulk inserts.

    """
    __dialect__ = 'default'

    transactional_ddl = False
    command_terminator = ";"

    def __init__(self, dialect, connection, as_sql,
                 transactional_ddl, output_buffer,
                 context_opts):
        self.dialect = dialect
        self.connection = connection
        self.as_sql = as_sql
        self.output_buffer = output_buffer
        self.memo = {}
        self.context_opts = context_opts
        if transactional_ddl is not None:
            self.transactional_ddl = transactional_ddl

    @classmethod
    def get_by_dialect(cls, dialect):
        return _impls[dialect.name]

    def static_output(self, text):
        self.output_buffer.write(text_type(text + "\n\n"))
        self.output_buffer.flush()

    @property
    def bind(self):
        return self.connection

    def _exec(self, construct, execution_options=None,
              multiparams=(),
              params=util.immutabledict()):
        if isinstance(construct, string_types):
            construct = text(construct)
        if self.as_sql:
            if multiparams or params:
                # TODO: coverage
                raise Exception("Execution arguments not allowed with as_sql")
            self.static_output(text_type(
                construct.compile(dialect=self.dialect)
            ).replace("\t", "    ").strip() + self.command_terminator)
        else:
            conn = self.connection
            if execution_options:
                conn = conn.execution_options(**execution_options)
            conn.execute(construct, *multiparams, **params)

    def execute(self, sql, execution_options=None):
        self._exec(sql, execution_options)

    def alter_column(self, table_name, column_name,
                     nullable=None,
                     server_default=False,
                     name=None,
                     type_=None,
                     schema=None,
                     autoincrement=None,
                     existing_type=None,
                     existing_server_default=None,
                     existing_nullable=None,
                     existing_autoincrement=None
                     ):
        if autoincrement is not None or existing_autoincrement is not None:
            util.warn("autoincrement and existing_autoincrement "
                      "only make sense for MySQL")
        if nullable is not None:
            self._exec(base.ColumnNullable(table_name, column_name,
                                           nullable, schema=schema,
                                           existing_type=existing_type,
                                           existing_server_default=existing_server_default,
                                           existing_nullable=existing_nullable,
                                           ))
        if server_default is not False:
            self._exec(base.ColumnDefault(
                table_name, column_name, server_default,
                schema=schema,
                existing_type=existing_type,
                existing_server_default=existing_server_default,
                existing_nullable=existing_nullable,
            ))
        if type_ is not None:
            self._exec(base.ColumnType(
                table_name, column_name, type_, schema=schema,
                existing_type=existing_type,
                existing_server_default=existing_server_default,
                existing_nullable=existing_nullable,
            ))
        # do the new name last ;)
        if name is not None:
            self._exec(base.ColumnName(
                table_name, column_name, name, schema=schema,
                existing_type=existing_type,
                existing_server_default=existing_server_default,
                existing_nullable=existing_nullable,
            ))

    def add_column(self, table_name, column, schema=None):
        self._exec(base.AddColumn(table_name, column, schema=schema))

    def drop_column(self, table_name, column, schema=None, **kw):
        self._exec(base.DropColumn(table_name, column, schema=schema))

    def add_constraint(self, const):
        if const._create_rule is None or \
                const._create_rule(self):
            self._exec(schema.AddConstraint(const))

    def drop_constraint(self, const):
        self._exec(schema.DropConstraint(const))

    def rename_table(self, old_table_name, new_table_name, schema=None):
        self._exec(base.RenameTable(old_table_name,
                                    new_table_name, schema=schema))

    def create_table(self, table):
        if util.sqla_07:
            table.dispatch.before_create(table, self.connection,
                                         checkfirst=False,
                                         _ddl_runner=self)
        self._exec(schema.CreateTable(table))
        if util.sqla_07:
            table.dispatch.after_create(table, self.connection,
                                        checkfirst=False,
                                        _ddl_runner=self)
        for index in table.indexes:
            self._exec(schema.CreateIndex(index))

    def drop_table(self, table):
        self._exec(schema.DropTable(table))

    def create_index(self, index):
        self._exec(schema.CreateIndex(index))

    def drop_index(self, index):
        self._exec(schema.DropIndex(index))

    def bulk_insert(self, table, rows, multiinsert=True):
        if not isinstance(rows, list):
            raise TypeError("List expected")
        elif rows and not isinstance(rows[0], dict):
            raise TypeError("List of dictionaries expected")
        if self.as_sql:
            for row in rows:
                self._exec(table.insert(inline=True).values(**dict(
                    (k,
                     _literal_bindparam(k, v, type_=table.c[k].type)
                     if not isinstance(v, _literal_bindparam) else v)
                    for k, v in row.items()
                )))
        else:
            # work around http://www.sqlalchemy.org/trac/ticket/2461
            if not hasattr(table, '_autoincrement_column'):
                table._autoincrement_column = None
            if rows:
                if multiinsert:
                    self._exec(table.insert(inline=True), multiparams=rows)
                else:
                    for row in rows:
                        self._exec(table.insert(inline=True).values(**row))

    def compare_type(self, inspector_column, metadata_column):

        conn_type = inspector_column.type
        metadata_type = metadata_column.type

        metadata_impl = metadata_type.dialect_impl(self.dialect)

        # work around SQLAlchemy bug "stale value for type affinity"
        # fixed in 0.7.4
        metadata_impl.__dict__.pop('_type_affinity', None)

        if conn_type._compare_type_affinity(
                metadata_impl
        ):
            comparator = _type_comparators.get(conn_type._type_affinity, None)

            return comparator and comparator(metadata_type, conn_type)
        else:
            return True

    def compare_server_default(self, inspector_column,
                               metadata_column,
                               rendered_metadata_default,
                               rendered_inspector_default):
        return rendered_inspector_default != rendered_metadata_default

    def correct_for_autogen_constraints(self, conn_uniques, conn_indexes,
                                        metadata_unique_constraints,
                                        metadata_indexes):
        pass

    def start_migrations(self):
        """A hook called when :meth:`.EnvironmentContext.run_migrations`
        is called.

        Implementations can set up per-migration-run state here.

        """

    def emit_begin(self):
        """Emit the string ``BEGIN``, or the backend-specific
        equivalent, on the current connection context.

        This is used in offline mode and typically
        via :meth:`.EnvironmentContext.begin_transaction`.

        """
        self.static_output("BEGIN" + self.command_terminator)

    def emit_commit(self):
        """Emit the string ``COMMIT``, or the backend-specific
        equivalent, on the current connection context.

        This is used in offline mode and typically
        via :meth:`.EnvironmentContext.begin_transaction`.

        """
        self.static_output("COMMIT" + self.command_terminator)

class _literal_bindparam(_BindParamClause):
    pass

@compiles(_literal_bindparam)
def _render_literal_bindparam(element, compiler, **kw):
    return compiler.render_literal_bindparam(element, **kw)


def _string_compare(t1, t2):
    return \
        t1.length is not None and \
        t1.length != t2.length

def _numeric_compare(t1, t2):
    return \
        (
            t1.precision is not None and
            t1.precision != t2.precision
        ) or \
        (
            t1.scale is not None and
            t1.scale != t2.scale
        )

_type_comparators = {
    sqltypes.String: _string_compare,
    sqltypes.Numeric: _numeric_compare
}
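The ImplMeta/_impls registry keys each implementation class by its __dialect__ string, and resolution happens by dialect name; a sketch of the lookup that get_by_dialect() performs:

    # Sketch: resolve the implementation class for a given dialect.
    from sqlalchemy import create_engine
    from alembic.ddl.impl import DefaultImpl

    engine = create_engine("sqlite://")   # in-memory SQLite
    impl_cls = DefaultImpl.get_by_dialect(engine.dialect)
    print(impl_cls.__name__, impl_cls.transactional_ddl)
    # -> SQLiteImpl False  (registered when alembic.ddl.sqlite was imported)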
217
Darwin/lib/python2.7/site-packages/alembic/ddl/mssql.py
Normal file
@@ -0,0 +1,217 @@
from sqlalchemy.ext.compiler import compiles

from .. import util
from .impl import DefaultImpl
from .base import alter_table, AddColumn, ColumnName, \
    format_table_name, format_column_name, ColumnNullable, alter_column, \
    format_server_default, ColumnDefault, format_type, ColumnType
from sqlalchemy.sql.expression import ClauseElement, Executable

class MSSQLImpl(DefaultImpl):
    __dialect__ = 'mssql'
    transactional_ddl = True
    batch_separator = "GO"

    def __init__(self, *arg, **kw):
        super(MSSQLImpl, self).__init__(*arg, **kw)
        self.batch_separator = self.context_opts.get(
            "mssql_batch_separator",
            self.batch_separator)

    def _exec(self, construct, *args, **kw):
        super(MSSQLImpl, self)._exec(construct, *args, **kw)
        if self.as_sql and self.batch_separator:
            self.static_output(self.batch_separator)

    def emit_begin(self):
        self.static_output("BEGIN TRANSACTION" + self.command_terminator)

    def emit_commit(self):
        super(MSSQLImpl, self).emit_commit()
        if self.as_sql and self.batch_separator:
            self.static_output(self.batch_separator)

    def alter_column(self, table_name, column_name,
                     nullable=None,
                     server_default=False,
                     name=None,
                     type_=None,
                     schema=None,
                     autoincrement=None,
                     existing_type=None,
                     existing_server_default=None,
                     existing_nullable=None,
                     existing_autoincrement=None
                     ):

        if nullable is not None and existing_type is None:
            if type_ is not None:
                existing_type = type_
                # the NULL/NOT NULL alter will handle
                # the type alteration
                type_ = None
            else:
                raise util.CommandError(
                    "MS-SQL ALTER COLUMN operations "
                    "with NULL or NOT NULL require the "
                    "existing_type or a new type_ be passed.")

        super(MSSQLImpl, self).alter_column(
            table_name, column_name,
            nullable=nullable,
            type_=type_,
            schema=schema,
            autoincrement=autoincrement,
            existing_type=existing_type,
            existing_nullable=existing_nullable,
            existing_autoincrement=existing_autoincrement
        )

        if server_default is not False:
            if existing_server_default is not False or \
                    server_default is None:
                self._exec(
                    _ExecDropConstraint(
                        table_name, column_name,
                        'sys.default_constraints')
                )
            if server_default is not None:
                super(MSSQLImpl, self).alter_column(
                    table_name, column_name,
                    schema=schema,
                    server_default=server_default)

        if name is not None:
            super(MSSQLImpl, self).alter_column(
                table_name, column_name,
                schema=schema,
                name=name)

    def bulk_insert(self, table, rows, **kw):
        if self.as_sql:
            self._exec(
                "SET IDENTITY_INSERT %s ON" %
                self.dialect.identifier_preparer.format_table(table)
            )
            super(MSSQLImpl, self).bulk_insert(table, rows, **kw)
            self._exec(
                "SET IDENTITY_INSERT %s OFF" %
                self.dialect.identifier_preparer.format_table(table)
            )
        else:
            super(MSSQLImpl, self).bulk_insert(table, rows, **kw)

    def drop_column(self, table_name, column, **kw):
        drop_default = kw.pop('mssql_drop_default', False)
        if drop_default:
            self._exec(
                _ExecDropConstraint(
                    table_name, column,
                    'sys.default_constraints')
            )
        drop_check = kw.pop('mssql_drop_check', False)
        if drop_check:
            self._exec(
                _ExecDropConstraint(
                    table_name, column,
                    'sys.check_constraints')
            )
        drop_fks = kw.pop('mssql_drop_foreign_key', False)
        if drop_fks:
            self._exec(
                _ExecDropFKConstraint(table_name, column)
            )
        super(MSSQLImpl, self).drop_column(table_name, column)

class _ExecDropConstraint(Executable, ClauseElement):
    def __init__(self, tname, colname, type_):
        self.tname = tname
        self.colname = colname
        self.type_ = type_

class _ExecDropFKConstraint(Executable, ClauseElement):
    def __init__(self, tname, colname):
        self.tname = tname
        self.colname = colname


@compiles(_ExecDropConstraint, 'mssql')
def _exec_drop_col_constraint(element, compiler, **kw):
    tname, colname, type_ = element.tname, element.colname, element.type_
    # from http://www.mssqltips.com/sqlservertip/1425/working-with-default-constraints-in-sql-server/
    # TODO: needs table formatting, etc.
    return """declare @const_name varchar(256)
select @const_name = [name] from %(type)s
where parent_object_id = object_id('%(tname)s')
and col_name(parent_object_id, parent_column_id) = '%(colname)s'
exec('alter table %(tname_quoted)s drop constraint ' + @const_name)""" % {
        'type': type_,
        'tname': tname,
        'colname': colname,
        'tname_quoted': format_table_name(compiler, tname, None),
    }

@compiles(_ExecDropFKConstraint, 'mssql')
def _exec_drop_col_fk_constraint(element, compiler, **kw):
    tname, colname = element.tname, element.colname

    return """declare @const_name varchar(256)
select @const_name = [name] from
sys.foreign_keys fk join sys.foreign_key_columns fkc
on fk.object_id=fkc.constraint_object_id
where fkc.parent_object_id = object_id('%(tname)s')
and col_name(fkc.parent_object_id, fkc.parent_column_id) = '%(colname)s'
exec('alter table %(tname_quoted)s drop constraint ' + @const_name)""" % {
        'tname': tname,
        'colname': colname,
        'tname_quoted': format_table_name(compiler, tname, None),
    }


@compiles(AddColumn, 'mssql')
def visit_add_column(element, compiler, **kw):
    return "%s %s" % (
        alter_table(compiler, element.table_name, element.schema),
        mssql_add_column(compiler, element.column, **kw)
    )

def mssql_add_column(compiler, column, **kw):
    return "ADD %s" % compiler.get_column_specification(column, **kw)

@compiles(ColumnNullable, 'mssql')
def visit_column_nullable(element, compiler, **kw):
    return "%s %s %s %s" % (
        alter_table(compiler, element.table_name, element.schema),
        alter_column(compiler, element.column_name),
        format_type(compiler, element.existing_type),
        "NULL" if element.nullable else "NOT NULL"
    )

@compiles(ColumnDefault, 'mssql')
def visit_column_default(element, compiler, **kw):
    # TODO: there can also be a named constraint
    # with ADD CONSTRAINT here
    return "%s ADD DEFAULT %s FOR %s" % (
        alter_table(compiler, element.table_name, element.schema),
        format_server_default(compiler, element.default),
        format_column_name(compiler, element.column_name)
    )

@compiles(ColumnName, 'mssql')
def visit_rename_column(element, compiler, **kw):
    return "EXEC sp_rename '%s.%s', %s, 'COLUMN'" % (
        format_table_name(compiler, element.table_name, element.schema),
        format_column_name(compiler, element.column_name),
        format_column_name(compiler, element.newname)
    )

@compiles(ColumnType, 'mssql')
def visit_column_type(element, compiler, **kw):
    return "%s %s %s" % (
        alter_table(compiler, element.table_name, element.schema),
        alter_column(compiler, element.column_name),
        format_type(compiler, element.type_)
    )
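From a migration script, the mssql_drop_* flags accepted by drop_column() above arrive through op.drop_column(), which forwards its keyword arguments here; a sketch (table and column names are illustrative):

    # Sketch: drop a SQL Server column together with its DEFAULT constraint.
    from alembic import op

    def upgrade():
        op.drop_column('account', 'old_flag', mssql_drop_default=True)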
212
Darwin/lib/python2.7/site-packages/alembic/ddl/mysql.py
Normal file
@@ -0,0 +1,212 @@
from sqlalchemy.ext.compiler import compiles
from sqlalchemy import types as sqltypes
from sqlalchemy import schema

from ..compat import string_types
from .. import util
from .impl import DefaultImpl
from .base import ColumnNullable, ColumnName, ColumnDefault, \
    ColumnType, AlterColumn, format_column_name, \
    format_server_default
from .base import alter_table

class MySQLImpl(DefaultImpl):
    __dialect__ = 'mysql'

    transactional_ddl = False

    def alter_column(self, table_name, column_name,
                     nullable=None,
                     server_default=False,
                     name=None,
                     type_=None,
                     schema=None,
                     autoincrement=None,
                     existing_type=None,
                     existing_server_default=None,
                     existing_nullable=None,
                     existing_autoincrement=None
                     ):
        if name is not None:
            self._exec(
                MySQLChangeColumn(
                    table_name, column_name,
                    schema=schema,
                    newname=name,
                    nullable=nullable if nullable is not None else
                    existing_nullable
                    if existing_nullable is not None
                    else True,
                    type_=type_ if type_ is not None else existing_type,
                    default=server_default if server_default is not False
                    else existing_server_default,
                    autoincrement=autoincrement if autoincrement is not None
                    else existing_autoincrement
                )
            )
        elif nullable is not None or \
                type_ is not None or \
                autoincrement is not None:
            self._exec(
                MySQLModifyColumn(
                    table_name, column_name,
                    schema=schema,
                    newname=name if name is not None else column_name,
                    nullable=nullable if nullable is not None else
                    existing_nullable
                    if existing_nullable is not None
                    else True,
                    type_=type_ if type_ is not None else existing_type,
                    default=server_default if server_default is not False
                    else existing_server_default,
                    autoincrement=autoincrement if autoincrement is not None
                    else existing_autoincrement
                )
            )
        elif server_default is not False:
            self._exec(
                MySQLAlterDefault(
                    table_name, column_name, server_default,
                    schema=schema,
                )
            )

    def correct_for_autogen_constraints(self, conn_unique_constraints,
                                        conn_indexes,
                                        metadata_unique_constraints,
                                        metadata_indexes):
        removed = set()
        for idx in list(conn_indexes):
            # MySQL puts implicit indexes on FK columns, even if
            # composite and even if MyISAM, so can't check this too easily
            if idx.name == idx.columns.keys()[0]:
                conn_indexes.remove(idx)
                removed.add(idx.name)

        # then remove indexes from the "metadata_indexes"
        # that we've removed from reflected, otherwise they come out
        # as adds (see #202)
        for idx in list(metadata_indexes):
            if idx.name in removed:
                metadata_indexes.remove(idx)

class MySQLAlterDefault(AlterColumn):
    def __init__(self, name, column_name, default, schema=None):
        super(AlterColumn, self).__init__(name, schema=schema)
        self.column_name = column_name
        self.default = default


class MySQLChangeColumn(AlterColumn):
    def __init__(self, name, column_name, schema=None,
                 newname=None,
                 type_=None,
                 nullable=None,
                 default=False,
                 autoincrement=None):
        super(AlterColumn, self).__init__(name, schema=schema)
        self.column_name = column_name
        self.nullable = nullable
        self.newname = newname
        self.default = default
        self.autoincrement = autoincrement
        if type_ is None:
            raise util.CommandError(
                "All MySQL CHANGE/MODIFY COLUMN operations "
                "require the existing type."
            )

        self.type_ = sqltypes.to_instance(type_)

class MySQLModifyColumn(MySQLChangeColumn):
    pass


@compiles(ColumnNullable, 'mysql')
@compiles(ColumnName, 'mysql')
@compiles(ColumnDefault, 'mysql')
@compiles(ColumnType, 'mysql')
def _mysql_doesnt_support_individual(element, compiler, **kw):
    raise NotImplementedError(
        "Individual alter column constructs not supported by MySQL"
    )


@compiles(MySQLAlterDefault, "mysql")
def _mysql_alter_default(element, compiler, **kw):
    return "%s ALTER COLUMN %s %s" % (
        alter_table(compiler, element.table_name, element.schema),
        format_column_name(compiler, element.column_name),
        "SET DEFAULT %s" % format_server_default(compiler, element.default)
        if element.default is not None
        else "DROP DEFAULT"
    )

@compiles(MySQLModifyColumn, "mysql")
def _mysql_modify_column(element, compiler, **kw):
    return "%s MODIFY %s %s" % (
        alter_table(compiler, element.table_name, element.schema),
        format_column_name(compiler, element.column_name),
        _mysql_colspec(
            compiler,
            nullable=element.nullable,
            server_default=element.default,
            type_=element.type_,
            autoincrement=element.autoincrement
        ),
    )


@compiles(MySQLChangeColumn, "mysql")
def _mysql_change_column(element, compiler, **kw):
    return "%s CHANGE %s %s %s" % (
        alter_table(compiler, element.table_name, element.schema),
        format_column_name(compiler, element.column_name),
        format_column_name(compiler, element.newname),
        _mysql_colspec(
            compiler,
            nullable=element.nullable,
            server_default=element.default,
            type_=element.type_,
            autoincrement=element.autoincrement
        ),
    )

def _render_value(compiler, expr):
    if isinstance(expr, string_types):
        return "'%s'" % expr
    else:
        return compiler.sql_compiler.process(expr)

def _mysql_colspec(compiler, nullable, server_default, type_,
                   autoincrement):
    spec = "%s %s" % (
        compiler.dialect.type_compiler.process(type_),
        "NULL" if nullable else "NOT NULL"
    )
    if autoincrement:
        spec += " AUTO_INCREMENT"
    if server_default is not False and server_default is not None:
        spec += " DEFAULT %s" % _render_value(compiler, server_default)

    return spec

@compiles(schema.DropConstraint, "mysql")
def _mysql_drop_constraint(element, compiler, **kw):
    """Redefine SQLAlchemy's drop constraint to
    raise errors for invalid constraint type."""

    constraint = element.element
    if isinstance(constraint, (schema.ForeignKeyConstraint,
                               schema.PrimaryKeyConstraint,
                               schema.UniqueConstraint)
                  ):
        return compiler.visit_drop_constraint(element, **kw)
    elif isinstance(constraint, schema.CheckConstraint):
        raise NotImplementedError(
            "MySQL does not support CHECK constraints.")
    else:
        raise NotImplementedError(
            "No generic 'DROP CONSTRAINT' in MySQL - "
            "please specify constraint type")
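Because MySQL only supports whole-column CHANGE/MODIFY statements, alter_column() here must be able to restate the full column definition; in practice that means passing existing_type from the migration script. A sketch (table, column, and type are illustrative):

    # Sketch: a NOT NULL change on MySQL needs the full existing type.
    import sqlalchemy as sa
    from alembic import op

    def upgrade():
        op.alter_column('user', 'email',
                        existing_type=sa.String(50),
                        nullable=False)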
77
Darwin/lib/python2.7/site-packages/alembic/ddl/oracle.py
Normal file
@@ -0,0 +1,77 @@
from sqlalchemy.ext.compiler import compiles

from .impl import DefaultImpl
from .base import alter_table, AddColumn, ColumnName, \
    format_column_name, ColumnNullable, \
    format_server_default, ColumnDefault, format_type, ColumnType

class OracleImpl(DefaultImpl):
    __dialect__ = 'oracle'
    transactional_ddl = True
    batch_separator = "/"
    command_terminator = ""

    def __init__(self, *arg, **kw):
        super(OracleImpl, self).__init__(*arg, **kw)
        self.batch_separator = self.context_opts.get(
            "oracle_batch_separator",
            self.batch_separator)

    def _exec(self, construct, *args, **kw):
        super(OracleImpl, self)._exec(construct, *args, **kw)
        if self.as_sql and self.batch_separator:
            self.static_output(self.batch_separator)

    def emit_begin(self):
        self._exec("SET TRANSACTION READ WRITE")

    def emit_commit(self):
        self._exec("COMMIT")

@compiles(AddColumn, 'oracle')
def visit_add_column(element, compiler, **kw):
    return "%s %s" % (
        alter_table(compiler, element.table_name, element.schema),
        add_column(compiler, element.column, **kw),
    )

@compiles(ColumnNullable, 'oracle')
def visit_column_nullable(element, compiler, **kw):
    return "%s %s %s" % (
        alter_table(compiler, element.table_name, element.schema),
        alter_column(compiler, element.column_name),
        "NULL" if element.nullable else "NOT NULL"
    )

@compiles(ColumnType, 'oracle')
def visit_column_type(element, compiler, **kw):
    return "%s %s %s" % (
        alter_table(compiler, element.table_name, element.schema),
        alter_column(compiler, element.column_name),
        format_type(compiler, element.type_)
    )

@compiles(ColumnName, 'oracle')
def visit_column_name(element, compiler, **kw):
    return "%s RENAME COLUMN %s TO %s" % (
        alter_table(compiler, element.table_name, element.schema),
        format_column_name(compiler, element.column_name),
        format_column_name(compiler, element.newname)
    )

@compiles(ColumnDefault, 'oracle')
def visit_column_default(element, compiler, **kw):
    return "%s %s %s" % (
        alter_table(compiler, element.table_name, element.schema),
        alter_column(compiler, element.column_name),
        "DEFAULT %s" %
        format_server_default(compiler, element.default)
        if element.default is not None
        else "DEFAULT NULL"
    )

def alter_column(compiler, name):
    return 'MODIFY %s' % format_column_name(compiler, name)

def add_column(compiler, column, **kw):
    return "ADD %s" % compiler.get_column_specification(column, **kw)
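In offline mode the Oracle impl terminates each statement with a "/" batch separator and suppresses the usual ";" terminator; an offline upgrade script therefore looks roughly like this (statements illustrative, not actual output of this tree):

    SET TRANSACTION READ WRITE
    /
    ALTER TABLE account ADD last_seen DATE
    /
    COMMIT
    /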
43
Darwin/lib/python2.7/site-packages/alembic/ddl/postgresql.py
Normal file
@@ -0,0 +1,43 @@
import re

from sqlalchemy import types as sqltypes

from .base import compiles, alter_table, format_table_name, RenameTable
from .impl import DefaultImpl

class PostgresqlImpl(DefaultImpl):
    __dialect__ = 'postgresql'
    transactional_ddl = True

    def compare_server_default(self, inspector_column,
                               metadata_column,
                               rendered_metadata_default,
                               rendered_inspector_default):

        # don't do defaults for SERIAL columns
        if metadata_column.primary_key and \
                metadata_column is metadata_column.table._autoincrement_column:
            return False

        conn_col_default = rendered_inspector_default

        if None in (conn_col_default, rendered_metadata_default):
            return conn_col_default != rendered_metadata_default

        if metadata_column.type._type_affinity is not sqltypes.String:
            rendered_metadata_default = re.sub(r"^'|'$", "", rendered_metadata_default)

        return not self.connection.scalar(
            "SELECT %s = %s" % (
                conn_col_default,
                rendered_metadata_default
            )
        )


@compiles(RenameTable, "postgresql")
def visit_rename_table(element, compiler, **kw):
    return "%s RENAME TO %s" % (
        alter_table(compiler, element.table_name, element.schema),
        format_table_name(compiler, element.new_table_name, None)
    )
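The PostgreSQL override of visit_rename_table matters when a schema is present: the new name in RENAME TO must not be schema-qualified, since the table stays in its schema. A sketch:

    # Sketch: only the old name is schema-qualified in the rendered SQL.
    from sqlalchemy.dialects import postgresql
    from alembic.ddl.base import RenameTable

    stmt = RenameTable("old_tbl", "new_tbl", schema="tenant")
    print(stmt.compile(dialect=postgresql.dialect()))
    # roughly: ALTER TABLE tenant.old_tbl RENAME TO new_tbl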
73
Darwin/lib/python2.7/site-packages/alembic/ddl/sqlite.py
Normal file
@@ -0,0 +1,73 @@
from .. import util
from .impl import DefaultImpl

#from sqlalchemy.ext.compiler import compiles
#from .base import AddColumn, alter_table
#from sqlalchemy.schema import AddConstraint

class SQLiteImpl(DefaultImpl):
    __dialect__ = 'sqlite'

    transactional_ddl = False
    """SQLite supports transactional DDL, but pysqlite does not:
    see: http://bugs.python.org/issue10740
    """

    def add_constraint(self, const):
        # attempt to distinguish between an
        # auto-gen constraint and an explicit one
        if const._create_rule is None:
            raise NotImplementedError(
                "No support for ALTER of constraints in SQLite dialect")
        elif const._create_rule(self):
            util.warn("Skipping unsupported ALTER for "
                      "creation of implicit constraint")

    def drop_constraint(self, const):
        if const._create_rule is None:
            raise NotImplementedError(
                "No support for ALTER of constraints in SQLite dialect")

    def correct_for_autogen_constraints(self, conn_unique_constraints,
                                        conn_indexes,
                                        metadata_unique_constraints,
                                        metadata_indexes):

        def uq_sig(uq):
            return tuple(sorted(uq.columns.keys()))

        conn_unique_sigs = set(
            uq_sig(uq)
            for uq in conn_unique_constraints
        )

        for idx in list(metadata_unique_constraints):
            # SQLite backend can't report on unnamed UNIQUE constraints,
            # so remove these, unless we see an exact signature match
            if idx.name is None and uq_sig(idx) not in conn_unique_sigs:
                metadata_unique_constraints.remove(idx)

        for idx in list(conn_unique_constraints):
            # just in case we fix the backend such that it does report
            # on them, blow them out of the reflected collection too otherwise
            # they will come up as removed.  if the backend supports this now,
            # add a version check here for the dialect.
            if idx.name is None:
                conn_unique_constraints.remove(idx)

#@compiles(AddColumn, 'sqlite')
#def visit_add_column(element, compiler, **kw):
#    return "%s %s" % (
#        alter_table(compiler, element.table_name, element.schema),
#        add_column(compiler, element.column, **kw)
#    )


#def add_column(compiler, column, **kw):
#    text = "ADD COLUMN %s" % compiler.get_column_specification(column, **kw)
#    # need to modify SQLAlchemy so that the CHECK associated with a Boolean
#    # or Enum gets placed as part of the column constraints, not the Table
#    # see ticket 98
#    for const in column.constraints:
#        text += compiler.process(AddConstraint(const))
#    return text
791
Darwin/lib/python2.7/site-packages/alembic/environment.py
Normal file
@@ -0,0 +1,791 @@
from .operations import Operations
|
||||
from .migration import MigrationContext
|
||||
from . import util
|
||||
|
||||
class EnvironmentContext(object):
|
||||
"""Represent the state made available to an ``env.py`` script.
|
||||
|
||||
:class:`.EnvironmentContext` is normally instantiated
|
||||
by the commands present in the :mod:`alembic.command`
|
||||
module. From within an ``env.py`` script, the current
|
||||
:class:`.EnvironmentContext` is available via the
|
||||
``alembic.context`` datamember.
|
||||
|
||||
:class:`.EnvironmentContext` is also a Python context
|
||||
manager, that is, is intended to be used using the
|
||||
``with:`` statement. A typical use of :class:`.EnvironmentContext`::
|
||||
|
||||
from alembic.config import Config
|
||||
from alembic.script import ScriptDirectory
|
||||
|
||||
config = Config()
|
||||
config.set_main_option("script_location", "myapp:migrations")
|
||||
script = ScriptDirectory.from_config(config)
|
||||
|
||||
def my_function(rev, context):
|
||||
'''do something with revision "rev", which
|
||||
will be the current database revision,
|
||||
and "context", which is the MigrationContext
|
||||
that the env.py will create'''
|
||||
|
||||
with EnvironmentContext(
|
||||
config,
|
||||
script,
|
||||
fn = my_function,
|
||||
as_sql = False,
|
||||
starting_rev = 'base',
|
||||
destination_rev = 'head',
|
||||
tag = "sometag"
|
||||
):
|
||||
script.run_env()
|
||||
|
||||
The above script will invoke the ``env.py`` script
|
||||
within the migration environment. If and when ``env.py``
|
||||
calls :meth:`.MigrationContext.run_migrations`, the
|
||||
``my_function()`` function above will be called
|
||||
by the :class:`.MigrationContext`, given the context
|
||||
itself as well as the current revision in the database.
|
||||
|
||||
.. note::
|
||||
|
||||
For most API usages other than full blown
|
||||
invocation of migration scripts, the :class:`.MigrationContext`
|
||||
and :class:`.ScriptDirectory` objects can be created and
|
||||
used directly. The :class:`.EnvironmentContext` object
|
||||
is *only* needed when you need to actually invoke the
|
||||
``env.py`` module present in the migration environment.
|
||||
|
||||
"""
|
||||
|
||||
_migration_context = None
|
||||
|
||||
config = None
|
||||
"""An instance of :class:`.Config` representing the
|
||||
configuration file contents as well as other variables
|
||||
set programmatically within it."""
|
||||
|
||||
script = None
|
||||
"""An instance of :class:`.ScriptDirectory` which provides
|
||||
programmatic access to version files within the ``versions/``
|
||||
directory.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, config, script, **kw):
|
||||
"""Construct a new :class:`.EnvironmentContext`.
|
||||
|
||||
:param config: a :class:`.Config` instance.
|
||||
:param script: a :class:`.ScriptDirectory` instance.
|
||||
:param \**kw: keyword options that will be ultimately
|
||||
passed along to the :class:`.MigrationContext` when
|
||||
:meth:`.EnvironmentContext.configure` is called.
|
||||
|
||||
"""
|
||||
self.config = config
|
||||
self.script = script
|
||||
self.context_opts = kw
|
||||
|
||||
def __enter__(self):
|
||||
"""Establish a context which provides a
|
||||
:class:`.EnvironmentContext` object to
|
||||
env.py scripts.
|
||||
|
||||
The :class:`.EnvironmentContext` will
|
||||
be made available as ``from alembic import context``.
|
||||
|
||||
"""
|
||||
from .context import _install_proxy
|
||||
_install_proxy(self)
|
||||
return self
|
||||
|
||||
def __exit__(self, *arg, **kw):
|
||||
from . import context, op
|
||||
context._remove_proxy()
|
||||
op._remove_proxy()
|
||||
|
||||
def is_offline_mode(self):
|
||||
"""Return True if the current migrations environment
|
||||
is running in "offline mode".
|
||||
|
||||
This is ``True`` or ``False`` depending
|
||||
on the the ``--sql`` flag passed.
|
||||
|
||||
This function does not require that the :class:`.MigrationContext`
|
||||
has been configured.
|
||||
|
||||
"""
|
||||
return self.context_opts.get('as_sql', False)
|
||||
|
||||
def is_transactional_ddl(self):
|
||||
"""Return True if the context is configured to expect a
|
||||
transactional DDL capable backend.
|
||||
|
||||
This defaults to the type of database in use, and
|
||||
can be overridden by the ``transactional_ddl`` argument
|
||||
to :meth:`.configure`
|
||||
|
||||
This function requires that a :class:`.MigrationContext`
|
||||
has first been made available via :meth:`.configure`.
|
||||
|
||||
"""
|
||||
return self.get_context().impl.transactional_ddl
|
||||
|
||||
def requires_connection(self):
|
||||
return not self.is_offline_mode()
|
||||
|
||||
def get_head_revision(self):
|
||||
"""Return the hex identifier of the 'head' revision.
|
||||
|
||||
This function does not require that the :class:`.MigrationContext`
|
||||
has been configured.
|
||||
|
||||
"""
|
||||
return self.script._as_rev_number("head")
|
||||
|
||||
def get_starting_revision_argument(self):
|
||||
"""Return the 'starting revision' argument,
|
||||
if the revision was passed using ``start:end``.
|
||||
|
||||
This is only meaningful in "offline" mode.
|
||||
Returns ``None`` if no value is available
|
||||
or was configured.
|
||||
|
||||
This function does not require that the :class:`.MigrationContext`
|
||||
has been configured.
|
||||
|
||||
"""
|
||||
if self._migration_context is not None:
|
||||
return self.script._as_rev_number(
|
||||
self.get_context()._start_from_rev)
|
||||
elif 'starting_rev' in self.context_opts:
|
||||
return self.script._as_rev_number(
|
||||
self.context_opts['starting_rev'])
|
||||
else:
|
||||
raise util.CommandError(
|
||||
"No starting revision argument is available.")
|
||||
|
||||
def get_revision_argument(self):
|
||||
"""Get the 'destination' revision argument.
|
||||
|
||||
This is typically the argument passed to the
|
||||
``upgrade`` or ``downgrade`` command.
|
||||
|
||||
If it was specified as ``head``, the actual
|
||||
version number is returned; if specified
|
||||
as ``base``, ``None`` is returned.
|
||||
|
||||
This function does not require that the :class:`.MigrationContext`
|
||||
has been configured.
|
||||
|
||||
"""
|
||||
return self.script._as_rev_number(
|
||||
self.context_opts['destination_rev'])
|
||||
|
||||
def get_tag_argument(self):
|
||||
"""Return the value passed for the ``--tag`` argument, if any.
|
||||
|
||||
The ``--tag`` argument is not used directly by Alembic,
|
||||
but is available for custom ``env.py`` configurations that
|
||||
wish to use it; particularly for offline generation scripts
|
||||
that wish to generate tagged filenames.
|
||||
|
||||
This function does not require that the :class:`.MigrationContext`
|
||||
has been configured.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:meth:`.EnvironmentContext.get_x_argument` - a newer and more
|
||||
open ended system of extending ``env.py`` scripts via the command
|
||||
line.
|
||||
|
||||
"""
|
||||
return self.context_opts.get('tag', None)
|
||||
|
||||
def get_x_argument(self, as_dictionary=False):
|
||||
"""Return the value(s) passed for the ``-x`` argument, if any.
|
||||
|
||||
The ``-x`` argument is an open ended flag that allows any user-defined
|
||||
value or values to be passed on the command line, then available
|
||||
here for consumption by a custom ``env.py`` script.
|
||||
|
||||
The return value is a list, returned directly from the ``argparse``
|
||||
structure. If ``as_dictionary=True`` is passed, the ``x`` arguments
|
||||
are parsed using ``key=value`` format into a dictionary that is
|
||||
then returned.
|
||||
|
||||
For example, to support passing a database URL on the command line,
|
||||
the standard ``env.py`` script can be modified like this::
|
||||
|
||||
cmd_line_url = context.get_x_argument(as_dictionary=True).get('dbname')
|
||||
if cmd_line_url:
|
||||
engine = create_engine(cmd_line_url)
|
||||
else:
|
||||
engine = engine_from_config(
|
||||
config.get_section(config.config_ini_section),
|
||||
prefix='sqlalchemy.',
|
||||
poolclass=pool.NullPool)
|
||||
|
||||
This then takes effect by running the ``alembic`` script as::
|
||||
|
||||
alembic -x dbname=postgresql://user:pass@host/dbname upgrade head
|
||||
|
||||
This function does not require that the :class:`.MigrationContext`
|
||||
has been configured.
|
||||
|
||||
.. versionadded:: 0.6.0
|
||||
|
||||
.. seealso::
|
||||
|
||||
:meth:`.EnvironmentContext.get_tag_argument`
|
||||
|
||||
:attr:`.Config.cmd_opts`
|
||||
|
||||
"""
|
||||
if self.config.cmd_opts is not None:
|
||||
value = self.config.cmd_opts.x or []
|
||||
else:
|
||||
value = []
|
||||
if as_dictionary:
|
||||
value = dict(
|
||||
arg.split('=', 1) for arg in value
|
||||
)
|
||||
return value
|
||||
|
||||
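
    # Illustrative sketch of the behavior above (values hypothetical): given
    # an invocation such as
    # ``alembic -x tenant=acme -x dry_run=true upgrade head``,
    # ``get_x_argument()`` returns ``['tenant=acme', 'dry_run=true']``, while
    # ``get_x_argument(as_dictionary=True)`` returns
    # ``{'tenant': 'acme', 'dry_run': 'true'}``.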
    def configure(self,
                  connection=None,
                  url=None,
                  dialect_name=None,
                  transactional_ddl=None,
                  transaction_per_migration=False,
                  output_buffer=None,
                  starting_rev=None,
                  tag=None,
                  template_args=None,
                  target_metadata=None,
                  include_symbol=None,
                  include_object=None,
                  include_schemas=False,
                  compare_type=False,
                  compare_server_default=False,
                  render_item=None,
                  upgrade_token="upgrades",
                  downgrade_token="downgrades",
                  alembic_module_prefix="op.",
                  sqlalchemy_module_prefix="sa.",
                  user_module_prefix=None,
                  **kw
                  ):
        """Configure a :class:`.MigrationContext` within this
        :class:`.EnvironmentContext` which will provide database
        connectivity and other configuration to a series of
        migration scripts.

        Many methods on :class:`.EnvironmentContext` require that
        this method has been called in order to function, as they
        ultimately need to have database access or at least access
        to the dialect in use.  Those which do are documented as such.

        The important thing needed by :meth:`.configure` is a
        means to determine what kind of database dialect is in use.
        An actual connection to that database is needed only if
        the :class:`.MigrationContext` is to be used in
        "online" mode.

        If the :meth:`.is_offline_mode` function returns ``True``,
        then no connection is needed here.  Otherwise, the
        ``connection`` parameter should be present as an
        instance of :class:`sqlalchemy.engine.Connection`.

        This function is typically called from the ``env.py``
        script within a migration environment.  It can be called
        multiple times within a single invocation.  The most recent
        :class:`~sqlalchemy.engine.Connection`
        for which it was called is the one that will be operated upon
        by the next call to :meth:`.run_migrations`.

        General parameters:

        :param connection: a :class:`~sqlalchemy.engine.Connection`
         to use for SQL execution in "online" mode.  When present, is also
         used to determine the type of dialect in use.
        :param url: a string database url, or a
         :class:`sqlalchemy.engine.url.URL` object.
         The type of dialect to be used will be derived from this if
         ``connection`` is not passed.
        :param dialect_name: string name of a dialect, such as
         "postgresql", "mssql", etc.
         The type of dialect to be used will be derived from this if
         ``connection`` and ``url`` are not passed.
        :param transactional_ddl: Force the usage of "transactional"
         DDL on or off;
         this otherwise defaults to whether or not the dialect in
         use supports it.
        :param transaction_per_migration: if True, nest each migration script
         in a transaction rather than the full series of migrations to
         run.

         .. versionadded:: 0.6.5

        :param output_buffer: a file-like object that will be used
         for textual output
         when the ``--sql`` option is used to generate SQL scripts.
         Defaults to
         ``sys.stdout`` if not passed here and also not present on
         the :class:`.Config`
         object.  The value here overrides that of the :class:`.Config`
         object.
        :param output_encoding: when using ``--sql`` to generate SQL
         scripts, apply this encoding to the string output.

         .. versionadded:: 0.5.0

        :param starting_rev: Override the "starting revision" argument
         when using ``--sql`` mode.
        :param tag: a string tag for usage by custom ``env.py`` scripts.
         Set via the ``--tag`` option, can be overridden here.
        :param template_args: dictionary of template arguments which
         will be added to the template argument environment when
         running the "revision" command.  Note that the script environment
         is only run within the "revision" command if the ``--autogenerate``
         option is used, or if the option ``revision_environment=true``
         is present in the ``alembic.ini`` file.

         .. versionadded:: 0.3.3

        :param version_table: The name of the Alembic version table.
         The default is ``'alembic_version'``.
        :param version_table_schema: Optional schema to place version
         table within.

         .. versionadded:: 0.5.0

        Parameters specific to the autogenerate feature, when
        ``alembic revision`` is run with the ``--autogenerate`` feature:

        :param target_metadata: a :class:`sqlalchemy.schema.MetaData`
         object that
         will be consulted during autogeneration.  The tables present
         will be compared against
         what is locally available on the target
         :class:`~sqlalchemy.engine.Connection`
         to produce candidate upgrade/downgrade operations.

        :param compare_type: Indicates type comparison behavior during
         an autogenerate
         operation.  Defaults to ``False`` which disables type
         comparison.  Set to
         ``True`` to turn on default type comparison, which has varied
         accuracy depending on backend.

         To customize type comparison behavior, a callable may be
         specified which
         can filter type comparisons during an autogenerate operation.
         The format of this callable is::

            def my_compare_type(context, inspected_column,
                        metadata_column, inspected_type, metadata_type):
                # return True if the types are different,
                # False if not, or None to allow the default implementation
                # to compare these types
                return None

            context.configure(
                # ...
                compare_type = my_compare_type
            )

         ``inspected_column`` is a :class:`sqlalchemy.schema.Column` as
         returned by
         :meth:`sqlalchemy.engine.reflection.Inspector.reflecttable`, whereas
         ``metadata_column`` is a :class:`sqlalchemy.schema.Column` from
         the local model environment.

         A return value of ``None`` indicates to allow default type
         comparison to proceed.

         .. seealso::

            :paramref:`.EnvironmentContext.configure.compare_server_default`

        :param compare_server_default: Indicates server default comparison
         behavior during
         an autogenerate operation.  Defaults to ``False`` which disables
         server default
         comparison.  Set to ``True`` to turn on server default comparison,
         which has
         varied accuracy depending on backend.

         To customize server default comparison behavior, a callable may
         be specified
         which can filter server default comparisons during an
         autogenerate operation.  The format of this
         callable is::

            def my_compare_server_default(context, inspected_column,
                        metadata_column, inspected_default, metadata_default,
                        rendered_metadata_default):
                # return True if the defaults are different,
                # False if not, or None to allow the default implementation
                # to compare these defaults
                return None

            context.configure(
                # ...
                compare_server_default = my_compare_server_default
            )

         ``inspected_column`` is a dictionary structure as returned by
         :meth:`sqlalchemy.engine.reflection.Inspector.get_columns`, whereas
         ``metadata_column`` is a :class:`sqlalchemy.schema.Column` from
         the local model environment.

         A return value of ``None`` indicates to allow default server default
         comparison
         to proceed.  Note that some backends such as Postgresql actually
         execute
         the two defaults on the database side to compare for equivalence.

         .. seealso::

            :paramref:`.EnvironmentContext.configure.compare_type`

        :param include_object: A callable function which is given
         the chance to return ``True`` or ``False`` for any object,
         indicating if the given object should be considered in the
         autogenerate sweep.

         The function accepts the following positional arguments:

         * ``object``: a :class:`~sqlalchemy.schema.SchemaItem` object such
           as a :class:`~sqlalchemy.schema.Table` or
           :class:`~sqlalchemy.schema.Column` object
         * ``name``: the name of the object.  This is typically available
           via ``object.name``.
         * ``type``: a string describing the type of object; currently
           ``"table"`` or ``"column"``
         * ``reflected``: ``True`` if the given object was produced based on
           table reflection, ``False`` if it's from a local :class:`.MetaData`
           object.
         * ``compare_to``: the object being compared against, if available,
           else ``None``.

         E.g.::

            def include_object(object, name, type_, reflected, compare_to):
                if (type_ == "column" and
                        not reflected and
                        object.info.get("skip_autogenerate", False)):
                    return False
                else:
                    return True

            context.configure(
                # ...
                include_object = include_object
            )

         :paramref:`.EnvironmentContext.configure.include_object` can also
         be used to filter on specific schemas to include or omit, when
         the :paramref:`.EnvironmentContext.configure.include_schemas`
         flag is set to ``True``.  The :attr:`.Table.schema` attribute
         on each :class:`.Table` object reflected will indicate the name
         of the schema from which the :class:`.Table` originates.

         .. versionadded:: 0.6.0

         .. seealso::

            :paramref:`.EnvironmentContext.configure.include_schemas`

        :param include_symbol: A callable function which, given a table name
         and schema name (may be ``None``), returns ``True`` or ``False``,
         indicating
         if the given table should be considered in the autogenerate sweep.

         .. deprecated:: 0.6.0
            :paramref:`.EnvironmentContext.configure.include_symbol`
            is superseded by the more generic
            :paramref:`.EnvironmentContext.configure.include_object`
            parameter.

         E.g.::

            def include_symbol(tablename, schema):
                return tablename not in ("skip_table_one", "skip_table_two")

            context.configure(
                # ...
                include_symbol = include_symbol
            )

         .. seealso::

            :paramref:`.EnvironmentContext.configure.include_schemas`

            :paramref:`.EnvironmentContext.configure.include_object`

        :param include_schemas: If True, autogenerate will scan across
         all schemas located by the SQLAlchemy
         :meth:`~sqlalchemy.engine.reflection.Inspector.get_schema_names`
         method, and include all differences in tables found across all
         those schemas.  When using this option, you may want to also
         use the :paramref:`.EnvironmentContext.configure.include_object`
         option to specify a callable which
         can filter the tables/schemas that get included.

         .. versionadded:: 0.4.0

         .. seealso::

            :paramref:`.EnvironmentContext.configure.include_object`

        :param render_item: Callable that can be used to override how
         any schema item, i.e. column, constraint, type,
         etc., is rendered for autogenerate.  The callable receives a
         string describing the type of object, the object, and
         the autogen context.  If it returns False, the
         default rendering method will be used.  If it returns None,
         the item will not be rendered in the context of a Table
         construct, that is, can be used to skip columns or constraints
         within op.create_table()::

            def my_render_column(type_, col, autogen_context):
                if type_ == "column" and isinstance(col, MySpecialCol):
                    return repr(col)
                else:
                    return False

            context.configure(
                # ...
                render_item = my_render_column
            )

         Available values for the type string include: ``"column"``,
         ``"primary_key"``, ``"foreign_key"``, ``"unique"``, ``"check"``,
         ``"type"``, ``"server_default"``.

         .. versionadded:: 0.5.0

         .. seealso::

            :ref:`autogen_render_types`

        :param upgrade_token: When autogenerate completes, the text of the
         candidate upgrade operations will be present in this template
         variable when ``script.py.mako`` is rendered.  Defaults to
         ``upgrades``.
        :param downgrade_token: When autogenerate completes, the text of the
         candidate downgrade operations will be present in this
         template variable when ``script.py.mako`` is rendered.  Defaults to
         ``downgrades``.

        :param alembic_module_prefix: When autogenerate refers to Alembic
         :mod:`alembic.operations` constructs, this prefix will be used
         (i.e. ``op.create_table``).  Defaults to "``op.``".
         Can be ``None`` to indicate no prefix.

        :param sqlalchemy_module_prefix: When autogenerate refers to
         SQLAlchemy
         :class:`~sqlalchemy.schema.Column` or type classes, this prefix
         will be used
         (i.e. ``sa.Column("somename", sa.Integer)``).  Defaults to "``sa.``".
         Can be ``None`` to indicate no prefix.
         Note that when dialect-specific types are rendered, autogenerate
         will render them using the dialect module name, i.e. ``mssql.BIT()``,
         ``postgresql.UUID()``.

        :param user_module_prefix: When autogenerate refers to a SQLAlchemy
         type (e.g. :class:`.TypeEngine`) where the module name is not
         under the ``sqlalchemy`` namespace, this prefix will be used
         within autogenerate, if non-``None``; if left at its default of
         ``None``, the
         :paramref:`.EnvironmentContext.configure.sqlalchemy_module_prefix`
         is used instead.

         .. versionadded:: 0.6.3 added
            :paramref:`.EnvironmentContext.configure.user_module_prefix`

         .. seealso::

            :ref:`autogen_module_prefix`

        Parameters specific to individual backends:

        :param mssql_batch_separator: The "batch separator" which will
         be placed between each statement when generating offline SQL Server
         migrations.  Defaults to ``GO``.  Note this is in addition to the
         customary semicolon ``;`` at the end of each statement; SQL Server
         considers the "batch separator" to denote the end of an
         individual statement execution, and cannot group certain
         dependent operations in one step.
        :param oracle_batch_separator: The "batch separator" which will
         be placed between each statement when generating offline
         Oracle migrations.  Defaults to ``/``.  Oracle doesn't add a
         semicolon between statements like most other backends.

        """
        opts = self.context_opts
        if transactional_ddl is not None:
            opts["transactional_ddl"] = transactional_ddl
        if output_buffer is not None:
            opts["output_buffer"] = output_buffer
        elif self.config.output_buffer is not None:
            opts["output_buffer"] = self.config.output_buffer
        if starting_rev:
            opts['starting_rev'] = starting_rev
        if tag:
            opts['tag'] = tag
        if template_args and 'template_args' in opts:
            opts['template_args'].update(template_args)
        opts["transaction_per_migration"] = transaction_per_migration
        opts['target_metadata'] = target_metadata
        opts['include_symbol'] = include_symbol
        opts['include_object'] = include_object
        opts['include_schemas'] = include_schemas
        opts['upgrade_token'] = upgrade_token
        opts['downgrade_token'] = downgrade_token
        opts['sqlalchemy_module_prefix'] = sqlalchemy_module_prefix
        opts['alembic_module_prefix'] = alembic_module_prefix
        opts['user_module_prefix'] = user_module_prefix
        if render_item is not None:
            opts['render_item'] = render_item
        if compare_type is not None:
            opts['compare_type'] = compare_type
        if compare_server_default is not None:
            opts['compare_server_default'] = compare_server_default
        opts['script'] = self.script

        opts.update(kw)

        self._migration_context = MigrationContext.configure(
            connection=connection,
            url=url,
            dialect_name=dialect_name,
            opts=opts
        )
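
    # A minimal sketch of a typical online-mode call from an ``env.py``
    # script; ``connection`` and ``target_metadata`` are names the script
    # itself would define:
    #
    #     context.configure(
    #         connection=connection,
    #         target_metadata=target_metadata,
    #         compare_type=True,
    #     )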

    def run_migrations(self, **kw):
        """Run migrations as determined by the current command line
        configuration
        as well as versioning information present (or not) in the current
        database connection (if one is present).

        The function accepts optional ``**kw`` arguments.  If these are
        passed, they are sent directly to the ``upgrade()`` and
        ``downgrade()``
        functions within each target revision file.  By modifying the
        ``script.py.mako`` file so that the ``upgrade()`` and ``downgrade()``
        functions accept arguments, parameters can be passed here so that
        contextual information, usually information to identify a particular
        database in use, can be passed from a custom ``env.py`` script
        to the migration functions.

        This function requires that a :class:`.MigrationContext` has
        first been made available via :meth:`.configure`.

        """
        with Operations.context(self._migration_context):
            self.get_context().run_migrations(**kw)
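
    # Sketch (hypothetical names): if ``script.py.mako`` is edited so that
    # revision files define ``upgrade(engine_name)`` and
    # ``downgrade(engine_name)``, an env.py can forward that parameter
    # per database:
    #
    #     context.run_migrations(engine_name="engine1")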

    def execute(self, sql, execution_options=None):
        """Execute the given SQL using the current change context.

        The behavior of :meth:`.execute` is the same
        as that of :meth:`.Operations.execute`.  Please see that
        function's documentation for full detail including
        caveats and limitations.

        This function requires that a :class:`.MigrationContext` has
        first been made available via :meth:`.configure`.

        """
        self.get_context().execute(sql,
                                   execution_options=execution_options)

    def static_output(self, text):
        """Emit text directly to the "offline" SQL stream.

        Typically this is for emitting comments that
        start with ``--``.  The statement is not treated
        as a SQL execution; no ``;`` or batch separator
        is added, etc.

        """
        self.get_context().impl.static_output(text)
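
    # Offline-mode sketch: when running with ``--sql``, this writes the
    # given line verbatim into the generated script:
    #
    #     context.static_output("-- comment emitted by a custom env.py")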

    def begin_transaction(self):
        """Return a context manager that will
        enclose an operation within a "transaction",
        as defined by the environment's offline
        and transactional DDL settings.

        e.g.::

            with context.begin_transaction():
                context.run_migrations()

        :meth:`.begin_transaction` is intended to
        "do the right thing" regardless of
        calling context:

        * If :meth:`.is_transactional_ddl` is ``False``,
          returns a "do nothing" context manager
          which otherwise produces no transactional
          state or directives.
        * If :meth:`.is_offline_mode` is ``True``,
          returns a context manager that will
          invoke the :meth:`.DefaultImpl.emit_begin`
          and :meth:`.DefaultImpl.emit_commit`
          methods, which will produce the string
          directives ``BEGIN`` and ``COMMIT`` on
          the output stream, as rendered by the
          target backend (e.g. SQL Server would
          emit ``BEGIN TRANSACTION``).
        * Otherwise, calls :meth:`sqlalchemy.engine.Connection.begin`
          on the current online connection, which
          returns a :class:`sqlalchemy.engine.Transaction`
          object.  This object demarcates a real
          transaction and is itself a context manager,
          which will roll back if an exception
          is raised.

        Note that a custom ``env.py`` script which
        has more specific transactional needs can of course
        manipulate the :class:`~sqlalchemy.engine.Connection`
        directly to produce transactional state in "online"
        mode.

        """

        return self.get_context().begin_transaction()

    def get_context(self):
        """Return the current :class:`.MigrationContext` object.

        If :meth:`.EnvironmentContext.configure` has not been
        called yet, raises an exception.

        """

        if self._migration_context is None:
            raise Exception("No context has been configured yet.")
        return self._migration_context

    def get_bind(self):
        """Return the current 'bind'.

        In "online" mode, this is the
        :class:`sqlalchemy.engine.Connection` currently being used
        to emit SQL to the database.

        This function requires that a :class:`.MigrationContext`
        has first been made available via :meth:`.configure`.

        """
        return self.get_context().bind

    def get_impl(self):
        return self.get_context().impl
352
Darwin/lib/python2.7/site-packages/alembic/migration.py
Normal file
@@ -0,0 +1,352 @@
import io
import logging
import sys
from contextlib import contextmanager


from sqlalchemy import MetaData, Table, Column, String, literal_column
from sqlalchemy import create_engine
from sqlalchemy.engine import url as sqla_url

from .compat import callable, EncodedIO
from . import ddl, util

log = logging.getLogger(__name__)

class MigrationContext(object):
    """Represent the database state made available to a migration
    script.

    :class:`.MigrationContext` is the front end to an actual
    database connection, or alternatively a string output
    stream given a particular database dialect,
    from an Alembic perspective.

    When inside the ``env.py`` script, the :class:`.MigrationContext`
    is available via the
    :meth:`.EnvironmentContext.get_context` method,
    which is available at ``alembic.context``::

        # from within env.py script
        from alembic import context
        migration_context = context.get_context()

    For usage outside of an ``env.py`` script, such as for
    utility routines that want to check the current version
    in the database, the :meth:`.MigrationContext.configure`
    method can be used to create new :class:`.MigrationContext` objects.
    For example, to get at the current revision in the
    database using :meth:`.MigrationContext.get_current_revision`::

        # in any application, outside of an env.py script
        from alembic.migration import MigrationContext
        from sqlalchemy import create_engine

        engine = create_engine("postgresql://mydatabase")
        conn = engine.connect()

        context = MigrationContext.configure(conn)
        current_rev = context.get_current_revision()

    The above context can also be used to produce
    Alembic migration operations with an :class:`.Operations`
    instance::

        # in any application, outside of the normal Alembic environment
        from alembic.operations import Operations
        op = Operations(context)
        op.alter_column("mytable", "somecolumn", nullable=True)

    """
    def __init__(self, dialect, connection, opts):
        self.opts = opts
        self.dialect = dialect
        self.script = opts.get('script')

        as_sql = opts.get('as_sql', False)
        transactional_ddl = opts.get("transactional_ddl")

        self._transaction_per_migration = opts.get(
            "transaction_per_migration", False)

        if as_sql:
            self.connection = self._stdout_connection(connection)
            assert self.connection is not None
        else:
            self.connection = connection
        self._migrations_fn = opts.get('fn')
        self.as_sql = as_sql

        if "output_encoding" in opts:
            self.output_buffer = EncodedIO(
                opts.get("output_buffer") or sys.stdout,
                opts['output_encoding']
            )
        else:
            self.output_buffer = opts.get("output_buffer", sys.stdout)

        self._user_compare_type = opts.get('compare_type', False)
        self._user_compare_server_default = opts.get(
            'compare_server_default',
            False)
        version_table = opts.get('version_table', 'alembic_version')
        version_table_schema = opts.get('version_table_schema', None)
        self._version = Table(
            version_table, MetaData(),
            Column('version_num', String(32), nullable=False),
            schema=version_table_schema)

        self._start_from_rev = opts.get("starting_rev")
        self.impl = ddl.DefaultImpl.get_by_dialect(dialect)(
            dialect, self.connection, self.as_sql,
            transactional_ddl,
            self.output_buffer,
            opts
        )
        log.info("Context impl %s.", self.impl.__class__.__name__)
        if self.as_sql:
            log.info("Generating static SQL")
        log.info("Will assume %s DDL.",
                 "transactional" if self.impl.transactional_ddl
                 else "non-transactional")

    @classmethod
    def configure(cls,
                  connection=None,
                  url=None,
                  dialect_name=None,
                  opts={},
                  ):
        """Create a new :class:`.MigrationContext`.

        This is a factory method usually called
        by :meth:`.EnvironmentContext.configure`.

        :param connection: a :class:`~sqlalchemy.engine.Connection`
         to use for SQL execution in "online" mode.  When present,
         is also used to determine the type of dialect in use.
        :param url: a string database url, or a
         :class:`sqlalchemy.engine.url.URL` object.
         The type of dialect to be used will be derived from this if
         ``connection`` is not passed.
        :param dialect_name: string name of a dialect, such as
         "postgresql", "mssql", etc.  The type of dialect to be used will be
         derived from this if ``connection`` and ``url`` are not passed.
        :param opts: dictionary of options.  Most other options
         accepted by :meth:`.EnvironmentContext.configure` are passed via
         this dictionary.

        """
        if connection:
            dialect = connection.dialect
        elif url:
            url = sqla_url.make_url(url)
            dialect = url.get_dialect()()
        elif dialect_name:
            url = sqla_url.make_url("%s://" % dialect_name)
            dialect = url.get_dialect()()
        else:
            raise Exception("Connection, url, or dialect_name is required.")

        return MigrationContext(dialect, connection, opts)
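
    # Sketch: an offline context can also be built without a live
    # connection, e.g. to render SQL against a dialect by name
    # (illustrative only):
    #
    #     ctx = MigrationContext.configure(
    #         dialect_name="postgresql",
    #         opts={'as_sql': True})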

    def begin_transaction(self, _per_migration=False):
        transaction_now = _per_migration == self._transaction_per_migration

        # a "do nothing" transaction is returned both when this particular
        # call doesn't match the configured transaction scope, and when the
        # dialect does not support transactional DDL
        if not transaction_now or not self.impl.transactional_ddl:
            @contextmanager
            def do_nothing():
                yield
            return do_nothing()
        elif self.as_sql:
            @contextmanager
            def begin_commit():
                self.impl.emit_begin()
                yield
                self.impl.emit_commit()
            return begin_commit()
        else:
            return self.bind.begin()

    def get_current_revision(self):
        """Return the current revision, usually that which is present
        in the ``alembic_version`` table in the database.

        If this :class:`.MigrationContext` was configured in "offline"
        mode, that is with ``as_sql=True``, the ``starting_rev``
        parameter is returned instead, if any.

        """
        if self.as_sql:
            return self._start_from_rev
        else:
            if self._start_from_rev:
                raise util.CommandError(
                    "Can't specify current_rev to context "
                    "when using a database connection")
            self._version.create(self.connection, checkfirst=True)
            return self.connection.scalar(self._version.select())

    _current_rev = get_current_revision
    """The 0.2 method name, for backwards compat."""

    def _update_current_rev(self, old, new):
        if old == new:
            return
        if new is None:
            self.impl._exec(self._version.delete())
        elif old is None:
            self.impl._exec(self._version.insert().
                            values(version_num=literal_column("'%s'" % new))
                            )
        else:
            self.impl._exec(self._version.update().
                            values(version_num=literal_column("'%s'" % new))
                            )

    def run_migrations(self, **kw):
        """Run the migration scripts established for this
        :class:`.MigrationContext`, if any.

        The commands in :mod:`alembic.command` will set up a function
        that is ultimately passed to the :class:`.MigrationContext`
        as the ``fn`` argument.  This function represents the "work"
        that will be done when :meth:`.MigrationContext.run_migrations`
        is called, typically from within the ``env.py`` script of the
        migration environment.  The "work function" then provides an iterable
        of version callables and other version information which
        in the case of the ``upgrade`` or ``downgrade`` commands are the
        list of version scripts to invoke.  Other commands, such as
        ``current`` or ``stamp``, yield nothing, and instead run some
        other operation against the database.

        :param \**kw: keyword arguments here will be passed to each
         migration callable, that is the ``upgrade()`` or ``downgrade()``
         method within revision scripts.

        """
        current_rev = rev = False
        stamp_per_migration = not self.impl.transactional_ddl or \
            self._transaction_per_migration

        self.impl.start_migrations()
        for change, prev_rev, rev, doc in self._migrations_fn(
                self.get_current_revision(),
                self):
            with self.begin_transaction(_per_migration=True):
                if current_rev is False:
                    current_rev = prev_rev
                    if self.as_sql and not current_rev:
                        self._version.create(self.connection)
                if doc:
                    log.info("Running %s %s -> %s, %s", change.__name__,
                             prev_rev, rev, doc)
                else:
                    log.info("Running %s %s -> %s", change.__name__,
                             prev_rev, rev)
                if self.as_sql:
                    self.impl.static_output(
                        "-- Running %s %s -> %s" %
                        (change.__name__, prev_rev, rev)
                    )
                change(**kw)
                if stamp_per_migration:
                    self._update_current_rev(prev_rev, rev)
                prev_rev = rev

        if rev is not False:
            if not stamp_per_migration:
                self._update_current_rev(current_rev, rev)

            if self.as_sql and not rev:
                self._version.drop(self.connection)

    def execute(self, sql, execution_options=None):
        """Execute a SQL construct or string statement.

        The underlying execution mechanics are used, that is
        if this is "offline mode" the SQL is written to the
        output buffer, otherwise the SQL is emitted on
        the current SQLAlchemy connection.

        """
        self.impl._exec(sql, execution_options)

    def _stdout_connection(self, connection):
        def dump(construct, *multiparams, **params):
            self.impl._exec(construct)

        return create_engine("%s://" % self.dialect.name,
                             strategy="mock", executor=dump)

    @property
    def bind(self):
        """Return the current "bind".

        In online mode, this is an instance of
        :class:`sqlalchemy.engine.Connection`, and is suitable
        for ad-hoc execution of any kind of usage described
        in :ref:`sqlexpression_toplevel` as well as
        for usage with the :meth:`sqlalchemy.schema.Table.create`
        and :meth:`sqlalchemy.schema.MetaData.create_all` methods
        of :class:`~sqlalchemy.schema.Table`,
        :class:`~sqlalchemy.schema.MetaData`.

        Note that when "standard output" mode is enabled,
        this bind will be a "mock" connection handler that cannot
        return results and is only appropriate for a very limited
        subset of commands.

        """
        return self.connection

    def _compare_type(self, inspector_column, metadata_column):
        if self._user_compare_type is False:
            return False

        if callable(self._user_compare_type):
            user_value = self._user_compare_type(
                self,
                inspector_column,
                metadata_column,
                inspector_column.type,
                metadata_column.type
            )
            if user_value is not None:
                return user_value

        return self.impl.compare_type(
            inspector_column,
            metadata_column)

    def _compare_server_default(self, inspector_column,
                                metadata_column,
                                rendered_metadata_default,
                                rendered_column_default):

        if self._user_compare_server_default is False:
            return False

        if callable(self._user_compare_server_default):
            user_value = self._user_compare_server_default(
                self,
                inspector_column,
                metadata_column,
                rendered_column_default,
                metadata_column.server_default,
                rendered_metadata_default
            )
            if user_value is not None:
                return user_value

        return self.impl.compare_server_default(
            inspector_column,
            metadata_column,
            rendered_metadata_default,
            rendered_column_default)
6
Darwin/lib/python2.7/site-packages/alembic/op.py
Normal file
@@ -0,0 +1,6 @@
from .operations import Operations
from . import util

# create proxy functions for
# each method on the Operations class.
util.create_module_class_proxy(Operations, globals(), locals())
1037
Darwin/lib/python2.7/site-packages/alembic/operations.py
Normal file
File diff suppressed because it is too large
513
Darwin/lib/python2.7/site-packages/alembic/script.py
Normal file
@@ -0,0 +1,513 @@
import datetime
import os
import re
import shutil
from . import util

_sourceless_rev_file = re.compile(r'(.*\.py)(c|o)?$')
_only_source_rev_file = re.compile(r'(.*\.py)$')
_legacy_rev = re.compile(r'([a-f0-9]+)\.py$')
_mod_def_re = re.compile(r'(upgrade|downgrade)_([a-z0-9]+)')
_slug_re = re.compile(r'\w+')
_default_file_template = "%(rev)s_%(slug)s"
_relative_destination = re.compile(r'(?:\+|-)\d+')

class ScriptDirectory(object):
    """Provides operations upon an Alembic script directory.

    This object is useful to get information as to current revisions,
    most notably being able to get at the "head" revision, for schemes
    that want to test if the current revision in the database is the most
    recent::

        from alembic.script import ScriptDirectory
        from alembic.config import Config
        config = Config()
        config.set_main_option("script_location", "myapp:migrations")
        script = ScriptDirectory.from_config(config)

        head_revision = script.get_current_head()

    """
    def __init__(self, dir, file_template=_default_file_template,
                 truncate_slug_length=40,
                 sourceless=False):
        self.dir = dir
        self.versions = os.path.join(self.dir, 'versions')
        self.file_template = file_template
        self.truncate_slug_length = truncate_slug_length or 40
        self.sourceless = sourceless

        if not os.access(dir, os.F_OK):
            raise util.CommandError("Path doesn't exist: %r.  Please use "
                                    "the 'init' command to create a new "
                                    "scripts folder." % dir)

    @classmethod
    def from_config(cls, config):
        """Produce a new :class:`.ScriptDirectory` given a :class:`.Config`
        instance.

        The :class:`.Config` need only have the ``script_location`` key
        present.

        """
        script_location = config.get_main_option('script_location')
        if script_location is None:
            raise util.CommandError("No 'script_location' key "
                                    "found in configuration.")
        truncate_slug_length = config.get_main_option("truncate_slug_length")
        if truncate_slug_length is not None:
            truncate_slug_length = int(truncate_slug_length)
        return ScriptDirectory(
            util.coerce_resource_to_filename(script_location),
            file_template=config.get_main_option(
                'file_template',
                _default_file_template),
            truncate_slug_length=truncate_slug_length,
            sourceless=config.get_main_option("sourceless") == "true"
        )

    def walk_revisions(self, base="base", head="head"):
        """Iterate through all revisions.

        This is actually a breadth-first tree traversal,
        with leaf nodes being heads.

        """
        if head == "head":
            heads = set(self.get_heads())
        else:
            heads = set([head])
        while heads:
            todo = set(heads)
            heads = set()
            for head in todo:
                if head in heads:
                    break
                for sc in self.iterate_revisions(head, base):
                    if sc.is_branch_point and sc.revision not in todo:
                        heads.add(sc.revision)
                        break
                    else:
                        yield sc
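
    # Usage sketch: walk from the head(s) down to base, printing each
    # revision identifier and its parent:
    #
    #     for sc in script.walk_revisions():
    #         print("%s -> %s" % (sc.down_revision, sc.revision))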

    def get_revision(self, id_):
        """Return the :class:`.Script` instance with the given rev id."""

        id_ = self.as_revision_number(id_)
        try:
            return self._revision_map[id_]
        except KeyError:
            # do a partial lookup
            revs = [x for x in self._revision_map
                    if x is not None and x.startswith(id_)]
            if not revs:
                raise util.CommandError("No such revision '%s'" % id_)
            elif len(revs) > 1:
                raise util.CommandError(
                    "Multiple revisions start "
                    "with '%s', %s..." % (
                        id_,
                        ", ".join("'%s'" % r for r in revs[0:3])
                    ))
            else:
                return self._revision_map[revs[0]]

    _get_rev = get_revision

    def as_revision_number(self, id_):
        """Convert a symbolic revision, i.e. 'head' or 'base', into
        an actual revision number."""

        if id_ == 'head':
            id_ = self.get_current_head()
        elif id_ == 'base':
            id_ = None
        return id_

    _as_rev_number = as_revision_number

    def iterate_revisions(self, upper, lower):
        """Iterate through script revisions, starting at the given
        upper revision identifier and ending at the lower.

        The traversal uses strictly the ``down_revision``
        marker inside each migration script, so
        it is a requirement that upper >= lower,
        else you'll get nothing back.

        The iterator yields :class:`.Script` objects.

        """
        if upper is not None and _relative_destination.match(upper):
            relative = int(upper)
            revs = list(self._iterate_revisions("head", lower))
            revs = revs[-relative:]
            if len(revs) != abs(relative):
                raise util.CommandError(
                    "Relative revision %s didn't "
                    "produce %d migrations" % (upper, abs(relative)))
            return iter(revs)
        elif lower is not None and _relative_destination.match(lower):
            relative = int(lower)
            revs = list(self._iterate_revisions(upper, "base"))
            revs = revs[0:-relative]
            if len(revs) != abs(relative):
                raise util.CommandError(
                    "Relative revision %s didn't "
                    "produce %d migrations" % (lower, abs(relative)))
            return iter(revs)
        else:
            return self._iterate_revisions(upper, lower)
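
    # The relative forms handled above correspond to command-line usage
    # such as (illustrative):
    #
    #     alembic upgrade +2     # upper is the relative string "+2"
    #     alembic downgrade -1   # lower is the relative string "-1"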

    def _iterate_revisions(self, upper, lower):
        lower = self.get_revision(lower)
        upper = self.get_revision(upper)
        orig = lower.revision if lower else 'base', \
            upper.revision if upper else 'base'
        script = upper
        while script != lower:
            if script is None and lower is not None:
                raise util.CommandError(
                    "Revision %s is not an ancestor of %s" % orig)
            yield script
            downrev = script.down_revision
            script = self._revision_map[downrev]

    def _upgrade_revs(self, destination, current_rev):
        revs = self.iterate_revisions(destination, current_rev)
        return [
            (script.module.upgrade, script.down_revision, script.revision,
             script.doc)
            for script in reversed(list(revs))
        ]

    def _downgrade_revs(self, destination, current_rev):
        revs = self.iterate_revisions(current_rev, destination)
        return [
            (script.module.downgrade, script.revision, script.down_revision,
             script.doc)
            for script in revs
        ]

    def run_env(self):
        """Run the script environment.

        This basically runs the ``env.py`` script present
        in the migration environment.  It is called exclusively
        by the command functions in :mod:`alembic.command`.

        """
        util.load_python_file(self.dir, 'env.py')

    @property
    def env_py_location(self):
        return os.path.abspath(os.path.join(self.dir, "env.py"))

    @util.memoized_property
    def _revision_map(self):
        map_ = {}
        for file_ in os.listdir(self.versions):
            script = Script._from_filename(self, self.versions, file_)
            if script is None:
                continue
            if script.revision in map_:
                util.warn("Revision %s is present more than once" %
                          script.revision)
            map_[script.revision] = script
        for rev in map_.values():
            if rev.down_revision is None:
                continue
            if rev.down_revision not in map_:
                util.warn("Revision %s referenced from %s is not present"
                          % (rev.down_revision, rev))
                rev.down_revision = None
            else:
                map_[rev.down_revision].add_nextrev(rev.revision)
        map_[None] = None
        return map_

    def _rev_path(self, rev_id, message, create_date):
        slug = "_".join(_slug_re.findall(message or "")).lower()
        if len(slug) > self.truncate_slug_length:
            slug = slug[:self.truncate_slug_length].rsplit('_', 1)[0] + '_'
        filename = "%s.py" % (
            self.file_template % {
                'rev': rev_id,
                'slug': slug,
                'year': create_date.year,
                'month': create_date.month,
                'day': create_date.day,
                'hour': create_date.hour,
                'minute': create_date.minute,
                'second': create_date.second
            }
        )
        return os.path.join(self.versions, filename)
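
    # Sketch: given the tokens consumed above, an ``alembic.ini`` setting
    # such as
    #
    #     file_template = %%(year)d-%%(month).2d-%%(day).2d_%%(rev)s_%%(slug)s
    #
    # would produce paths like
    # ``versions/2014-05-01_1975ea83b712_add_account.py`` (values
    # illustrative).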

    def get_current_head(self):
        """Return the current head revision.

        If the script directory has multiple heads
        due to branching, an error is raised.

        Returns a string revision number.

        """
        current_heads = self.get_heads()
        if len(current_heads) > 1:
            raise util.CommandError(
                'Only a single head is supported.  The '
                'script directory has multiple heads (due to branching), '
                'which must be resolved by manually editing the revision '
                'files to form a linear sequence.  Run `alembic branches` '
                'to see the divergence(s).')

        if current_heads:
            return current_heads[0]
        else:
            return None

    _current_head = get_current_head
    """the 0.2 name, for backwards compat."""

    def get_heads(self):
        """Return all "head" revisions as strings.

        Returns a list of string revision numbers.

        This is normally a list of length one,
        unless branches are present.  The
        :meth:`.ScriptDirectory.get_current_head` method
        can be used normally when a script directory
        has only one head.

        """
        heads = []
        for script in self._revision_map.values():
            if script and script.is_head:
                heads.append(script.revision)
        return heads

    def get_base(self):
        """Return the "base" revision as a string.

        This is the revision number of the script that
        has a ``down_revision`` of None.

        Behavior is not defined if more than one script
        has a ``down_revision`` of None.

        """
        for script in self._revision_map.values():
            if script and script.down_revision is None \
                    and script.revision in self._revision_map:
                return script.revision
        else:
            return None

    def _generate_template(self, src, dest, **kw):
        util.status("Generating %s" % os.path.abspath(dest),
                    util.template_to_file,
                    src,
                    dest,
                    **kw
                    )

    def _copy_file(self, src, dest):
        util.status("Generating %s" % os.path.abspath(dest),
                    shutil.copy,
                    src, dest)

    def generate_revision(self, revid, message, refresh=False, **kw):
        """Generate a new revision file.

        This runs the ``script.py.mako`` template, given
        template arguments, and creates a new file.

        :param revid: String revision id.  Typically this
         comes from ``alembic.util.rev_id()``.
        :param message: the revision message, the one passed
         by the ``-m`` argument to the ``revision`` command.
        :param refresh: when True, the in-memory state of this
         :class:`.ScriptDirectory` will be updated with a new
         :class:`.Script` instance representing the new revision;
         the :class:`.Script` instance is returned.
         If False, the file is created but the state of the
         :class:`.ScriptDirectory` is unmodified; ``None``
         is returned.

        """
        current_head = self.get_current_head()
        create_date = datetime.datetime.now()
        path = self._rev_path(revid, message, create_date)
        self._generate_template(
            os.path.join(self.dir, "script.py.mako"),
            path,
            up_revision=str(revid),
            down_revision=current_head,
            create_date=create_date,
            message=message if message is not None else "empty message",
            **kw
        )
        if refresh:
            script = Script._from_path(self, path)
            self._revision_map[script.revision] = script
            if script.down_revision:
                self._revision_map[script.down_revision].\
                    add_nextrev(script.revision)
            return script
        else:
            return None
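
    # Usage sketch, assuming a standard ``script.py.mako`` is present in
    # the script directory:
    #
    #     from alembic.util import rev_id
    #     new_script = scriptdir.generate_revision(
    #         rev_id(), "add account table", refresh=True)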

class Script(object):
    """Represent a single revision file in a ``versions/`` directory.

    The :class:`.Script` instance is returned by methods
    such as :meth:`.ScriptDirectory.iterate_revisions`.

    """

    nextrev = frozenset()

    def __init__(self, module, rev_id, path):
        self.module = module
        self.revision = rev_id
        self.path = path
        self.down_revision = getattr(module, 'down_revision', None)

    revision = None
    """The string revision number for this :class:`.Script` instance."""

    module = None
    """The Python module representing the actual script itself."""

    path = None
    """Filesystem path of the script."""

    down_revision = None
    """The ``down_revision`` identifier within the migration script."""

    @property
    def doc(self):
        """Return the docstring given in the script."""

        return re.split("\n\n", self.longdoc)[0]

    @property
    def longdoc(self):
        """Return the docstring given in the script."""

        doc = self.module.__doc__
        if doc:
            if hasattr(self.module, "_alembic_source_encoding"):
                doc = doc.decode(self.module._alembic_source_encoding)
            return doc.strip()
        else:
            return ""

    def add_nextrev(self, rev):
        self.nextrev = self.nextrev.union([rev])

    @property
    def is_head(self):
        """Return True if this :class:`.Script` is a 'head' revision.

        This is determined based on whether any other :class:`.Script`
        within the :class:`.ScriptDirectory` refers to this
        :class:`.Script`.  Multiple heads can be present.

        """
        return not bool(self.nextrev)

    @property
    def is_branch_point(self):
        """Return True if this :class:`.Script` is a branch point.

        A branch point is defined as a :class:`.Script` which is referred
        to by more than one succeeding :class:`.Script`; that is, more
        than one :class:`.Script` has a ``down_revision`` identifier pointing
        here.

        """
        return len(self.nextrev) > 1

    @property
    def log_entry(self):
        return \
            "Rev: %s%s%s\n" \
            "Parent: %s\n" \
            "Path: %s\n" \
            "\n%s\n" % (
                self.revision,
                " (head)" if self.is_head else "",
                " (branchpoint)" if self.is_branch_point else "",
                self.down_revision,
                self.path,
                "\n".join(
                    "    %s" % para
                    for para in self.longdoc.splitlines()
                )
            )

    def __str__(self):
        return "%s -> %s%s%s, %s" % (
            self.down_revision,
            self.revision,
            " (head)" if self.is_head else "",
            " (branchpoint)" if self.is_branch_point else "",
            self.doc)

    @classmethod
    def _from_path(cls, scriptdir, path):
        dir_, filename = os.path.split(path)
        return cls._from_filename(scriptdir, dir_, filename)

    @classmethod
    def _from_filename(cls, scriptdir, dir_, filename):
        if scriptdir.sourceless:
            py_match = _sourceless_rev_file.match(filename)
        else:
            py_match = _only_source_rev_file.match(filename)

        if not py_match:
            return None

        py_filename = py_match.group(1)

        if scriptdir.sourceless:
            is_c = py_match.group(2) == 'c'
            is_o = py_match.group(2) == 'o'
        else:
            is_c = is_o = False

        if is_o or is_c:
            py_exists = os.path.exists(os.path.join(dir_, py_filename))
            pyc_exists = os.path.exists(os.path.join(dir_, py_filename + "c"))

            # prefer .py over .pyc because we'd like to get the
            # source encoding; prefer .pyc over .pyo because we'd like to
            # have the docstrings which a -OO file would not have
            if py_exists or (is_o and pyc_exists):
                return None

        module = util.load_python_file(dir_, filename)

        if not hasattr(module, "revision"):
            # attempt to get the revision id from the script name,
            # this for legacy only
            m = _legacy_rev.match(filename)
            if not m:
                raise util.CommandError(
                    "Could not determine revision id from filename %s. "
                    "Be sure the 'revision' variable is "
                    "declared inside the script (please see 'Upgrading "
                    "from Alembic 0.1 to 0.2' in the documentation)."
                    % filename)
            else:
                revision = m.group(1)
        else:
            revision = module.revision
        return Script(module, revision, os.path.join(dir_, filename))
@@ -0,0 +1 @@
Generic single-database configuration.
@@ -0,0 +1,59 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = ${script_location}

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

sqlalchemy.url = driver://user:pass@localhost/dbname


# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
@@ -0,0 +1,71 @@
from __future__ import with_statement
from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well.  By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(url=url, target_metadata=target_metadata)

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix='sqlalchemy.',
        poolclass=pool.NullPool)

    connection = engine.connect()
    context.configure(
        connection=connection,
        target_metadata=target_metadata
    )

    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        connection.close()

if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
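(Illustration, not part of the committed file.) The final if/else above is what routes between the two modes; assuming the standard ``alembic`` console entry point in ``alembic.config``, the offline branch can be exercised from Python as well as from the command line::

    from alembic.config import main

    # roughly equivalent to running "alembic upgrade head --sql" from a
    # directory containing alembic.ini; env.py then takes the
    # run_migrations_offline() branch and emits SQL instead of executing it
    main(argv=["upgrade", "head", "--sql"])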
@@ -0,0 +1,22 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision}
Create Date: ${create_date}

"""

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}
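(Illustration, not part of the committed file; the revision ids and table names are hypothetical.) A migration script generated from this template renders roughly as::

    """add account table

    Revision ID: 1975ea83b712
    Revises: None
    Create Date: 2014-06-01 12:00:00.000000

    """

    # revision identifiers, used by Alembic.
    revision = '1975ea83b712'
    down_revision = None

    from alembic import op
    import sqlalchemy as sa


    def upgrade():
        op.create_table(
            'account',
            sa.Column('id', sa.Integer, primary_key=True),
            sa.Column('name', sa.String(50), nullable=False),
        )


    def downgrade():
        op.drop_table('account')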
@@ -0,0 +1 @@
Rudimentary multi-database configuration.
@@ -0,0 +1,65 @@
# A multi-database configuration.

[alembic]
# path to migration scripts
script_location = ${script_location}

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# max length of characters to apply to the
# "slug" field
#truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

databases = engine1, engine2

[engine1]
sqlalchemy.url = driver://user:pass@localhost/dbname

[engine2]
sqlalchemy.url = driver://user:pass@localhost/dbname2


# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
@@ -0,0 +1,130 @@
from __future__ import with_statement
from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig
import logging
import re

USE_TWOPHASE = False

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')

# gather section names referring to different
# databases.  These are named "engine1", "engine2"
# in the sample .ini file.
db_names = config.get_main_option('databases')

# add your model's MetaData objects here
# for 'autogenerate' support.  These must be set
# up to hold just those tables targeting a
# particular database.  table.tometadata() may be
# helpful here in case a "copy" of
# a MetaData is needed.
# from myapp import mymodel
# target_metadata = {
#     'engine1': mymodel.metadata1,
#     'engine2': mymodel.metadata2
# }
target_metadata = {}

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well.  By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    # for the --sql use case, run migrations for each URL into
    # individual files.

    engines = {}
    for name in re.split(r',\s*', db_names):
        engines[name] = rec = {}
        rec['url'] = context.config.get_section_option(name,
                                                       "sqlalchemy.url")

    for name, rec in engines.items():
        logger.info("Migrating database %s" % name)
        file_ = "%s.sql" % name
        logger.info("Writing output to %s" % file_)
        with open(file_, 'w') as buffer:
            context.configure(url=rec['url'], output_buffer=buffer,
                              target_metadata=target_metadata.get(name))
            with context.begin_transaction():
                context.run_migrations(engine_name=name)


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    # for the direct-to-DB use case, start a transaction on all
    # engines, then run all migrations, then commit all transactions.

    engines = {}
    for name in re.split(r',\s*', db_names):
        engines[name] = rec = {}
        rec['engine'] = engine_from_config(
            context.config.get_section(name),
            prefix='sqlalchemy.',
            poolclass=pool.NullPool)

    for name, rec in engines.items():
        engine = rec['engine']
        rec['connection'] = conn = engine.connect()

        if USE_TWOPHASE:
            rec['transaction'] = conn.begin_twophase()
        else:
            rec['transaction'] = conn.begin()

    try:
        for name, rec in engines.items():
            logger.info("Migrating database %s" % name)
            context.configure(
                connection=rec['connection'],
                upgrade_token="%s_upgrades" % name,
                downgrade_token="%s_downgrades" % name,
                target_metadata=target_metadata.get(name)
            )
            context.run_migrations(engine_name=name)

        if USE_TWOPHASE:
            for rec in engines.values():
                rec['transaction'].prepare()

        for rec in engines.values():
            rec['transaction'].commit()
    except:
        for rec in engines.values():
            rec['transaction'].rollback()
        raise
    finally:
        for rec in engines.values():
            rec['connection'].close()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
@@ -0,0 +1,43 @@
<%!
import re

%>"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision}
Create Date: ${create_date}

"""

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

def upgrade(engine_name):
    eval("upgrade_%s" % engine_name)()


def downgrade(engine_name):
    eval("downgrade_%s" % engine_name)()

<%
    db_names = config.get_main_option("databases")
%>

## generate an "upgrade_<xyz>() / downgrade_<xyz>()" function
## for each database name in the ini file.

% for db_name in re.split(r',\s*', db_names):

def upgrade_${db_name}():
    ${context.get("%s_upgrades" % db_name, "pass")}


def downgrade_${db_name}():
    ${context.get("%s_downgrades" % db_name, "pass")}

% endfor
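(Illustration, not part of the committed file; the engine names follow the sample ini.) With ``databases = engine1, engine2``, the function section of a rendered script looks roughly like::

    def upgrade(engine_name):
        # dispatches to the matching generated function below
        eval("upgrade_%s" % engine_name)()


    def downgrade(engine_name):
        eval("downgrade_%s" % engine_name)()


    def upgrade_engine1():
        pass


    def downgrade_engine1():
        pass


    def upgrade_engine2():
        pass


    def downgrade_engine2():
        pass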
@@ -0,0 +1 @@
Configuration that reads from a Pylons project environment.
@@ -0,0 +1,25 @@
# A Pylons configuration.

[alembic]
# path to migration scripts
script_location = ${script_location}

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# max length of characters to apply to the
# "slug" field
#truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

pylons_config_file = ./development.ini

# that's it!
@@ -0,0 +1,86 @@
"""Pylons bootstrap environment.

Place 'pylons_config_file' into alembic.ini, and the application will
be loaded from there.

"""
from alembic import context
from paste.deploy import loadapp
from logging.config import fileConfig
from sqlalchemy.engine.base import Engine


try:
    # if a pylons app is already loaded, don't create a new app
    from pylons import config as pylons_config
    pylons_config['__file__']
except:
    config = context.config
    # can use config['__file__'] here, i.e. the Pylons
    # ini file, instead of alembic.ini
    config_file = config.get_main_option('pylons_config_file')
    fileConfig(config_file)
    wsgi_app = loadapp('config:%s' % config_file, relative_to='.')


# customize this section for non-standard engine configurations.
meta = __import__("%s.model.meta" % wsgi_app.config['pylons.package']).model.meta

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well.  By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    context.configure(
        url=meta.engine.url, target_metadata=target_metadata)
    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    # specify here how the engine is acquired
    # engine = meta.engine
    raise NotImplementedError("Please specify engine connectivity here")

    if isinstance(engine, Engine):
        connection = engine.connect()
    else:
        raise Exception(
            'Expected engine instance got %s instead' % type(engine)
        )

    context.configure(
        connection=connection,
        target_metadata=target_metadata
    )

    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        connection.close()

if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
@@ -0,0 +1,22 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision}
Create Date: ${create_date}

"""

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}
348
Darwin/lib/python2.7/site-packages/alembic/util.py
Normal file
@@ -0,0 +1,348 @@
import sys
import os
import textwrap
import warnings
import re
import inspect
import uuid

from mako.template import Template
from sqlalchemy.engine import url
from sqlalchemy import __version__

from .compat import callable, exec_, load_module_py, load_module_pyc, binary_type


class CommandError(Exception):
    pass


def _safe_int(value):
    try:
        return int(value)
    except:
        return value

_vers = tuple([_safe_int(x) for x in re.findall(r'(\d+|[abc]\d)', __version__)])
sqla_07 = _vers > (0, 7, 2)
sqla_08 = _vers >= (0, 8, 0, 'b2')
sqla_09 = _vers >= (0, 9, 0)
sqla_092 = _vers >= (0, 9, 2)
sqla_094 = _vers >= (0, 9, 4)
if not sqla_07:
    raise CommandError(
        "SQLAlchemy 0.7.3 or greater is required.")

from sqlalchemy.util import format_argspec_plus, update_wrapper
from sqlalchemy.util.compat import inspect_getfullargspec


try:
    import fcntl
    import termios
    import struct
    ioctl = fcntl.ioctl(0, termios.TIOCGWINSZ,
                        struct.pack('HHHH', 0, 0, 0, 0))
    _h, TERMWIDTH, _hp, _wp = struct.unpack('HHHH', ioctl)
    if TERMWIDTH <= 0:  # can occur if running in emacs pseudo-tty
        TERMWIDTH = None
except (ImportError, IOError):
    TERMWIDTH = None


def template_to_file(template_file, dest, **kw):
    with open(dest, 'w') as f:
        f.write(
            Template(filename=template_file).render(**kw)
        )
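(Illustration, not part of the committed file; the template path and keyword are hypothetical.) ``template_to_file`` is the helper that renders the ``.mako`` templates shown earlier::

    # render the generic alembic.ini.mako into a concrete ini file,
    # filling in the ${script_location} placeholder seen above
    template_to_file(
        "templates/generic/alembic.ini.mako",
        "alembic.ini",
        script_location="migrations")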
def create_module_class_proxy(cls, globals_, locals_):
    """Create module level proxy functions for the
    methods on a given class.

    The functions will have a signature compatible
    with that of the methods.  A proxy is established
    using the ``_install_proxy(obj)`` function,
    and removed using ``_remove_proxy()``, both
    installed by calling this function.

    """
    attr_names = set()

    def _install_proxy(obj):
        globals_['_proxy'] = obj
        for name in attr_names:
            globals_[name] = getattr(obj, name)

    def _remove_proxy():
        globals_['_proxy'] = None
        for name in attr_names:
            del globals_[name]

    globals_['_install_proxy'] = _install_proxy
    globals_['_remove_proxy'] = _remove_proxy

    def _create_op_proxy(name):
        fn = getattr(cls, name)
        spec = inspect.getargspec(fn)
        if spec[0] and spec[0][0] == 'self':
            spec[0].pop(0)
        args = inspect.formatargspec(*spec)
        num_defaults = 0
        if spec[3]:
            num_defaults += len(spec[3])
        name_args = spec[0]
        if num_defaults:
            defaulted_vals = name_args[0 - num_defaults:]
        else:
            defaulted_vals = ()

        apply_kw = inspect.formatargspec(
            name_args, spec[1], spec[2],
            defaulted_vals,
            formatvalue=lambda x: '=' + x)

        def _name_error(name):
            raise NameError(
                "Can't invoke function '%s', as the proxy object has "
                "not yet been "
                "established for the Alembic '%s' class. "
                "Try placing this code inside a callable." % (
                    name, cls.__name__
                ))
        globals_['_name_error'] = _name_error

        func_text = textwrap.dedent("""\
        def %(name)s(%(args)s):
            %(doc)r
            try:
                p = _proxy
            except NameError:
                _name_error('%(name)s')
            return _proxy.%(name)s(%(apply_kw)s)
        """ % {
            'name': name,
            'args': args[1:-1],
            'apply_kw': apply_kw[1:-1],
            'doc': fn.__doc__,
        })
        lcl = {}
        exec_(func_text, globals_, lcl)
        return lcl[name]

    for methname in dir(cls):
        if not methname.startswith('_'):
            if callable(getattr(cls, methname)):
                locals_[methname] = _create_op_proxy(methname)
            else:
                attr_names.add(methname)
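(Illustration, not part of the committed file; ``_Ops`` is a hypothetical class.) At module import time the proxying works like this::

    class _Ops(object):
        def add(self, x, y):
            "Add two numbers."
            return x + y

    # generates a module-level add() that forwards to the installed proxy
    create_module_class_proxy(_Ops, globals(), locals())
    _install_proxy(_Ops())
    assert add(1, 2) == 3  # forwards to _Ops.add via _proxy
    _remove_proxy()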
def write_outstream(stream, *text):
    encoding = getattr(stream, 'encoding', 'ascii') or 'ascii'
    for t in text:
        if not isinstance(t, binary_type):
            t = t.encode(encoding, 'replace')
        t = t.decode(encoding)
        try:
            stream.write(t)
        except IOError:
            # suppress "broken pipe" errors.
            # no known way to handle this on Python 3 however
            # as the exception is "ignored" (noisily) in TextIOWrapper.
            break


def coerce_resource_to_filename(fname):
    """Interpret a filename as either a filesystem location or as a
    package resource.

    Names that are non-absolute paths and contain a colon
    are interpreted as resources and coerced to a file location.

    """
    if not os.path.isabs(fname) and ":" in fname:
        import pkg_resources
        fname = pkg_resources.resource_filename(*fname.split(':'))
    return fname
def status(_statmsg, fn, *arg, **kw):
    msg(_statmsg + " ...", False)
    try:
        ret = fn(*arg, **kw)
        write_outstream(sys.stdout, " done\n")
        return ret
    except:
        write_outstream(sys.stdout, " FAILED\n")
        raise


def err(message):
    msg(message)
    sys.exit(-1)


def obfuscate_url_pw(u):
    u = url.make_url(u)
    if u.password:
        u.password = 'XXXXX'
    return str(u)


def asbool(value):
    return value is not None and \
        value.lower() == 'true'
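(Illustration, not part of the committed file; the URL is hypothetical.) ``obfuscate_url_pw`` masks only the password portion of a SQLAlchemy URL::

    print(obfuscate_url_pw("postgresql://scott:tiger@localhost/test"))
    # -> postgresql://scott:XXXXX@localhost/test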
def warn(msg):
    warnings.warn(msg)


def msg(msg, newline=True):
    if TERMWIDTH is None:
        write_outstream(sys.stdout, msg)
        if newline:
            write_outstream(sys.stdout, "\n")
    else:
        # left indent output lines
        lines = textwrap.wrap(msg, TERMWIDTH)
        if len(lines) > 1:
            for line in lines[0:-1]:
                write_outstream(sys.stdout, "  ", line, "\n")
        write_outstream(sys.stdout, "  ", lines[-1], ("\n" if newline else ""))
def load_python_file(dir_, filename):
    """Load a file from the given path as a Python module."""

    module_id = re.sub(r'\W', "_", filename)
    path = os.path.join(dir_, filename)
    _, ext = os.path.splitext(filename)
    if ext == ".py":
        if os.path.exists(path):
            module = load_module_py(module_id, path)
        elif os.path.exists(simple_pyc_file_from_path(path)):
            # look for sourceless load
            module = load_module_pyc(module_id, simple_pyc_file_from_path(path))
        else:
            raise ImportError("Can't find Python file %s" % path)
    elif ext in (".pyc", ".pyo"):
        module = load_module_pyc(module_id, path)
    del sys.modules[module_id]
    return module
def simple_pyc_file_from_path(path):
    """Given a python source path, return the so-called
    "sourceless" .pyc or .pyo path.

    This is just a .pyc or .pyo file where the .py file would be.

    Even with PEP-3147, which normally puts .pyc/.pyo files in __pycache__,
    this use case remains supported as a so-called "sourceless module import".

    """
    if sys.flags.optimize:
        return path + "o"  # e.g. .pyo
    else:
        return path + "c"  # e.g. .pyc


def pyc_file_from_path(path):
    """Given a python source path, locate the .pyc.

    See http://www.python.org/dev/peps/pep-3147/
    #detecting-pep-3147-availability
    http://www.python.org/dev/peps/pep-3147/#file-extension-checks

    """
    import imp
    has3147 = hasattr(imp, 'get_tag')
    if has3147:
        return imp.cache_from_source(path)
    else:
        return simple_pyc_file_from_path(path)
def rev_id():
    val = int(uuid.uuid4()) % 100000000000000
    return hex(val)[2:-1]
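(Illustration, not part of the committed file; the output value is random.) ``rev_id`` yields a short hex token; on Python 2 the ``[2:-1]`` slice strips the ``0x`` prefix and the trailing ``L`` from the hex of a long::

    print(rev_id())  # e.g. '27c6a30d7c24'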
class memoized_property(object):
    """A read-only @property that is only evaluated once."""

    def __init__(self, fget, doc=None):
        self.fget = fget
        self.__doc__ = doc or fget.__doc__
        self.__name__ = fget.__name__

    def __get__(self, obj, cls):
        if obj is None:
            return None
        obj.__dict__[self.__name__] = result = self.fget(obj)
        return result
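(Illustration, not part of the committed file; ``Config`` is a hypothetical class.) Because ``__get__`` stores the computed value in the instance ``__dict__`` under the same name, the instance attribute shadows the descriptor and ``fget`` runs only once per instance::

    class Config(object):
        @memoized_property
        def data(self):
            print("loading...")  # printed only on first access
            return {"key": "value"}

    c = Config()
    c.data  # computes, prints "loading...", caches on the instance
    c.data  # served from c.__dict__; fget is not called again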
class immutabledict(dict):

    def _immutable(self, *arg, **kw):
        raise TypeError("%s object is immutable" % self.__class__.__name__)

    __delitem__ = __setitem__ = __setattr__ = \
        clear = pop = popitem = setdefault = \
        update = _immutable

    def __new__(cls, *args):
        new = dict.__new__(cls)
        dict.__init__(new, *args)
        return new

    def __init__(self, *args):
        pass

    def __reduce__(self):
        return immutabledict, (dict(self), )

    def union(self, d):
        if not self:
            return immutabledict(d)
        else:
            d2 = immutabledict(self)
            dict.update(d2, d)
            return d2

    def __repr__(self):
        return "immutabledict(%s)" % dict.__repr__(self)
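(Illustration, not part of the committed file.) All mutating methods raise ``TypeError``; new values are combined via ``union``, which returns a fresh ``immutabledict``::

    d = immutabledict({'a': 1})
    d2 = d.union({'b': 2})  # new immutabledict; d itself is unchanged
    try:
        d['a'] = 2
    except TypeError:
        pass  # "immutabledict object is immutable"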
def _with_legacy_names(translations):
    def decorate(fn):

        spec = inspect_getfullargspec(fn)
        metadata = dict(target='target', fn='fn')
        metadata.update(format_argspec_plus(spec, grouped=False))

        has_keywords = bool(spec[2])

        if not has_keywords:
            metadata['args'] += ", **kw"
            metadata['apply_kw'] += ", **kw"

        def go(*arg, **kw):
            names = set(kw).difference(spec[0])
            for oldname, newname in translations:
                if oldname in kw:
                    kw[newname] = kw.pop(oldname)
                    names.discard(oldname)

                    warnings.warn(
                        "Argument '%s' is now named '%s' for function '%s'" %
                        (oldname, newname, fn.__name__))
            if not has_keywords and names:
                raise TypeError("Unknown arguments: %s" % ", ".join(names))
            return fn(*arg, **kw)

        code = 'lambda %(args)s: %(target)s(%(apply_kw)s)' % (
            metadata)
        decorated = eval(code, {"target": go})
        decorated.__defaults__ = getattr(fn, '__func__', fn).__defaults__
        update_wrapper(decorated, fn)
        if hasattr(decorated, '__wrapped__'):
            # update_wrapper in py3k applies __wrapped__, which causes
            # inspect.getargspec() to ignore the extra arguments on our
            # wrapper as of Python 3.4.  We need this for the
            # "module class proxy" thing though, so just del the __wrapped__
            # for now. See #175 as well as bugs.python.org/issue17482
            del decorated.__wrapped__
        return decorated

    return decorate