update sqlalchemy

parent 22cbffb8a3, commit e4bd5b5042
362 changed files with 37677 additions and 11013 deletions
lib/python3.4/site-packages/sqlalchemy/ext/__init__.py

@@ -1,6 +1,11 @@
 # ext/__init__.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+from .. import util as _sa_util
+
+_sa_util.dependencies.resolve_all("sqlalchemy.ext")
+
lib/python3.4/site-packages/sqlalchemy/ext/associationproxy.py

@@ -1,5 +1,5 @@
 # ext/associationproxy.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -77,16 +77,16 @@ def association_proxy(target_collection, attr, **kw):


 ASSOCIATION_PROXY = util.symbol('ASSOCIATION_PROXY')
-"""Symbol indicating an :class:`_InspectionAttr` that's
+"""Symbol indicating an :class:`InspectionAttr` that's
 of type :class:`.AssociationProxy`.

-Is assigned to the :attr:`._InspectionAttr.extension_type`
+Is assigned to the :attr:`.InspectionAttr.extension_type`
 attibute.

 """


-class AssociationProxy(interfaces._InspectionAttr):
+class AssociationProxy(interfaces.InspectionAttrInfo):
     """A descriptor that presents a read/write view of an object attribute."""

     is_attribute = False
@@ -94,7 +94,7 @@ class AssociationProxy(interfaces._InspectionAttr):

     def __init__(self, target_collection, attr, creator=None,
                  getset_factory=None, proxy_factory=None,
-                 proxy_bulk_set=None):
+                 proxy_bulk_set=None, info=None):
         """Construct a new :class:`.AssociationProxy`.

         The :func:`.association_proxy` function is provided as the usual
@@ -138,6 +138,11 @@ class AssociationProxy(interfaces._InspectionAttr):
         :param proxy_bulk_set: Optional, use with proxy_factory.  See
           the _set() method for details.

+        :param info: optional, will be assigned to
+         :attr:`.AssociationProxy.info` if present.
+
+        .. versionadded:: 1.0.9
+
         """
         self.target_collection = target_collection
         self.value_attr = attr
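The new ``info`` argument gives the proxy the same ``.info`` dictionary other inspectable ORM attributes carry. A minimal sketch, assuming SQLAlchemy 1.0.9+; the ``User``/``Keyword`` mapping here is illustrative, not part of this diff::

    from sqlalchemy import Column, ForeignKey, Integer, String, Table
    from sqlalchemy.ext.associationproxy import association_proxy
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import relationship

    Base = declarative_base()

    user_keyword = Table(
        'user_keyword', Base.metadata,
        Column('user_id', ForeignKey('user.id'), primary_key=True),
        Column('keyword_id', ForeignKey('keyword.id'), primary_key=True))

    class Keyword(Base):
        __tablename__ = 'keyword'
        id = Column(Integer, primary_key=True)
        keyword = Column(String(50))

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        kw = relationship(Keyword, secondary=user_keyword)

        # the keyword argument added in this hunk rides along on the proxy
        keywords = association_proxy('kw', 'keyword', info={'owner': 'user'})

    assert User.keywords.info == {'owner': 'user'}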
@@ -150,6 +155,8 @@ class AssociationProxy(interfaces._InspectionAttr):
         self.key = '_%s_%s_%s' % (
             type(self).__name__, target_collection, id(self))
         self.collection_class = None
+        if info:
+            self.info = info

     @property
     def remote_attr(self):
@@ -365,13 +372,17 @@ class AssociationProxy(interfaces._InspectionAttr):
         operators of the underlying proxied attributes.

         """

-        if self._value_is_scalar:
-            value_expr = getattr(
-                self.target_class, self.value_attr).has(criterion, **kwargs)
+        if self._target_is_object:
+            if self._value_is_scalar:
+                value_expr = getattr(
+                    self.target_class, self.value_attr).has(
+                    criterion, **kwargs)
+            else:
+                value_expr = getattr(
+                    self.target_class, self.value_attr).any(
+                    criterion, **kwargs)
         else:
-            value_expr = getattr(
-                self.target_class, self.value_attr).any(criterion, **kwargs)
+            value_expr = criterion

         # check _value_is_scalar here, otherwise
         # we're scalar->scalar - call .any() so that
@@ -527,7 +538,10 @@ class _AssociationList(_AssociationCollection):
         return self.setter(object, value)

     def __getitem__(self, index):
-        return self._get(self.col[index])
+        if not isinstance(index, slice):
+            return self._get(self.col[index])
+        else:
+            return [self._get(member) for member in self.col[index]]

     def __setitem__(self, index, value):
         if not isinstance(index, slice):
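With this change, slicing a proxied list returns a plain list of proxied values (previously ``_get()`` was applied to the slice object itself). Continuing the illustrative ``User``/``Keyword`` mapping sketched earlier::

    u = User(kw=[Keyword(keyword='a'), Keyword(keyword='b'),
                 Keyword(keyword='c')])

    u.keywords[1]     # integer index: a single proxied value, 'b'
    u.keywords[0:2]   # slice: a list of proxied values, ['a', 'b']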
@@ -589,7 +603,7 @@ class _AssociationList(_AssociationCollection):

         for member in self.col:
             yield self._get(member)
-        raise StopIteration
+        return

     def append(self, value):
         item = self._create(value)
@@ -893,7 +907,7 @@ class _AssociationSet(_AssociationCollection):
         """
         for member in self.col:
             yield self._get(member)
-        raise StopIteration
+        return

     def add(self, value):
         if value not in self:
lib/python3.4/site-packages/sqlalchemy/ext/automap.py

@@ -1,5 +1,5 @@
 # ext/automap.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -11,12 +11,6 @@ schema, typically though not necessarily one which is reflected.

 .. versionadded:: 0.9.1 Added :mod:`sqlalchemy.ext.automap`.

-.. note::
-
-    The :mod:`sqlalchemy.ext.automap` extension should be considered
-    **experimental** as of 0.9.1.   Featureset and API stability is
-    not guaranteed at this time.
-
 It is hoped that the :class:`.AutomapBase` system provides a quick
 and modernized solution to the problem that the very famous
 `SQLSoup <https://sqlsoup.readthedocs.org/en/latest/>`_
@@ -67,7 +61,7 @@ asking it to reflect the schema and produce mappings::
 Above, calling :meth:`.AutomapBase.prepare` while passing along the
 :paramref:`.AutomapBase.prepare.reflect` parameter indicates that the
 :meth:`.MetaData.reflect` method will be called on this declarative base
-classes' :class:`.MetaData` collection; then, each viable
+classes' :class:`.MetaData` collection; then, each **viable**
 :class:`.Table` within the :class:`.MetaData` will get a new mapped class
 generated automatically.  The :class:`.ForeignKeyConstraint` objects which
 link the various tables together will be used to produce new, bidirectional
@@ -76,6 +70,12 @@ follow along a default naming scheme that we can customize.  At this point,
 our basic mapping consisting of related ``User`` and ``Address`` classes is
 ready to use in the traditional way.

+.. note:: By **viable**, we mean that for a table to be mapped, it must
+   specify a primary key.  Additionally, if the table is detected as being
+   a pure association table between two other tables, it will not be directly
+   mapped and will instead be configured as a many-to-many table between
+   the mappings for the two referring tables.
+
 Generating Mappings from an Existing MetaData
 =============================================

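For orientation, the kind of usage the surrounding text describes, assuming an existing database with ``user`` and ``address`` tables, each with a primary key and a user foreign key on ``address``::

    from sqlalchemy import create_engine
    from sqlalchemy.ext.automap import automap_base
    from sqlalchemy.orm import Session

    Base = automap_base()

    engine = create_engine("sqlite:///mydatabase.db")

    # reflect the tables; each viable table gets a generated mapped class
    Base.prepare(engine, reflect=True)

    User = Base.classes.user
    Address = Base.classes.address

    session = Session(engine)
    u1 = session.query(User).first()
    print(u1.address_collection)   # generated bidirectional relationship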
@@ -111,8 +111,8 @@ explicit table declaration::
     User, Address, Order = Base.classes.user, Base.classes.address,\
         Base.classes.user_order

-Specifying Classes Explcitly
-============================
+Specifying Classes Explicitly
+=============================

 The :mod:`.sqlalchemy.ext.automap` extension allows classes to be defined
 explicitly, in a way similar to that of the :class:`.DeferredReflection` class.
@@ -188,7 +188,7 @@ scheme for class names and a "pluralizer" for collection names using the
         "'words_and_underscores' -> 'WordsAndUnderscores'"

         return str(tablename[0].upper() + \\
-                re.sub(r'_(\w)', lambda m: m.group(1).upper(), tablename[1:]))
+                re.sub(r'_([a-z])', lambda m: m.group(1).upper(), tablename[1:]))

     _pluralizer = inflect.engine()
     def pluralize_collection(base, local_cls, referred_cls, constraint):
@@ -196,10 +196,9 @@ scheme for class names and a "pluralizer" for collection names using the
         "'SomeTerm' -> 'some_terms'"

         referred_name = referred_cls.__name__
-        uncamelized = referred_name[0].lower() + \\
-            re.sub(r'\W',
-                   lambda m: "_%s" % m.group(0).lower(),
-                   referred_name[1:])
+        uncamelized = re.sub(r'[A-Z]',
+                             lambda m: "_%s" % m.group(0).lower(),
+                             referred_name)[1:]
         pluralized = _pluralizer.plural(uncamelized)
         return pluralized

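These two callables are then handed to :meth:`.AutomapBase.prepare`, so that e.g. a ``user_order`` table becomes a ``UserOrder`` class whose collections are named ``user_orders``. A sketch, continuing the ``Base``/``engine`` from the earlier example::

    Base.prepare(engine, reflect=True,
                 classname_for_table=camelize_classname,
                 name_for_collection_relationship=pluralize_collection)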
@@ -243,7 +242,26 @@ follows:
    one-to-many backref will be created on the referred class referring
    to this class.

-4. The names of the relationships are determined using the
+4. If any of the columns that are part of the :class:`.ForeignKeyConstraint`
+   are not nullable (e.g. ``nullable=False``), a
+   :paramref:`~.relationship.cascade` keyword argument
+   of ``all, delete-orphan`` will be added to the keyword arguments to
+   be passed to the relationship or backref.  If the
+   :class:`.ForeignKeyConstraint` reports that
+   :paramref:`.ForeignKeyConstraint.ondelete`
+   is set to ``CASCADE`` for a not null or ``SET NULL`` for a nullable
+   set of columns, the option :paramref:`~.relationship.passive_deletes`
+   flag is set to ``True`` in the set of relationship keyword arguments.
+   Note that not all backends support reflection of ON DELETE.
+
+   .. versionadded:: 1.0.0 - automap will detect non-nullable foreign key
+      constraints when producing a one-to-many relationship and establish
+      a default cascade of ``all, delete-orphan`` if so; additionally,
+      if the constraint specifies :paramref:`.ForeignKeyConstraint.ondelete`
+      of ``CASCADE`` for non-nullable or ``SET NULL`` for nullable columns,
+      the ``passive_deletes=True`` option is also added.
+
+5. The names of the relationships are determined using the
    :paramref:`.AutomapBase.prepare.name_for_scalar_relationship` and
    :paramref:`.AutomapBase.prepare.name_for_collection_relationship`
    callable functions.  It is important to note that the default relationship
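A sketch of a table shape that triggers rule 4 above (names are illustrative): the NOT NULL foreign key yields ``cascade="all, delete-orphan"``, and its ``ON DELETE CASCADE`` additionally yields ``passive_deletes=True`` on the generated one-to-many::

    from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table

    metadata = MetaData()

    Table('parent', metadata,
          Column('id', Integer, primary_key=True))

    Table('child', metadata,
          Column('id', Integer, primary_key=True),
          Column('parent_id',
                 ForeignKey('parent.id', ondelete='CASCADE'),
                 nullable=False))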
@@ -252,18 +270,18 @@ follows:
    alternate class naming scheme, that's the name from which the relationship
    name will be derived.

-5. The classes are inspected for an existing mapped property matching these
+6. The classes are inspected for an existing mapped property matching these
    names.  If one is detected on one side, but none on the other side,
    :class:`.AutomapBase` attempts to create a relationship on the missing side,
    then uses the :paramref:`.relationship.back_populates` parameter in order to
    point the new relationship to the other side.

-6. In the usual case where no relationship is on either side,
+7. In the usual case where no relationship is on either side,
    :meth:`.AutomapBase.prepare` produces a :func:`.relationship` on the
    "many-to-one" side and matches it to the other using the
    :paramref:`.relationship.backref` parameter.

-7. Production of the :func:`.relationship` and optionally the :func:`.backref`
+8. Production of the :func:`.relationship` and optionally the :func:`.backref`
    is handed off to the :paramref:`.AutomapBase.prepare.generate_relationship`
    function, which can be supplied by the end-user in order to augment
    the arguments passed to :func:`.relationship` or :func:`.backref` or to
@@ -606,7 +624,7 @@ def generate_relationship(
    :param base: the :class:`.AutomapBase` class doing the prepare.

    :param direction: indicate the "direction" of the relationship; this will
-    be one of :data:`.ONETOMANY`, :data:`.MANYTOONE`, :data:`.MANYTOONE`.
+    be one of :data:`.ONETOMANY`, :data:`.MANYTOONE`, :data:`.MANYTOMANY`.

    :param return_fn: the function that is used by default to create the
    relationship.  This will be either :func:`.relationship` or
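A typical override, sketched after the pattern in the SQLAlchemy documentation, augments keyword arguments and then defers to the default function; ``Base`` and ``engine`` are assumed from the earlier automap sketch::

    from sqlalchemy.ext.automap import generate_relationship
    from sqlalchemy.orm import interfaces

    def _gen_relationship(base, direction, return_fn,
                          attrname, local_cls, referred_cls, **kw):
        if direction is interfaces.ONETOMANY:
            kw['cascade'] = 'all, delete-orphan'
        # delegate to the built-in function to actually produce the result
        return generate_relationship(base, direction, return_fn,
                                     attrname, local_cls, referred_cls, **kw)

    Base.prepare(engine, reflect=True,
                 generate_relationship=_gen_relationship)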
@@ -877,6 +895,19 @@ def _relationships_for_fks(automap_base, map_config, table_to_map_config,
             constraint
         )

+        o2m_kws = {}
+        nullable = False not in set([fk.parent.nullable for fk in fks])
+        if not nullable:
+            o2m_kws['cascade'] = "all, delete-orphan"
+
+            if constraint.ondelete and \
+                    constraint.ondelete.lower() == "cascade":
+                o2m_kws['passive_deletes'] = True
+        else:
+            if constraint.ondelete and \
+                    constraint.ondelete.lower() == "set null":
+                o2m_kws['passive_deletes'] = True
+
         create_backref = backref_name not in referred_cfg.properties

         if relationship_name not in map_config.properties:
@@ -885,7 +916,8 @@ def _relationships_for_fks(automap_base, map_config, table_to_map_config,
                     automap_base,
                     interfaces.ONETOMANY, backref,
                     backref_name, referred_cls, local_cls,
-                    collection_class=collection_class)
+                    collection_class=collection_class,
+                    **o2m_kws)
             else:
                 backref_obj = None
             rel = generate_relationship(automap_base,
@@ -916,7 +948,8 @@ def _relationships_for_fks(automap_base, map_config, table_to_map_config,
                         fk.parent
                         for fk in constraint.elements],
                     back_populates=relationship_name,
-                    collection_class=collection_class)
+                    collection_class=collection_class,
+                    **o2m_kws)
                 if rel is not None:
                     referred_cfg.properties[backref_name] = rel
                     map_config.properties[
lib/python3.4/site-packages/sqlalchemy/ext/baked.py (new file, 523 lines)
@@ -0,0 +1,523 @@
+# sqlalchemy/ext/baked.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+"""Baked query extension.
+
+Provides a creational pattern for the :class:`.query.Query` object which
+allows the fully constructed object, Core select statement, and string
+compiled result to be fully cached.
+
+
+"""
+
+from ..orm.query import Query
+from ..orm import strategies, attributes, properties, \
+    strategy_options, util as orm_util, interfaces
+from .. import log as sqla_log
+from ..sql import util as sql_util
+from ..orm import exc as orm_exc
+from .. import exc as sa_exc
+from .. import util
+
+import copy
+import logging
+
+log = logging.getLogger(__name__)
+
+
+class BakedQuery(object):
+    """A builder object for :class:`.query.Query` objects."""
+
+    __slots__ = 'steps', '_bakery', '_cache_key', '_spoiled'
+
+    def __init__(self, bakery, initial_fn, args=()):
+        self._cache_key = ()
+        self._update_cache_key(initial_fn, args)
+        self.steps = [initial_fn]
+        self._spoiled = False
+        self._bakery = bakery
+
+    @classmethod
+    def bakery(cls, size=200):
+        """Construct a new bakery."""
+
+        _bakery = util.LRUCache(size)
+
+        def call(initial_fn, *args):
+            return cls(_bakery, initial_fn, args)
+
+        return call
+
+    def _clone(self):
+        b1 = BakedQuery.__new__(BakedQuery)
+        b1._cache_key = self._cache_key
+        b1.steps = list(self.steps)
+        b1._bakery = self._bakery
+        b1._spoiled = self._spoiled
+        return b1
+
+    def _update_cache_key(self, fn, args=()):
+        self._cache_key += (fn.__code__,) + args
+
+    def __iadd__(self, other):
+        if isinstance(other, tuple):
+            self.add_criteria(*other)
+        else:
+            self.add_criteria(other)
+        return self
+
+    def __add__(self, other):
+        if isinstance(other, tuple):
+            return self.with_criteria(*other)
+        else:
+            return self.with_criteria(other)
+
+    def add_criteria(self, fn, *args):
+        """Add a criteria function to this :class:`.BakedQuery`.
+
+        This is equivalent to using the ``+=`` operator to
+        modify a :class:`.BakedQuery` in-place.
+
+        """
+        self._update_cache_key(fn, args)
+        self.steps.append(fn)
+        return self
+
+    def with_criteria(self, fn, *args):
+        """Add a criteria function to a :class:`.BakedQuery` cloned from this one.
+
+        This is equivalent to using the ``+`` operator to
+        produce a new :class:`.BakedQuery` with modifications.
+
+        """
+        return self._clone().add_criteria(fn, *args)
+
+    def for_session(self, session):
+        """Return a :class:`.Result` object for this :class:`.BakedQuery`.
+
+        This is equivalent to calling the :class:`.BakedQuery` as a
+        Python callable, e.g. ``result = my_baked_query(session)``.
+
+        """
+        return Result(self, session)
+
+    def __call__(self, session):
+        return self.for_session(session)
+
+    def spoil(self, full=False):
+        """Cancel any query caching that will occur on this BakedQuery object.
+
+        The BakedQuery can continue to be used normally, however additional
+        creational functions will not be cached; they will be called
+        on every invocation.
+
+        This is to support the case where a particular step in constructing
+        a baked query disqualifies the query from being cacheable, such
+        as a variant that relies upon some uncacheable value.
+
+        :param full: if False, only functions added to this
+         :class:`.BakedQuery` object subsequent to the spoil step will be
+         non-cached; the state of the :class:`.BakedQuery` up until
+         this point will be pulled from the cache.  If True, then the
+         entire :class:`.Query` object is built from scratch each
+         time, with all creational functions being called on each
+         invocation.
+
+        """
+        if not full:
+            _spoil_point = self._clone()
+            _spoil_point._cache_key += ('_query_only', )
+            self.steps = [_spoil_point._retrieve_baked_query]
+        self._spoiled = True
+        return self
+
+    def _retrieve_baked_query(self, session):
+        query = self._bakery.get(self._cache_key, None)
+        if query is None:
+            query = self._as_query(session)
+            self._bakery[self._cache_key] = query.with_session(None)
+        return query.with_session(session)
+
+    def _bake(self, session):
+        query = self._as_query(session)
+
+        context = query._compile_context()
+        self._bake_subquery_loaders(session, context)
+        context.session = None
+        context.query = query = context.query.with_session(None)
+        query._execution_options = query._execution_options.union(
+            {"compiled_cache": self._bakery}
+        )
+        # we'll be holding onto the query for some of its state,
+        # so delete some compilation-use-only attributes that can take up
+        # space
+        for attr in (
+                '_correlate', '_from_obj', '_mapper_adapter_map',
+                '_joinpath', '_joinpoint'):
+            query.__dict__.pop(attr, None)
+        self._bakery[self._cache_key] = context
+        return context
+
+    def _as_query(self, session):
+        query = self.steps[0](session)
+
+        for step in self.steps[1:]:
+            query = step(query)
+        return query
+
+    def _bake_subquery_loaders(self, session, context):
+        """convert subquery eager loaders in the cache into baked queries.
+
+        For subquery eager loading to work, all we need here is that the
+        Query point to the correct session when it is run.  However, since
+        we are "baking" anyway, we may as well also turn the query into
+        a "baked" query so that we save on performance too.
+
+        """
+        context.attributes['baked_queries'] = baked_queries = []
+        for k, v in list(context.attributes.items()):
+            if isinstance(v, Query):
+                if 'subquery' in k:
+                    bk = BakedQuery(self._bakery, lambda *args: v)
+                    bk._cache_key = self._cache_key + k
+                    bk._bake(session)
+                    baked_queries.append((k, bk._cache_key, v))
+                del context.attributes[k]
+
+    def _unbake_subquery_loaders(self, session, context, params):
+        """Retrieve subquery eager loaders stored by _bake_subquery_loaders
+        and turn them back into Result objects that will iterate just
+        like a Query object.
+
+        """
+        for k, cache_key, query in context.attributes["baked_queries"]:
+            bk = BakedQuery(self._bakery, lambda sess: query.with_session(sess))
+            bk._cache_key = cache_key
+            context.attributes[k] = bk.for_session(session).params(**params)
+
+
+class Result(object):
+    """Invokes a :class:`.BakedQuery` against a :class:`.Session`.
+
+    The :class:`.Result` object is where the actual :class:`.query.Query`
+    object gets created, or retrieved from the cache,
+    against a target :class:`.Session`, and is then invoked for results.
+
+    """
+    __slots__ = 'bq', 'session', '_params'
+
+    def __init__(self, bq, session):
+        self.bq = bq
+        self.session = session
+        self._params = {}
+
+    def params(self, *args, **kw):
+        """Specify parameters to be replaced into the string SQL statement."""
+
+        if len(args) == 1:
+            kw.update(args[0])
+        elif len(args) > 0:
+            raise sa_exc.ArgumentError(
+                "params() takes zero or one positional argument, "
+                "which is a dictionary.")
+        self._params.update(kw)
+        return self
+
+    def _as_query(self):
+        return self.bq._as_query(self.session).params(self._params)
+
+    def __str__(self):
+        return str(self._as_query())
+
+    def __iter__(self):
+        bq = self.bq
+        if bq._spoiled:
+            return iter(self._as_query())
+
+        baked_context = bq._bakery.get(bq._cache_key, None)
+        if baked_context is None:
+            baked_context = bq._bake(self.session)
+
+        context = copy.copy(baked_context)
+        context.session = self.session
+        context.attributes = context.attributes.copy()
+
+        bq._unbake_subquery_loaders(self.session, context, self._params)
+
+        context.statement.use_labels = True
+        if context.autoflush and not context.populate_existing:
+            self.session._autoflush()
+        return context.query.params(self._params).\
+            with_session(self.session)._execute_and_instances(context)
+
+    def first(self):
+        """Return the first row.
+
+        Equivalent to :meth:`.Query.first`.
+
+        """
+        bq = self.bq.with_criteria(lambda q: q.slice(0, 1))
+        ret = list(bq.for_session(self.session).params(self._params))
+        if len(ret) > 0:
+            return ret[0]
+        else:
+            return None
+
+    def one(self):
+        """Return exactly one result or raise an exception.
+
+        Equivalent to :meth:`.Query.one`.
+
+        """
+        ret = list(self)
+
+        l = len(ret)
+        if l == 1:
+            return ret[0]
+        elif l == 0:
+            raise orm_exc.NoResultFound("No row was found for one()")
+        else:
+            raise orm_exc.MultipleResultsFound(
+                "Multiple rows were found for one()")
+
+    def one_or_none(self):
+        """Return one or zero results, or raise an exception for multiple
+        rows.
+
+        Equivalent to :meth:`.Query.one_or_none`.
+
+        .. versionadded:: 1.0.9
+
+        """
+        ret = list(self)
+
+        l = len(ret)
+        if l == 1:
+            return ret[0]
+        elif l == 0:
+            return None
+        else:
+            raise orm_exc.MultipleResultsFound(
+                "Multiple rows were found for one_or_none()")
+
+    def all(self):
+        """Return all rows.
+
+        Equivalent to :meth:`.Query.all`.
+
+        """
+        return list(self)
+
+    def get(self, ident):
+        """Retrieve an object based on identity.
+
+        Equivalent to :meth:`.Query.get`.
+
+        """
+
+        query = self.bq.steps[0](self.session)
+        return query._get_impl(ident, self._load_on_ident)
+
+    def _load_on_ident(self, query, key):
+        """Load the given identity key from the database."""
+
+        ident = key[1]
+
+        mapper = query._mapper_zero()
+
+        _get_clause, _get_params = mapper._get_clause
+
+        def setup(query):
+            _lcl_get_clause = _get_clause
+            q = query._clone()
+            q._get_condition()
+            q._order_by = None
+
+            # None present in ident - turn those comparisons
+            # into "IS NULL"
+            if None in ident:
+                nones = set([
+                    _get_params[col].key for col, value in
+                    zip(mapper.primary_key, ident) if value is None
+                ])
+                _lcl_get_clause = sql_util.adapt_criterion_to_null(
+                    _lcl_get_clause, nones)
+
+            _lcl_get_clause = q._adapt_clause(_lcl_get_clause, True, False)
+            q._criterion = _lcl_get_clause
+            return q
+
+        # cache the query against a key that includes
+        # which positions in the primary key are NULL
+        # (remember, we can map to an OUTER JOIN)
+        bq = self.bq
+
+        # add the clause we got from mapper._get_clause to the cache
+        # key so that if a race causes multiple calls to _get_clause,
+        # we've cached on ours
+        bq = bq._clone()
+        bq._cache_key += (_get_clause, )
+
+        bq = bq.with_criteria(setup, tuple(elem is None for elem in ident))
+
+        params = dict([
+            (_get_params[primary_key].key, id_val)
+            for id_val, primary_key in zip(ident, mapper.primary_key)
+        ])
+
+        result = list(bq.for_session(self.session).params(**params))
+        l = len(result)
+        if l > 1:
+            raise orm_exc.MultipleResultsFound()
+        elif l:
+            return result[0]
+        else:
+            return None
+
+
+def bake_lazy_loaders():
+    """Enable the use of baked queries for all lazyloaders systemwide.
+
+    This operation should be safe for all lazy loaders, and will reduce
+    Python overhead for these operations.
+
+    """
+    BakedLazyLoader._strategy_keys[:] = []
+
+    properties.RelationshipProperty.strategy_for(
+        lazy="select")(BakedLazyLoader)
+    properties.RelationshipProperty.strategy_for(
+        lazy=True)(BakedLazyLoader)
+    properties.RelationshipProperty.strategy_for(
+        lazy="baked_select")(BakedLazyLoader)
+
+    strategies.LazyLoader._strategy_keys[:] = BakedLazyLoader._strategy_keys[:]
+
+
+def unbake_lazy_loaders():
+    """Disable the use of baked queries for all lazyloaders systemwide.
+
+    This operation reverts the changes produced by :func:`.bake_lazy_loaders`.
+
+    """
+    strategies.LazyLoader._strategy_keys[:] = []
+    BakedLazyLoader._strategy_keys[:] = []
+
+    properties.RelationshipProperty.strategy_for(
+        lazy="select")(strategies.LazyLoader)
+    properties.RelationshipProperty.strategy_for(
+        lazy=True)(strategies.LazyLoader)
+    properties.RelationshipProperty.strategy_for(
+        lazy="baked_select")(BakedLazyLoader)
+    assert strategies.LazyLoader._strategy_keys
+
+
+@sqla_log.class_logger
+@properties.RelationshipProperty.strategy_for(lazy="baked_select")
+class BakedLazyLoader(strategies.LazyLoader):
+
+    def _emit_lazyload(self, session, state, ident_key, passive):
+        q = BakedQuery(
+            self.mapper._compiled_cache,
+            lambda session: session.query(self.mapper))
+        q.add_criteria(
+            lambda q: q._adapt_all_clauses()._with_invoke_all_eagers(False),
+            self.parent_property)
+
+        if not self.parent_property.bake_queries:
+            q.spoil(full=True)
+
+        if self.parent_property.secondary is not None:
+            q.add_criteria(
+                lambda q:
+                q.select_from(self.mapper, self.parent_property.secondary))
+
+        pending = not state.key
+
+        # don't autoflush on pending
+        if pending or passive & attributes.NO_AUTOFLUSH:
+            q.add_criteria(lambda q: q.autoflush(False))
+
+        if state.load_path:
+            q.spoil()
+            q.add_criteria(
+                lambda q:
+                q._with_current_path(state.load_path[self.parent_property]))
+
+        if state.load_options:
+            q.spoil()
+            q.add_criteria(
+                lambda q: q._conditional_options(*state.load_options))
+
+        if self.use_get:
+            return q(session)._load_on_ident(
+                session.query(self.mapper), ident_key)
+
+        if self.parent_property.order_by:
+            q.add_criteria(
+                lambda q:
+                q.order_by(*util.to_list(self.parent_property.order_by)))
+
+        for rev in self.parent_property._reverse_property:
+            # reverse props that are MANYTOONE are loading *this*
+            # object from get(), so don't need to eager out to those.
+            if rev.direction is interfaces.MANYTOONE and \
+                    rev._use_get and \
+                    not isinstance(rev.strategy, strategies.LazyLoader):
+                q.add_criteria(
+                    lambda q:
+                    q.options(
+                        strategy_options.Load(
+                            rev.parent).baked_lazyload(rev.key)))
+
+        lazy_clause, params = self._generate_lazy_clause(state, passive)
+
+        if pending:
+            if orm_util._none_set.intersection(params.values()):
+                return None
+
+        q.add_criteria(lambda q: q.filter(lazy_clause))
+        result = q(session).params(**params).all()
+        if self.uselist:
+            return result
+        else:
+            l = len(result)
+            if l:
+                if l > 1:
+                    util.warn(
+                        "Multiple rows returned with "
+                        "uselist=False for lazily-loaded attribute '%s' "
+                        % self.parent_property)
+
+                return result[0]
+            else:
+                return None
+
+
+@strategy_options.loader_option()
+def baked_lazyload(loadopt, attr):
+    """Indicate that the given attribute should be loaded using "lazy"
+    loading with a "baked" query used in the load.
+
+    """
+    return loadopt.set_relationship_strategy(attr, {"lazy": "baked_select"})
+
+
+@baked_lazyload._add_unbound_fn
+def baked_lazyload(*keys):
+    return strategy_options._UnboundLoad._from_keys(
+        strategy_options._UnboundLoad.baked_lazyload, keys, False, {})
+
+
+@baked_lazyload._add_unbound_all_fn
+def baked_lazyload_all(*keys):
+    return strategy_options._UnboundLoad._from_keys(
+        strategy_options._UnboundLoad.baked_lazyload, keys, True, {})
+
+baked_lazyload = baked_lazyload._unbound_fn
+baked_lazyload_all = baked_lazyload_all._unbound_all_fn

+bakery = BakedQuery.bakery
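A minimal usage sketch for the extension added above, assuming a mapped ``User`` class exists: the lambdas are keyed by their code objects, so the constructed ``Query`` and its compiled statement are cached across calls::

    from sqlalchemy import bindparam
    from sqlalchemy.ext import baked

    bakery = baked.bakery()

    def search_for_user(session, username):
        baked_query = bakery(lambda session: session.query(User))
        baked_query += lambda q: q.filter(
            User.name == bindparam('username'))
        return baked_query(session).params(username=username).all()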
lib/python3.4/site-packages/sqlalchemy/ext/compiler.py

@@ -1,5 +1,5 @@
 # ext/compiler.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -121,9 +121,19 @@ below where we generate a CHECK constraint that embeds a SQL expression::
    def compile_my_constraint(constraint, ddlcompiler, **kw):
        return "CONSTRAINT %s CHECK (%s)" % (
            constraint.name,
-            ddlcompiler.sql_compiler.process(constraint.expression)
+            ddlcompiler.sql_compiler.process(
+                constraint.expression, literal_binds=True)
        )

+Above, we add an additional flag to the process step as called by
+:meth:`.SQLCompiler.process`, which is the ``literal_binds`` flag.  This
+indicates that any SQL expression which refers to a :class:`.BindParameter`
+object or other "literal" object such as those which refer to strings or
+integers should be rendered **in-place**, rather than being referred to as
+a bound parameter; when emitting DDL, bound parameters are typically not
+supported.
+
+
 .. _enabling_compiled_autocommit:

 Enabling Autocommit on a Construct
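The effect of ``literal_binds`` can be seen on any statement through the public ``compile_kwargs`` hook; a small sketch using 1.0-era Core APIs::

    from sqlalchemy import Column, Integer, MetaData, Table, select
    from sqlalchemy.dialects import sqlite

    t = Table('t', MetaData(), Column('x', Integer))
    stmt = select([t.c.x]).where(t.c.x == 5)

    # default rendering refers to a bound parameter
    print(stmt.compile(dialect=sqlite.dialect()))
    # SELECT t.x FROM t WHERE t.x = ?

    # literal_binds renders the value in-place, as DDL requires
    print(stmt.compile(dialect=sqlite.dialect(),
                       compile_kwargs={"literal_binds": True}))
    # SELECT t.x FROM t WHERE t.x = 5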
(file diff suppressed because it is too large)
lib/python3.4/site-packages/sqlalchemy/ext/declarative/api.py

@@ -1,5 +1,5 @@
 # ext/declarative/api.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -7,13 +7,14 @@
 """Public API functions and helpers for declarative."""


-from ...schema import Table, MetaData
-from ...orm import synonym as _orm_synonym, mapper,\
+from ...schema import Table, MetaData, Column
+from ...orm import synonym as _orm_synonym, \
     comparable_property,\
-    interfaces, properties
+    interfaces, properties, attributes
 from ...orm.util import polymorphic_union
 from ...orm.base import _mapper_or_none
-from ...util import OrderedDict
+from ...util import OrderedDict, hybridmethod, hybridproperty
 from ... import util
 from ... import exc
 import weakref

@@ -21,7 +22,6 @@ from .base import _as_declarative, \
     _declarative_constructor,\
     _DeferredMapperConfig, _add_attribute
-from .clsregistry import _class_resolver
 from . import clsregistry


 def instrument_declarative(cls, registry, metadata):
@@ -157,12 +157,90 @@ class declared_attr(interfaces._MappedAttribute, property):

     """

-    def __init__(self, fget, *arg, **kw):
-        super(declared_attr, self).__init__(fget, *arg, **kw)
+    def __init__(self, fget, cascading=False):
+        super(declared_attr, self).__init__(fget)
         self.__doc__ = fget.__doc__
+        self._cascading = cascading

     def __get__(desc, self, cls):
-        return desc.fget(cls)
+        reg = cls.__dict__.get('_sa_declared_attr_reg', None)
+        if reg is None:
+            manager = attributes.manager_of_class(cls)
+            if manager is None:
+                util.warn(
+                    "Unmanaged access of declarative attribute %s from "
+                    "non-mapped class %s" %
+                    (desc.fget.__name__, cls.__name__))
+            return desc.fget(cls)
+
+        if reg is None:
+            return desc.fget(cls)
+        elif desc in reg:
+            return reg[desc]
+        else:
+            reg[desc] = obj = desc.fget(cls)
+            return obj
+
+    @hybridmethod
+    def _stateful(cls, **kw):
+        return _stateful_declared_attr(**kw)
+
+    @hybridproperty
+    def cascading(cls):
+        """Mark a :class:`.declared_attr` as cascading.
+
+        This is a special-use modifier which indicates that a column
+        or MapperProperty-based declared attribute should be configured
+        distinctly per mapped subclass, within a mapped-inheritance scenario.
+
+        Below, both MyClass as well as MySubClass will have a distinct
+        ``id`` Column object established::
+
+            class HasSomeAttribute(object):
+                @declared_attr.cascading
+                def some_id(cls):
+                    if has_inherited_table(cls):
+                        return Column(
+                            ForeignKey('myclass.id'), primary_key=True)
+                    else:
+                        return Column(Integer, primary_key=True)
+
+                    return Column('id', Integer, primary_key=True)
+
+            class MyClass(HasSomeAttribute, Base):
+                ""
+                # ...
+
+            class MySubClass(MyClass):
+                ""
+                # ...
+
+        The behavior of the above configuration is that ``MySubClass``
+        will refer to both its own ``id`` column as well as that of
+        ``MyClass`` underneath the attribute named ``some_id``.
+
+        .. seealso::
+
+            :ref:`declarative_inheritance`
+
+            :ref:`mixin_inheritance_columns`
+
+
+        """
+        return cls._stateful(cascading=True)
+
+
+class _stateful_declared_attr(declared_attr):
+    def __init__(self, **kw):
+        self.kw = kw
+
+    def _stateful(self, **kw):
+        new_kw = self.kw.copy()
+        new_kw.update(kw)
+        return _stateful_declared_attr(**new_kw)
+
+    def __call__(self, fn):
+        return declared_attr(fn, **self.kw)


 def declarative_base(bind=None, metadata=None, mapper=None, cls=object,
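How the modifier composes, per the code above: accessing ``declared_attr.cascading`` yields a ``_stateful_declared_attr(cascading=True)``, and calling that on a function constructs ``declared_attr(fn, cascading=True)``. A small sketch of the equivalence::

    from sqlalchemy import Column, Integer
    from sqlalchemy.ext.declarative import declared_attr

    def some_id(cls):
        return Column(Integer, primary_key=True)

    # the decorator form...
    attr_a = declared_attr.cascading(some_id)
    # ...is equivalent to passing the new constructor flag directly:
    attr_b = declared_attr(some_id, cascading=True)

    assert attr_a._cascading and attr_b._cascading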
@@ -319,6 +397,15 @@ class ConcreteBase(object):
                'polymorphic_identity':'manager',
                'concrete':True}

+    .. seealso::
+
+        :class:`.AbstractConcreteBase`
+
+        :ref:`concrete_inheritance`
+
+        :ref:`inheritance_concrete_helpers`
+
+
    """

    @classmethod
@@ -349,9 +436,11 @@ class AbstractConcreteBase(ConcreteBase):
    ``__declare_last__()`` function, which is essentially
    a hook for the :meth:`.after_configured` event.

-    :class:`.AbstractConcreteBase` does not produce a mapped
-    table for the class itself.  Compare to :class:`.ConcreteBase`,
-    which does.
+    :class:`.AbstractConcreteBase` does produce a mapped class
+    for the base class, however it is not persisted to any table; it
+    is instead mapped directly to the "polymorphic" selectable directly
+    and is only used for selecting.  Compare to :class:`.ConcreteBase`,
+    which does create a persisted table for the base class.

    Example::

@@ -365,20 +454,79 @@ class AbstractConcreteBase(ConcreteBase):
            employee_id = Column(Integer, primary_key=True)
            name = Column(String(50))
            manager_data = Column(String(40))

            __mapper_args__ = {
-                            'polymorphic_identity':'manager',
-                            'concrete':True}
+                'polymorphic_identity':'manager',
+                'concrete':True}
+
+    The abstract base class is handled by declarative in a special way;
+    at class configuration time, it behaves like a declarative mixin
+    or an ``__abstract__`` base class.  Once classes are configured
+    and mappings are produced, it then gets mapped itself, but
+    after all of its decscendants.  This is a very unique system of mapping
+    not found in any other SQLAlchemy system.
+
+    Using this approach, we can specify columns and properties
+    that will take place on mapped subclasses, in the way that
+    we normally do as in :ref:`declarative_mixins`::
+
+        class Company(Base):
+            __tablename__ = 'company'
+            id = Column(Integer, primary_key=True)
+
+        class Employee(AbstractConcreteBase, Base):
+            employee_id = Column(Integer, primary_key=True)
+
+            @declared_attr
+            def company_id(cls):
+                return Column(ForeignKey('company.id'))
+
+            @declared_attr
+            def company(cls):
+                return relationship("Company")
+
+        class Manager(Employee):
+            __tablename__ = 'manager'
+
+            name = Column(String(50))
+            manager_data = Column(String(40))
+
+            __mapper_args__ = {
+                'polymorphic_identity':'manager',
+                'concrete':True}
+
+    When we make use of our mappings however, both ``Manager`` and
+    ``Employee`` will have an independently usable ``.company`` attribute::
+
+        session.query(Employee).filter(Employee.company.has(id=5))
+
+    .. versionchanged:: 1.0.0 - The mechanics of :class:`.AbstractConcreteBase`
+       have been reworked to support relationships established directly
+       on the abstract base, without any special configurational steps.
+
+    .. seealso::
+
+        :class:`.ConcreteBase`
+
+        :ref:`concrete_inheritance`
+
+        :ref:`inheritance_concrete_helpers`
+
    """

-    __abstract__ = True
+    __no_table__ = True

    @classmethod
    def __declare_first__(cls):
-        if hasattr(cls, '__mapper__'):
+        cls._sa_decl_prepare_nocascade()
+
+    @classmethod
+    def _sa_decl_prepare_nocascade(cls):
+        if getattr(cls, '__mapper__', None):
            return

        clsregistry.add_class(cls.__name__, cls)
+        to_map = _DeferredMapperConfig.config_for_cls(cls)

        # can't rely on 'self_and_descendants' here
        # since technically an immediate subclass
        # might not be mapped, but a subclass
@@ -392,11 +540,33 @@ class AbstractConcreteBase(ConcreteBase):
            if mn is not None:
                mappers.append(mn)
        pjoin = cls._create_polymorphic_union(mappers)
-        cls.__mapper__ = m = mapper(cls, pjoin, polymorphic_on=pjoin.c.type)
+
+        # For columns that were declared on the class, these
+        # are normally ignored with the "__no_table__" mapping,
+        # unless they have a different attribute key vs. col name
+        # and are in the properties argument.
+        # In that case, ensure we update the properties entry
+        # to the correct column from the pjoin target table.
+        declared_cols = set(to_map.declared_columns)
+        for k, v in list(to_map.properties.items()):
+            if v in declared_cols:
+                to_map.properties[k] = pjoin.c[v.key]
+
+        to_map.local_table = pjoin
+
+        m_args = to_map.mapper_args_fn or dict
+
+        def mapper_args():
+            args = m_args()
+            args['polymorphic_on'] = pjoin.c.type
+            return args
+        to_map.mapper_args_fn = mapper_args
+
+        m = to_map.map()
+
        for scls in cls.__subclasses__():
            sm = _mapper_or_none(scls)
-            if sm.concrete and cls in scls.__bases__:
+            if sm and sm.concrete and cls in scls.__bases__:
                sm._set_concrete_base(m)

lib/python3.4/site-packages/sqlalchemy/ext/declarative/base.py

@@ -1,5 +1,5 @@
 # ext/declarative/base.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -19,6 +19,9 @@ from ... import event
 from . import clsregistry
 import collections
 import weakref
+from sqlalchemy.orm import instrumentation
+
+declared_attr = declarative_props = None


 def _declared_mapping_info(cls):
@ -32,322 +35,431 @@ def _declared_mapping_info(cls):
|
|||
return None
|
||||
|
||||
|
||||
def _as_declarative(cls, classname, dict_):
|
||||
from .api import declared_attr
|
||||
def _resolve_for_abstract(cls):
|
||||
if cls is object:
|
||||
return None
|
||||
|
||||
# dict_ will be a dictproxy, which we can't write to, and we need to!
|
||||
dict_ = dict(dict_)
|
||||
if _get_immediate_cls_attr(cls, '__abstract__', strict=True):
|
||||
for sup in cls.__bases__:
|
||||
sup = _resolve_for_abstract(sup)
|
||||
if sup is not None:
|
||||
return sup
|
||||
else:
|
||||
return None
|
||||
else:
|
||||
return cls
|
||||
|
||||
column_copies = {}
|
||||
potential_columns = {}
|
||||
|
||||
mapper_args_fn = None
|
||||
table_args = inherited_table_args = None
|
||||
tablename = None
|
||||
def _get_immediate_cls_attr(cls, attrname, strict=False):
|
||||
"""return an attribute of the class that is either present directly
|
||||
on the class, e.g. not on a superclass, or is from a superclass but
|
||||
this superclass is a mixin, that is, not a descendant of
|
||||
the declarative base.
|
||||
|
||||
declarative_props = (declared_attr, util.classproperty)
|
||||
This is used to detect attributes that indicate something about
|
||||
a mapped class independently from any mapped classes that it may
|
||||
inherit from.
|
||||
|
||||
"""
|
||||
if not issubclass(cls, object):
|
||||
return None
|
||||
|
||||
for base in cls.__mro__:
|
||||
_is_declarative_inherits = hasattr(base, '_decl_class_registry')
|
||||
|
||||
if '__declare_last__' in base.__dict__:
|
||||
@event.listens_for(mapper, "after_configured")
|
||||
def go():
|
||||
cls.__declare_last__()
|
||||
if '__declare_first__' in base.__dict__:
|
||||
@event.listens_for(mapper, "before_configured")
|
||||
def go():
|
||||
cls.__declare_first__()
|
||||
if '__abstract__' in base.__dict__ and base.__abstract__:
|
||||
if (base is cls or
|
||||
(base in cls.__bases__ and not _is_declarative_inherits)):
|
||||
return
|
||||
|
||||
class_mapped = _declared_mapping_info(base) is not None
|
||||
|
||||
for name, obj in vars(base).items():
|
||||
if name == '__mapper_args__':
|
||||
if not mapper_args_fn and (
|
||||
not class_mapped or
|
||||
isinstance(obj, declarative_props)
|
||||
):
|
||||
# don't even invoke __mapper_args__ until
|
||||
# after we've determined everything about the
|
||||
# mapped table.
|
||||
# make a copy of it so a class-level dictionary
|
||||
# is not overwritten when we update column-based
|
||||
# arguments.
|
||||
mapper_args_fn = lambda: dict(cls.__mapper_args__)
|
||||
elif name == '__tablename__':
|
||||
if not tablename and (
|
||||
not class_mapped or
|
||||
isinstance(obj, declarative_props)
|
||||
):
|
||||
tablename = cls.__tablename__
|
||||
elif name == '__table_args__':
|
||||
if not table_args and (
|
||||
not class_mapped or
|
||||
isinstance(obj, declarative_props)
|
||||
):
|
||||
table_args = cls.__table_args__
|
||||
if not isinstance(table_args, (tuple, dict, type(None))):
|
||||
raise exc.ArgumentError(
|
||||
"__table_args__ value must be a tuple, "
|
||||
"dict, or None")
|
||||
if base is not cls:
|
||||
inherited_table_args = True
|
||||
elif class_mapped:
|
||||
if isinstance(obj, declarative_props):
|
||||
util.warn("Regular (i.e. not __special__) "
|
||||
"attribute '%s.%s' uses @declared_attr, "
|
||||
"but owning class %s is mapped - "
|
||||
"not applying to subclass %s."
|
||||
% (base.__name__, name, base, cls))
|
||||
continue
|
||||
elif base is not cls:
|
||||
# we're a mixin.
|
||||
if isinstance(obj, Column):
|
||||
if getattr(cls, name) is not obj:
|
||||
# if column has been overridden
|
||||
# (like by the InstrumentedAttribute of the
|
||||
# superclass), skip
|
||||
continue
|
||||
if obj.foreign_keys:
|
||||
raise exc.InvalidRequestError(
|
||||
"Columns with foreign keys to other columns "
|
||||
"must be declared as @declared_attr callables "
|
||||
"on declarative mixin classes. ")
|
||||
if name not in dict_ and not (
|
||||
'__table__' in dict_ and
|
||||
(obj.name or name) in dict_['__table__'].c
|
||||
) and name not in potential_columns:
|
||||
potential_columns[name] = \
|
||||
column_copies[obj] = \
|
||||
obj.copy()
|
||||
column_copies[obj]._creation_order = \
|
||||
obj._creation_order
|
||||
elif isinstance(obj, MapperProperty):
|
||||
raise exc.InvalidRequestError(
|
||||
"Mapper properties (i.e. deferred,"
|
||||
"column_property(), relationship(), etc.) must "
|
||||
"be declared as @declared_attr callables "
|
||||
"on declarative mixin classes.")
|
||||
elif isinstance(obj, declarative_props):
|
||||
dict_[name] = ret = \
|
||||
column_copies[obj] = getattr(cls, name)
|
||||
if isinstance(ret, (Column, MapperProperty)) and \
|
||||
ret.doc is None:
|
||||
ret.doc = obj.__doc__
|
||||
|
||||
# apply inherited columns as we should
|
||||
for k, v in potential_columns.items():
|
||||
dict_[k] = v
|
||||
|
||||
if inherited_table_args and not tablename:
|
||||
table_args = None
|
||||
|
||||
clsregistry.add_class(classname, cls)
|
||||
our_stuff = util.OrderedDict()
|
||||
|
||||
for k in list(dict_):
|
||||
|
||||
# TODO: improve this ? all dunders ?
|
||||
if k in ('__table__', '__tablename__', '__mapper_args__'):
|
||||
continue
|
||||
|
||||
value = dict_[k]
|
||||
if isinstance(value, declarative_props):
|
||||
value = getattr(cls, k)
|
||||
|
||||
elif isinstance(value, QueryableAttribute) and \
|
||||
value.class_ is not cls and \
|
||||
value.key != k:
|
||||
# detect a QueryableAttribute that's already mapped being
|
||||
# assigned elsewhere in userland, turn into a synonym()
|
||||
value = synonym(value.key)
|
||||
setattr(cls, k, value)
|
||||
|
||||
if (isinstance(value, tuple) and len(value) == 1 and
|
||||
isinstance(value[0], (Column, MapperProperty))):
|
||||
util.warn("Ignoring declarative-like tuple value of attribute "
|
||||
"%s: possibly a copy-and-paste error with a comma "
|
||||
"left at the end of the line?" % k)
|
||||
continue
|
||||
if not isinstance(value, (Column, MapperProperty)):
|
||||
if not k.startswith('__'):
|
||||
dict_.pop(k)
|
||||
setattr(cls, k, value)
|
||||
continue
|
||||
if k == 'metadata':
|
||||
raise exc.InvalidRequestError(
|
||||
"Attribute name 'metadata' is reserved "
|
||||
"for the MetaData instance when using a "
|
||||
"declarative base class."
|
||||
)
|
||||
prop = clsregistry._deferred_relationship(cls, value)
|
||||
our_stuff[k] = prop
|
||||
|
||||
# set up attributes in the order they were created
|
||||
our_stuff.sort(key=lambda key: our_stuff[key]._creation_order)
|
||||
|
||||
# extract columns from the class dict
|
||||
declared_columns = set()
|
||||
name_to_prop_key = collections.defaultdict(set)
|
||||
for key, c in list(our_stuff.items()):
|
||||
if isinstance(c, (ColumnProperty, CompositeProperty)):
|
||||
for col in c.columns:
|
||||
if isinstance(col, Column) and \
|
||||
col.table is None:
|
||||
_undefer_column_name(key, col)
|
||||
if not isinstance(c, CompositeProperty):
|
||||
name_to_prop_key[col.name].add(key)
|
||||
declared_columns.add(col)
|
||||
elif isinstance(c, Column):
|
||||
_undefer_column_name(key, c)
|
||||
name_to_prop_key[c.name].add(key)
|
||||
declared_columns.add(c)
|
||||
# if the column is the same name as the key,
|
||||
# remove it from the explicit properties dict.
|
||||
# the normal rules for assigning column-based properties
|
||||
# will take over, including precedence of columns
|
||||
# in multi-column ColumnProperties.
|
||||
if key == c.key:
|
||||
del our_stuff[key]
|
||||
|
||||
for name, keys in name_to_prop_key.items():
|
||||
if len(keys) > 1:
|
||||
util.warn(
|
||||
"On class %r, Column object %r named directly multiple times, "
|
||||
"only one will be used: %s" %
|
||||
(classname, name, (", ".join(sorted(keys))))
|
||||
)
|
||||
|
||||
declared_columns = sorted(
|
||||
declared_columns, key=lambda c: c._creation_order)
|
||||
table = None
|
||||
|
||||
if hasattr(cls, '__table_cls__'):
|
||||
table_cls = util.unbound_method_to_callable(cls.__table_cls__)
|
||||
if attrname in base.__dict__ and (
|
||||
base is cls or
|
||||
((base in cls.__bases__ if strict else True)
|
||||
and not _is_declarative_inherits)
|
||||
):
|
||||
return getattr(base, attrname)
|
||||
else:
|
||||
table_cls = Table
|
||||
return None
|
||||
|
||||
if '__table__' not in dict_:
|
||||
if tablename is not None:
|
||||
|
||||
args, table_kw = (), {}
|
||||
if table_args:
|
||||
if isinstance(table_args, dict):
|
||||
table_kw = table_args
|
||||
elif isinstance(table_args, tuple):
|
||||
if isinstance(table_args[-1], dict):
|
||||
args, table_kw = table_args[0:-1], table_args[-1]
|
||||
else:
|
||||
args = table_args
|
||||
def _as_declarative(cls, classname, dict_):
|
||||
global declared_attr, declarative_props
|
||||
if declared_attr is None:
|
||||
from .api import declared_attr
|
||||
declarative_props = (declared_attr, util.classproperty)
|
||||
|
||||
autoload = dict_.get('__autoload__')
|
||||
if autoload:
|
||||
table_kw['autoload'] = True
|
||||
if _get_immediate_cls_attr(cls, '__abstract__', strict=True):
|
||||
return
|
||||
|
||||
cls.__table__ = table = table_cls(
|
||||
tablename, cls.metadata,
|
||||
*(tuple(declared_columns) + tuple(args)),
|
||||
**table_kw)
|
||||
else:
|
||||
table = cls.__table__
|
||||
if declared_columns:
|
||||
for c in declared_columns:
|
||||
if not table.c.contains_column(c):
|
||||
raise exc.ArgumentError(
|
||||
"Can't add additional column %r when "
|
||||
"specifying __table__" % c.key
|
||||
)
|
||||
|
||||
if hasattr(cls, '__mapper_cls__'):
|
||||
mapper_cls = util.unbound_method_to_callable(cls.__mapper_cls__)
|
||||
else:
|
||||
mapper_cls = mapper
|
||||
|
||||
for c in cls.__bases__:
|
||||
if _declared_mapping_info(c) is not None:
|
||||
inherits = c
|
||||
break
|
||||
else:
|
||||
inherits = None
|
||||
|
||||
if table is None and inherits is None:
|
||||
raise exc.InvalidRequestError(
|
||||
"Class %r does not have a __table__ or __tablename__ "
|
||||
"specified and does not inherit from an existing "
|
||||
"table-mapped class." % cls
|
||||
)
|
||||
elif inherits:
|
||||
inherited_mapper = _declared_mapping_info(inherits)
|
||||
inherited_table = inherited_mapper.local_table
|
||||
inherited_mapped_table = inherited_mapper.mapped_table
|
||||
|
||||
if table is None:
|
||||
# single table inheritance.
|
||||
# ensure no table args
|
||||
if table_args:
|
||||
raise exc.ArgumentError(
|
||||
"Can't place __table_args__ on an inherited class "
|
||||
"with no table."
|
||||
)
|
||||
# add any columns declared here to the inherited table.
|
||||
for c in declared_columns:
|
||||
if c.primary_key:
|
||||
raise exc.ArgumentError(
|
||||
"Can't place primary key columns on an inherited "
|
||||
"class with no table."
|
||||
)
|
||||
if c.name in inherited_table.c:
|
||||
if inherited_table.c[c.name] is c:
|
||||
continue
|
||||
raise exc.ArgumentError(
|
||||
"Column '%s' on class %s conflicts with "
|
||||
"existing column '%s'" %
|
||||
(c, cls, inherited_table.c[c.name])
|
||||
)
|
||||
inherited_table.append_column(c)
|
||||
if inherited_mapped_table is not None and \
|
||||
inherited_mapped_table is not inherited_table:
|
||||
inherited_mapped_table._refresh_for_new_column(c)
|
||||
|
||||
defer_map = hasattr(cls, '_sa_decl_prepare')
|
||||
if defer_map:
|
||||
cfg_cls = _DeferredMapperConfig
|
||||
else:
|
||||
cfg_cls = _MapperConfig
|
||||
mt = cfg_cls(mapper_cls,
|
||||
cls, table,
|
||||
inherits,
|
||||
declared_columns,
|
||||
column_copies,
|
||||
our_stuff,
|
||||
mapper_args_fn)
|
||||
if not defer_map:
|
||||
mt.map()
|
||||
_MapperConfig.setup_mapping(cls, classname, dict_)
|
||||
|
||||
|
||||
class _MapperConfig(object):
|
||||
|
||||
mapped_table = None
|
||||
@classmethod
|
||||
def setup_mapping(cls, cls_, classname, dict_):
|
||||
defer_map = _get_immediate_cls_attr(
|
||||
cls_, '_sa_decl_prepare_nocascade', strict=True) or \
|
||||
hasattr(cls_, '_sa_decl_prepare')
|
||||
|
||||
def __init__(self, mapper_cls,
|
||||
cls,
|
||||
table,
|
||||
inherits,
|
||||
declared_columns,
|
||||
column_copies,
|
||||
properties, mapper_args_fn):
|
||||
self.mapper_cls = mapper_cls
|
||||
self.cls = cls
|
||||
self.local_table = table
|
||||
self.inherits = inherits
|
||||
self.properties = properties
|
||||
if defer_map:
|
||||
cfg_cls = _DeferredMapperConfig
|
||||
else:
|
||||
cfg_cls = _MapperConfig
|
||||
cfg_cls(cls_, classname, dict_)
|
||||
|
||||
def __init__(self, cls_, classname, dict_):
|
||||
|
||||
self.cls = cls_
|
||||
|
||||
# dict_ will be a dictproxy, which we can't write to, and we need to!
|
||||
self.dict_ = dict(dict_)
|
||||
self.classname = classname
|
||||
self.mapped_table = None
|
||||
self.properties = util.OrderedDict()
|
||||
self.declared_columns = set()
|
||||
self.column_copies = {}
|
||||
self._setup_declared_events()
|
||||
|
||||
# temporary registry. While early 1.0 versions
|
||||
# set up the ClassManager here, by API contract
|
||||
# we can't do that until there's a mapper.
|
||||
self.cls._sa_declared_attr_reg = {}
|
||||
|
||||
self._scan_attributes()
|
||||
|
||||
clsregistry.add_class(self.classname, self.cls)
|
||||
|
||||
self._extract_mappable_attributes()
|
||||
|
||||
self._extract_declared_columns()
|
||||
|
||||
self._setup_table()
|
||||
|
||||
self._setup_inheritance()
|
||||
|
||||
self._early_mapping()
|
||||
|
||||
def _early_mapping(self):
|
||||
self.map()
|
||||
|
||||
def _setup_declared_events(self):
|
||||
if _get_immediate_cls_attr(self.cls, '__declare_last__'):
|
||||
@event.listens_for(mapper, "after_configured")
|
||||
def after_configured():
|
||||
self.cls.__declare_last__()
|
||||
|
||||
if _get_immediate_cls_attr(self.cls, '__declare_first__'):
|
||||
@event.listens_for(mapper, "before_configured")
|
||||
def before_configured():
|
||||
self.cls.__declare_first__()
|
||||
|
||||
    def _scan_attributes(self):
        cls = self.cls
        dict_ = self.dict_
        column_copies = self.column_copies
        mapper_args_fn = None
        table_args = inherited_table_args = None
        tablename = None

        for base in cls.__mro__:
            class_mapped = base is not cls and \
                _declared_mapping_info(base) is not None and \
                not _get_immediate_cls_attr(
                    base, '_sa_decl_prepare_nocascade', strict=True)

            if not class_mapped and base is not cls:
                self._produce_column_copies(base)

            for name, obj in vars(base).items():
                if name == '__mapper_args__':
                    if not mapper_args_fn and (
                        not class_mapped or
                        isinstance(obj, declarative_props)
                    ):
                        # don't even invoke __mapper_args__ until
                        # after we've determined everything about the
                        # mapped table.
                        # make a copy of it so a class-level dictionary
                        # is not overwritten when we update column-based
                        # arguments.
                        mapper_args_fn = lambda: dict(cls.__mapper_args__)
                elif name == '__tablename__':
                    if not tablename and (
                        not class_mapped or
                        isinstance(obj, declarative_props)
                    ):
                        tablename = cls.__tablename__
                elif name == '__table_args__':
                    if not table_args and (
                        not class_mapped or
                        isinstance(obj, declarative_props)
                    ):
                        table_args = cls.__table_args__
                        if not isinstance(
                                table_args, (tuple, dict, type(None))):
                            raise exc.ArgumentError(
                                "__table_args__ value must be a tuple, "
                                "dict, or None")
                        if base is not cls:
                            inherited_table_args = True
                elif class_mapped:
                    if isinstance(obj, declarative_props):
                        util.warn("Regular (i.e. not __special__) "
                                  "attribute '%s.%s' uses @declared_attr, "
                                  "but owning class %s is mapped - "
                                  "not applying to subclass %s."
                                  % (base.__name__, name, base, cls))
                    continue
                elif base is not cls:
                    # we're a mixin, abstract base, or something that is
                    # acting like that for now.
                    if isinstance(obj, Column):
                        # already copied columns to the mapped class.
                        continue
                    elif isinstance(obj, MapperProperty):
                        raise exc.InvalidRequestError(
                            "Mapper properties (i.e. deferred,"
                            "column_property(), relationship(), etc.) must "
                            "be declared as @declared_attr callables "
                            "on declarative mixin classes.")
                    elif isinstance(obj, declarative_props):
                        oldclassprop = isinstance(obj, util.classproperty)
                        if not oldclassprop and obj._cascading:
                            dict_[name] = column_copies[obj] = \
                                ret = obj.__get__(obj, cls)
                            setattr(cls, name, ret)
                        else:
                            if oldclassprop:
                                util.warn_deprecated(
                                    "Use of sqlalchemy.util.classproperty on "
                                    "declarative classes is deprecated.")
                            dict_[name] = column_copies[obj] = \
                                ret = getattr(cls, name)
                        if isinstance(ret, (Column, MapperProperty)) and \
                                ret.doc is None:
                            ret.doc = obj.__doc__

        if inherited_table_args and not tablename:
            table_args = None

        self.table_args = table_args
        self.tablename = tablename
        self.mapper_args_fn = mapper_args_fn
        self.declared_columns = declared_columns
        self.column_copies = column_copies

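`_scan_attributes()` walks the MRO so that `__tablename__`, `__table_args__` and `__mapper_args__` can come from mixins, including `@declared_attr` callables evaluated once per mapped subclass. A small sketch of the pattern this supports (class names are illustrative):

```python
from sqlalchemy import Column, Integer
from sqlalchemy.ext.declarative import declarative_base, declared_attr

Base = declarative_base()


class NamingMixin(object):
    # evaluated per mapped subclass during attribute scanning
    @declared_attr
    def __tablename__(cls):
        return cls.__name__.lower()


class Account(NamingMixin, Base):
    id = Column(Integer, primary_key=True)


assert Account.__table__.name == 'account'
```
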
    def _produce_column_copies(self, base):
        cls = self.cls
        dict_ = self.dict_
        column_copies = self.column_copies
        # copy mixin columns to the mapped class
        for name, obj in vars(base).items():
            if isinstance(obj, Column):
                if getattr(cls, name) is not obj:
                    # if column has been overridden
                    # (like by the InstrumentedAttribute of the
                    # superclass), skip
                    continue
                elif obj.foreign_keys:
                    raise exc.InvalidRequestError(
                        "Columns with foreign keys to other columns "
                        "must be declared as @declared_attr callables "
                        "on declarative mixin classes. ")
                elif name not in dict_ and not (
                        '__table__' in dict_ and
                        (obj.name or name) in dict_['__table__'].c
                ):
                    column_copies[obj] = copy_ = obj.copy()
                    copy_._creation_order = obj._creation_order
                    setattr(cls, name, copy_)
                    dict_[name] = copy_

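The copy step above is what lets several subclasses share a mixin-declared `Column` without sharing the object itself; columns carrying a `ForeignKey` are refused here and must instead be produced per-subclass by a `@declared_attr` callable. A sketch under those rules (names are illustrative):

```python
from sqlalchemy import Column, ForeignKey, Integer
from sqlalchemy.ext.declarative import declarative_base, declared_attr

Base = declarative_base()


class AuditMixin(object):
    # a plain Column: copied onto each subclass by _produce_column_copies()
    revision = Column(Integer, default=0)

    # a ForeignKey column must be produced per-subclass via @declared_attr
    @declared_attr
    def user_id(cls):
        return Column(Integer, ForeignKey('user.id'))


class User(Base):
    __tablename__ = 'user'
    id = Column(Integer, primary_key=True)


class Document(AuditMixin, Base):
    __tablename__ = 'document'
    id = Column(Integer, primary_key=True)


# the subclass received its own copy, not the mixin's Column object
assert Document.__table__.c.revision is not AuditMixin.revision
```
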
    def _extract_mappable_attributes(self):
        cls = self.cls
        dict_ = self.dict_

        our_stuff = self.properties

        for k in list(dict_):

            if k in ('__table__', '__tablename__', '__mapper_args__'):
                continue

            value = dict_[k]
            if isinstance(value, declarative_props):
                value = getattr(cls, k)

            elif isinstance(value, QueryableAttribute) and \
                    value.class_ is not cls and \
                    value.key != k:
                # detect a QueryableAttribute that's already mapped being
                # assigned elsewhere in userland, turn into a synonym()
                value = synonym(value.key)
                setattr(cls, k, value)

            if (isinstance(value, tuple) and len(value) == 1 and
                    isinstance(value[0], (Column, MapperProperty))):
                util.warn("Ignoring declarative-like tuple value of attribute "
                          "%s: possibly a copy-and-paste error with a comma "
                          "left at the end of the line?" % k)
                continue
            elif not isinstance(value, (Column, MapperProperty)):
                # using @declared_attr for some object that
                # isn't Column/MapperProperty; remove from the dict_
                # and place the evaluated value onto the class.
                if not k.startswith('__'):
                    dict_.pop(k)
                    setattr(cls, k, value)
                continue
            # we expect to see the name 'metadata' in some valid cases;
            # however at this point we see it's assigned to something trying
            # to be mapped, so raise for that.
            elif k == 'metadata':
                raise exc.InvalidRequestError(
                    "Attribute name 'metadata' is reserved "
                    "for the MetaData instance when using a "
                    "declarative base class."
                )
            prop = clsregistry._deferred_relationship(cls, value)
            our_stuff[k] = prop

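The tuple check above exists purely to catch a stray trailing comma. For illustration, this is the shape of class body that triggers the warning (hypothetical model):

```python
from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()


class Person(Base):
    __tablename__ = 'person'
    id = Column(Integer, primary_key=True)
    # trailing comma makes this a one-element tuple; declarative warns
    # "Ignoring declarative-like tuple value of attribute name..." and
    # skips the attribute rather than mapping it
    name = Column(String(50)),
```
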
    def _extract_declared_columns(self):
        our_stuff = self.properties

        # set up attributes in the order they were created
        our_stuff.sort(key=lambda key: our_stuff[key]._creation_order)

        # extract columns from the class dict
        declared_columns = self.declared_columns
        name_to_prop_key = collections.defaultdict(set)
        for key, c in list(our_stuff.items()):
            if isinstance(c, (ColumnProperty, CompositeProperty)):
                for col in c.columns:
                    if isinstance(col, Column) and \
                            col.table is None:
                        _undefer_column_name(key, col)
                        if not isinstance(c, CompositeProperty):
                            name_to_prop_key[col.name].add(key)
                        declared_columns.add(col)
            elif isinstance(c, Column):
                _undefer_column_name(key, c)
                name_to_prop_key[c.name].add(key)
                declared_columns.add(c)
                # if the column is the same name as the key,
                # remove it from the explicit properties dict.
                # the normal rules for assigning column-based properties
                # will take over, including precedence of columns
                # in multi-column ColumnProperties.
                if key == c.key:
                    del our_stuff[key]

        for name, keys in name_to_prop_key.items():
            if len(keys) > 1:
                util.warn(
                    "On class %r, Column object %r named "
                    "directly multiple times, "
                    "only one will be used: %s" %
                    (self.classname, name, (", ".join(sorted(keys))))
                )

    def _setup_table(self):
        cls = self.cls
        tablename = self.tablename
        table_args = self.table_args
        dict_ = self.dict_
        declared_columns = self.declared_columns

        declared_columns = self.declared_columns = sorted(
            declared_columns, key=lambda c: c._creation_order)
        table = None

        if hasattr(cls, '__table_cls__'):
            table_cls = util.unbound_method_to_callable(cls.__table_cls__)
        else:
            table_cls = Table

        if '__table__' not in dict_:
            if tablename is not None:

                args, table_kw = (), {}
                if table_args:
                    if isinstance(table_args, dict):
                        table_kw = table_args
                    elif isinstance(table_args, tuple):
                        if isinstance(table_args[-1], dict):
                            args, table_kw = table_args[0:-1], table_args[-1]
                        else:
                            args = table_args

                autoload = dict_.get('__autoload__')
                if autoload:
                    table_kw['autoload'] = True

                cls.__table__ = table = table_cls(
                    tablename, cls.metadata,
                    *(tuple(declared_columns) + tuple(args)),
                    **table_kw)
        else:
            table = cls.__table__
            if declared_columns:
                for c in declared_columns:
                    if not table.c.contains_column(c):
                        raise exc.ArgumentError(
                            "Can't add additional column %r when "
                            "specifying __table__" % c.key
                        )
        self.local_table = table

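`_setup_table()` accepts `__table_args__` either as a dict of keyword arguments or as a tuple with an optional trailing dict, and lets `__table_cls__` swap in a different `Table` factory. A sketch of both forms (the constraint, option, and hook choices are illustrative, not from this commit):

```python
from sqlalchemy import Column, Integer, String, Table, UniqueConstraint
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()


class Entry(Base):
    __tablename__ = 'entry'
    # tuple form: positional Table args first, keyword args in a final dict
    __table_args__ = (
        UniqueConstraint('slug'),
        {'sqlite_autoincrement': True},
    )
    id = Column(Integer, primary_key=True)
    slug = Column(String(50))


class Note(Base):
    __tablename__ = 'note'
    id = Column(Integer, primary_key=True)

    @classmethod
    def __table_cls__(cls, *arg, **kw):
        # hypothetical hook: observe Table construction, then delegate
        print("creating table: %s" % arg[0])
        return Table(*arg, **kw)
```
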
    def _setup_inheritance(self):
        table = self.local_table
        cls = self.cls
        table_args = self.table_args
        declared_columns = self.declared_columns
        for c in cls.__bases__:
            c = _resolve_for_abstract(c)
            if c is None:
                continue
            if _declared_mapping_info(c) is not None and \
                    not _get_immediate_cls_attr(
                        c, '_sa_decl_prepare_nocascade', strict=True):
                self.inherits = c
                break
        else:
            self.inherits = None

        if table is None and self.inherits is None and \
                not _get_immediate_cls_attr(cls, '__no_table__'):

            raise exc.InvalidRequestError(
                "Class %r does not have a __table__ or __tablename__ "
                "specified and does not inherit from an existing "
                "table-mapped class." % cls
            )
        elif self.inherits:
            inherited_mapper = _declared_mapping_info(self.inherits)
            inherited_table = inherited_mapper.local_table
            inherited_mapped_table = inherited_mapper.mapped_table

            if table is None:
                # single table inheritance.
                # ensure no table args
                if table_args:
                    raise exc.ArgumentError(
                        "Can't place __table_args__ on an inherited class "
                        "with no table."
                    )
                # add any columns declared here to the inherited table.
                for c in declared_columns:
                    if c.primary_key:
                        raise exc.ArgumentError(
                            "Can't place primary key columns on an inherited "
                            "class with no table."
                        )
                    if c.name in inherited_table.c:
                        if inherited_table.c[c.name] is c:
                            continue
                        raise exc.ArgumentError(
                            "Column '%s' on class %s conflicts with "
                            "existing column '%s'" %
                            (c, cls, inherited_table.c[c.name])
                        )
                    inherited_table.append_column(c)
                    if inherited_mapped_table is not None and \
                            inherited_mapped_table is not inherited_table:
                        inherited_mapped_table._refresh_for_new_column(c)

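The branch above implements single-table inheritance: a subclass with no table of its own gets its non-primary-key columns appended to the base class's table. A runnable reduction of the standard docs example (the Employee/Engineer names are not from this diff):

```python
from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()


class Employee(Base):
    __tablename__ = 'employee'
    id = Column(Integer, primary_key=True)
    type = Column(String(20))
    __mapper_args__ = {
        'polymorphic_on': type,
        'polymorphic_identity': 'employee',
    }


class Engineer(Employee):
    # no __tablename__/__table__: columns declared here are appended
    # to the inherited 'employee' table by _setup_inheritance()
    primary_language = Column(String(50))
    __mapper_args__ = {'polymorphic_identity': 'engineer'}


assert 'primary_language' in Employee.__table__.c
```
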
    def _prepare_mapper_arguments(self):
        properties = self.properties
@@ -401,20 +513,31 @@ class _MapperConfig(object):
                    properties[k] = [col] + p.columns
        result_mapper_args = mapper_args.copy()
        result_mapper_args['properties'] = properties
        return result_mapper_args
        self.mapper_args = result_mapper_args

    def map(self):
        mapper_args = self._prepare_mapper_arguments()
        self.cls.__mapper__ = self.mapper_cls(
        self._prepare_mapper_arguments()
        if hasattr(self.cls, '__mapper_cls__'):
            mapper_cls = util.unbound_method_to_callable(
                self.cls.__mapper_cls__)
        else:
            mapper_cls = mapper

        self.cls.__mapper__ = mp_ = mapper_cls(
            self.cls,
            self.local_table,
            **mapper_args
            **self.mapper_args
        )
        del self.cls._sa_declared_attr_reg
        return mp_

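`map()` now also honors a `__mapper_cls__` hook, so a class can route its own mapper construction through a callable. A sketch (the logging wrapper is hypothetical):

```python
from sqlalchemy import Column, Integer
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import mapper

Base = declarative_base()


def logging_mapper(cls, *arg, **kw):
    # hypothetical wrapper: observe every mapper as it is created
    print("mapping class: %s" % cls.__name__)
    return mapper(cls, *arg, **kw)


class Gadget(Base):
    __tablename__ = 'gadget'
    __mapper_cls__ = logging_mapper
    id = Column(Integer, primary_key=True)
```
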
class _DeferredMapperConfig(_MapperConfig):

    _configs = util.OrderedDict()

    def _early_mapping(self):
        pass

    @property
    def cls(self):
        return self._cls()
@@ -466,7 +589,7 @@ class _DeferredMapperConfig(_MapperConfig):
|
||||
def map(self):
|
||||
self._configs.pop(self._cls, None)
|
||||
super(_DeferredMapperConfig, self).map()
|
||||
return super(_DeferredMapperConfig, self).map()
|
||||
|
||||
|
||||
def _add_attribute(cls, key, value):
|
||||
|
|
|
|||
|
|
@@ -1,5 +1,5 @@
# ext/declarative/clsregistry.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -71,6 +71,8 @@ class _MultipleClassMarker(object):

    """

    __slots__ = 'on_remove', 'contents', '__weakref__'

    def __init__(self, classes, on_remove=None):
        self.on_remove = on_remove
        self.contents = set([
@@ -103,7 +105,12 @@ class _MultipleClassMarker(object):
            self.on_remove()

    def add_item(self, item):
        modules = set([cls().__module__ for cls in self.contents])
        # protect against class registration race condition against
        # asynchronous garbage collection calling _remove_item,
        # [ticket:3208]
        modules = set([
            cls.__module__ for cls in
            [ref() for ref in self.contents] if cls is not None])
        if item.__module__ in modules:
            util.warn(
                "This declarative base already contains a class with the "
|
|||
|
||||
"""
|
||||
|
||||
__slots__ = 'parent', 'name', 'contents', 'mod_ns', 'path', '__weakref__'
|
||||
|
||||
def __init__(self, name, parent):
|
||||
self.parent = parent
|
||||
self.name = name
|
||||
|
|
@@ -167,6 +176,8 @@ class _ModuleMarker(object):


class _ModNS(object):
    __slots__ = '__parent',

    def __init__(self, parent):
        self.__parent = parent
@@ -188,6 +199,8 @@ class _ModNS(object):


class _GetColumns(object):
    __slots__ = 'cls',

    def __init__(self, cls):
        self.cls = cls
@@ -216,6 +229,8 @@ inspection._inspects(_GetColumns)(


class _GetTable(object):
    __slots__ = 'key', 'metadata'

    def __init__(self, key, metadata):
        self.key = key
        self.metadata = metadata
@@ -306,7 +321,8 @@ def _deferred_relationship(cls, prop):
            key, kwargs = prop.backref
            for attr in ('primaryjoin', 'secondaryjoin', 'secondary',
                         'foreign_keys', 'remote_side', 'order_by'):
                if attr in kwargs and isinstance(kwargs[attr], str):
                if attr in kwargs and isinstance(kwargs[attr],
                                                 util.string_types):
                    kwargs[attr] = resolve_arg(kwargs[attr])

    return prop
@@ -1,5 +1,5 @@
# ext/horizontal_shard.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -1,5 +1,5 @@
# ext/hybrid.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -45,8 +45,8 @@ as the class itself::
            return self.end - self.start

        @hybrid_method
        def contains(self,point):
            return (self.start <= point) & (point < self.end)
        def contains(self, point):
            return (self.start <= point) & (point <= self.end)

        @hybrid_method
        def intersects(self, other):
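Besides the spacing fix, the docstring now shows `contains()` as closed on both ends. For context, the full hybrid pattern is runnable as written (a reduction of the docs example):

```python
from sqlalchemy import Column, Integer
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.hybrid import hybrid_method

Base = declarative_base()


class Interval(Base):
    __tablename__ = 'interval'
    id = Column(Integer, primary_key=True)
    start = Column(Integer, nullable=False)
    end = Column(Integer, nullable=False)

    @hybrid_method
    def contains(self, point):
        return (self.start <= point) & (point <= self.end)


# instance level: evaluates against plain Python values
i = Interval(start=5, end=10)
assert i.contains(7)

# class level: produces a SQL boolean expression
print(Interval.contains(7))
```
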
@@ -145,7 +145,7 @@ usage of the absolute value function::
        return func.abs(cls.length) / 2

Above the Python function ``abs()`` is used for instance-level
operations, the SQL function ``ABS()`` is used via the :attr:`.func`
operations, the SQL function ``ABS()`` is used via the :data:`.func`
object for class-level expressions::

    >>> i1.radius
@@ -634,10 +634,10 @@ from .. import util
from ..orm import attributes, interfaces

HYBRID_METHOD = util.symbol('HYBRID_METHOD')
"""Symbol indicating an :class:`_InspectionAttr` that's
"""Symbol indicating an :class:`InspectionAttr` that's
of type :class:`.hybrid_method`.

Is assigned to the :attr:`._InspectionAttr.extension_type`
Is assigned to the :attr:`.InspectionAttr.extension_type`
attribute.

.. seealso::
@@ -647,10 +647,10 @@ HYBRID_METHOD = util.symbol('HYBRID_METHOD')
"""

HYBRID_PROPERTY = util.symbol('HYBRID_PROPERTY')
"""Symbol indicating an :class:`_InspectionAttr` that's
"""Symbol indicating an :class:`InspectionAttr` that's
of type :class:`.hybrid_property`.

Is assigned to the :attr:`._InspectionAttr.extension_type`
Is assigned to the :attr:`.InspectionAttr.extension_type`
attribute.

.. seealso::
@@ -660,7 +660,7 @@ HYBRID_PROPERTY = util.symbol('HYBRID_PROPERTY')
"""


class hybrid_method(interfaces._InspectionAttr):
class hybrid_method(interfaces.InspectionAttrInfo):
    """A decorator which allows definition of a Python object method with both
    instance-level and class-level behavior.

@@ -703,7 +703,7 @@ class hybrid_method(interfaces._InspectionAttr):
        return self


class hybrid_property(interfaces._InspectionAttr):
class hybrid_property(interfaces.InspectionAttrInfo):
    """A decorator which allows definition of a Python descriptor with both
    instance-level and class-level behavior.

@@ -166,7 +166,13 @@ class ExtendedInstrumentationRegistry(InstrumentationFactory):
    def manager_of_class(self, cls):
        if cls is None:
            return None
        return self._manager_finders.get(cls, _default_manager_getter)(cls)
        try:
            finder = self._manager_finders.get(cls, _default_manager_getter)
        except TypeError:
            # due to weakref lookup on invalid object
            return None
        else:
            return finder(cls)

    def state_of(self, instance):
        if instance is None:
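The `TypeError` being caught comes from the weak-keyed registry: looking up an object that cannot be weak-referenced raises rather than simply missing. A stdlib-only sketch of the same guard (function and variable names are illustrative):

```python
import weakref

_finders = weakref.WeakKeyDictionary()


def manager_of_class(cls, default=None):
    try:
        # WeakKeyDictionary.get() builds weakref.ref(cls) internally and
        # raises TypeError for objects that can't be weakly referenced
        return _finders.get(cls, default)
    except TypeError:
        return None


class Plain(object):
    pass


assert manager_of_class(Plain) is None   # absent -> default
assert manager_of_class(42) is None      # int isn't weakref-able -> guarded
```
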
@@ -392,6 +398,7 @@ def _reinstall_default_lookups():
            manager_of_class=_default_manager_getter
        )
    )
    _instrumentation_factory._extended = False


def _install_lookups(lookups):

@@ -1,5 +1,5 @@
# ext/mutable.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -402,6 +402,27 @@ class MutableBase(object):
        msg = "Attribute '%s' does not accept objects of type %s"
        raise ValueError(msg % (key, type(value)))

    @classmethod
    def _get_listen_keys(cls, attribute):
        """Given a descriptor attribute, return a ``set()`` of the attribute
        keys which indicate a change in the state of this attribute.

        This is normally just ``set([attribute.key])``, but can be overridden
        to provide for additional keys.  E.g. a :class:`.MutableComposite`
        augments this set with the attribute keys associated with the columns
        that comprise the composite value.

        This collection is consulted in the case of intercepting the
        :meth:`.InstanceEvents.refresh` and
        :meth:`.InstanceEvents.refresh_flush` events, which pass along a list
        of attribute names that have been refreshed; the list is compared
        against this set to determine if action needs to be taken.

        .. versionadded:: 1.0.5

        """
        return set([attribute.key])

    @classmethod
    def _listen_on_attribute(cls, attribute, coerce, parent_cls):
        """Establish this type as a mutation listener for the given
@@ -415,6 +436,8 @@ class MutableBase(object):
            # rely on "propagate" here
            parent_cls = attribute.class_

        listen_keys = cls._get_listen_keys(attribute)

        def load(state, *args):
            """Listen for objects loaded or refreshed.

@@ -429,6 +452,10 @@ class MutableBase(object):
                state.dict[key] = val
                val._parents[state.obj()] = key

        def load_attrs(state, ctx, attrs):
            if not attrs or listen_keys.intersection(attrs):
                load(state)

        def set(target, value, oldvalue, initiator):
            """Listen for set/replace events on the target
            data member.
@@ -463,7 +490,9 @@ class MutableBase(object):

        event.listen(parent_cls, 'load', load,
                     raw=True, propagate=True)
        event.listen(parent_cls, 'refresh', load,
        event.listen(parent_cls, 'refresh', load_attrs,
                     raw=True, propagate=True)
        event.listen(parent_cls, 'refresh_flush', load_attrs,
                     raw=True, propagate=True)
        event.listen(attribute, 'set', set,
                     raw=True, retval=True, propagate=True)
@@ -574,6 +603,10 @@ class MutableComposite(MutableBase):

    """

    @classmethod
    def _get_listen_keys(cls, attribute):
        return set([attribute.key]).union(attribute.property._attribute_keys)

    def changed(self):
        """Subclasses should call this method whenever change events occur."""

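`_get_listen_keys()` matters for composites because a refresh arrives under the *column* attribute names (e.g. `'x'`, `'y'`), not under the composite key itself. The canonical composite from the docs, for context (not part of this diff):

```python
from sqlalchemy.ext.mutable import MutableComposite


class Point(MutableComposite):
    def __init__(self, x, y):
        self.x = x
        self.y = y

    def __setattr__(self, key, value):
        object.__setattr__(self, key, value)
        # propagate change events to all parents of this composite
        self.changed()

    def __composite_values__(self):
        return self.x, self.y

    def __eq__(self, other):
        return isinstance(other, Point) and \
            other.x == self.x and other.y == self.y

    def __ne__(self, other):
        return not self.__eq__(other)
```
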
@@ -602,6 +635,18 @@ _setup_composite_listener()
class MutableDict(Mutable, dict):
    """A dictionary type that implements :class:`.Mutable`.

    The :class:`.MutableDict` object implements a dictionary that will
    emit change events to the underlying mapping when the contents of
    the dictionary are altered, including when values are added or removed.

    Note that :class:`.MutableDict` does **not** apply mutable tracking to the
    *values themselves* inside the dictionary. Therefore it is not a sufficient
    solution for the use case of tracking deep changes to a *recursive*
    dictionary structure, such as a JSON structure.  To support this use case,
    build a subclass of :class:`.MutableDict` that provides appropriate
    coercion to the values placed in the dictionary so that they too are
    "mutable", and emit events up to their parent structure.

    .. versionadded:: 0.8

    """
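The docstring's suggested approach for nested structures, sketched minimally: coerce plain dicts on the way in so they are also change-tracked. This is a simplified illustration only, not a complete recursive-mutation implementation (propagating events from deeply nested values upward needs additional wiring):

```python
from sqlalchemy.ext.mutable import MutableDict


class NestedMutableDict(MutableDict):
    """Illustrative subclass: coerce nested plain dicts on the way in."""

    @classmethod
    def coerce(cls, key, value):
        if isinstance(value, dict) and not isinstance(value, cls):
            return cls(
                (k, cls.coerce(k, v) if isinstance(v, dict) else v)
                for k, v in value.items()
            )
        return super(NestedMutableDict, cls).coerce(key, value)
```

With this sketch, `NestedMutableDict.coerce('data', {'a': {'b': 1}})['a']` comes back as a `NestedMutableDict` rather than a plain dict.
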
@@ -621,16 +666,30 @@ class MutableDict(Mutable, dict):
        dict.__delitem__(self, key)
        self.changed()

    def update(self, *a, **kw):
        dict.update(self, *a, **kw)
        self.changed()

    def pop(self, *arg):
        result = dict.pop(self, *arg)
        self.changed()
        return result

    def popitem(self):
        result = dict.popitem(self)
        self.changed()
        return result

    def clear(self):
        dict.clear(self)
        self.changed()

    @classmethod
    def coerce(cls, key, value):
        """Convert plain dictionary to MutableDict."""
        if not isinstance(value, MutableDict):
        """Convert plain dictionary to instance of this class."""
        if not isinstance(value, cls):
            if isinstance(value, dict):
                return MutableDict(value)
                return cls(value)
            return Mutable.coerce(key, value)
        else:
            return value

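Because `coerce()` now constructs `cls(value)` instead of a hard-coded `MutableDict`, subclasses survive coercion as themselves. A quick check (subclass name illustrative):

```python
from sqlalchemy.ext.mutable import MutableDict


class MyDict(MutableDict):
    pass


coerced = MyDict.coerce('data', {'a': 1})
assert type(coerced) is MyDict   # previously this came back as MutableDict
```
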
@@ -1,5 +1,5 @@
# ext/orderinglist.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -119,7 +119,7 @@ start numbering at 1 or some other integer, provide ``count_from=1``.

"""
from ..orm.collections import collection
from ..orm.collections import collection, collection_adapter
from .. import util

__all__ = ['ordering_list']
@@ -319,7 +319,10 @@ class OrderingList(list):

    def remove(self, entity):
        super(OrderingList, self).remove(entity)
        self._reorder()

        adapter = collection_adapter(self)
        if adapter and adapter._referenced_by_owner:
            self._reorder()

    def pop(self, index=-1):
        entity = super(OrderingList, self).pop(index)
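For context, the collection this targets is built with `ordering_list()`; `remove()` now renumbers only while the list is still the collection the owner references, avoiding spurious renumbering during collection replacement. A runnable reduction of the docs example:

```python
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.orderinglist import ordering_list
from sqlalchemy.orm import relationship

Base = declarative_base()


class Slide(Base):
    __tablename__ = 'slide'
    id = Column(Integer, primary_key=True)
    bullets = relationship(
        "Bullet",
        order_by="Bullet.position",
        collection_class=ordering_list('position'))


class Bullet(Base):
    __tablename__ = 'bullet'
    id = Column(Integer, primary_key=True)
    slide_id = Column(Integer, ForeignKey('slide.id'))
    position = Column(Integer)
    text = Column(String(100))


s = Slide()
s.bullets.append(Bullet(text="first"))
s.bullets.append(Bullet(text="second"))
assert [b.position for b in s.bullets] == [0, 1]

s.bullets.remove(s.bullets[0])
assert s.bullets[0].position == 0   # renumbered after remove()
```
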
@@ -1,5 +1,5 @@
# ext/serializer.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under