split platform
This commit is contained in:
commit
a2291f58b8
278 changed files with 114251 additions and 0 deletions
270
lib/python3.4/site-packages/sqlalchemy/orm/__init__.py
Normal file
270
lib/python3.4/site-packages/sqlalchemy/orm/__init__.py
Normal file
|
|
@ -0,0 +1,270 @@
|
|||
# orm/__init__.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""
|
||||
Functional constructs for ORM configuration.
|
||||
|
||||
See the SQLAlchemy object relational tutorial and mapper configuration
|
||||
documentation for an overview of how this module is used.
|
||||
|
||||
"""
|
||||
|
||||
from . import exc
|
||||
from .mapper import (
|
||||
Mapper,
|
||||
_mapper_registry,
|
||||
class_mapper,
|
||||
configure_mappers,
|
||||
reconstructor,
|
||||
validates
|
||||
)
|
||||
from .interfaces import (
|
||||
EXT_CONTINUE,
|
||||
EXT_STOP,
|
||||
PropComparator,
|
||||
)
|
||||
from .deprecated_interfaces import (
|
||||
MapperExtension,
|
||||
SessionExtension,
|
||||
AttributeExtension,
|
||||
)
|
||||
from .util import (
|
||||
aliased,
|
||||
join,
|
||||
object_mapper,
|
||||
outerjoin,
|
||||
polymorphic_union,
|
||||
was_deleted,
|
||||
with_parent,
|
||||
with_polymorphic,
|
||||
)
|
||||
from .properties import ColumnProperty
|
||||
from .relationships import RelationshipProperty
|
||||
from .descriptor_props import (
|
||||
ComparableProperty,
|
||||
CompositeProperty,
|
||||
SynonymProperty,
|
||||
)
|
||||
from .relationships import (
|
||||
foreign,
|
||||
remote,
|
||||
)
|
||||
from .session import (
|
||||
Session,
|
||||
object_session,
|
||||
sessionmaker,
|
||||
make_transient,
|
||||
make_transient_to_detached
|
||||
)
|
||||
from .scoping import (
|
||||
scoped_session
|
||||
)
|
||||
from . import mapper as mapperlib
|
||||
from .query import AliasOption, Query, Bundle
|
||||
from ..util.langhelpers import public_factory
|
||||
from .. import util as _sa_util
|
||||
from . import strategies as _strategies
|
||||
|
||||
|
||||
def create_session(bind=None, **kwargs):
    r"""Create a new :class:`.Session`
    with no automation enabled by default.

    This function is used primarily for testing.   The usual
    route to :class:`.Session` creation is via its constructor
    or the :func:`.sessionmaker` function.

    :param bind: optional, a single Connectable to use for all
      database access in the created
      :class:`~sqlalchemy.orm.session.Session`.

    :param \*\*kwargs: optional, passed through to the
      :class:`.Session` constructor.

    :returns: an :class:`~sqlalchemy.orm.session.Session` instance

    The defaults of create_session() are the opposite of that of
    :func:`sessionmaker`; ``autoflush`` and ``expire_on_commit`` are
    False, ``autocommit`` is True.  In this sense the session acts
    more like the "classic" SQLAlchemy 0.3 session with these.

    Usage::

        >>> from sqlalchemy.orm import create_session
        >>> session = create_session()

    It is recommended to use :func:`sessionmaker` instead of
    create_session().

    """
    # NOTE: raw docstring (r"") so the ``\*\*kwargs`` reST markup is not
    # interpreted as (invalid) string escape sequences.
    # Defaults below only apply when the caller did not pass the key.
    kwargs.setdefault('autoflush', False)
    kwargs.setdefault('autocommit', True)
    kwargs.setdefault('expire_on_commit', False)
    return Session(bind=bind, **kwargs)
|
||||
|
||||
# Public relationship() constructor, generated from RelationshipProperty.
relationship = public_factory(RelationshipProperty, ".orm.relationship")
|
||||
|
||||
|
||||
def relation(*arg, **kw):
    """Alias for :func:`relationship`, forwarding all arguments
    unchanged.
    """
    result = relationship(*arg, **kw)
    return result
|
||||
|
||||
|
||||
def dynamic_loader(argument, **kw):
    """Construct a dynamically-loading mapper property.

    Shorthand for :func:`relationship` with ``lazy='dynamic'``::

        dynamic_loader(SomeClass)

        # is the same as

        relationship(SomeClass, lazy="dynamic")

    See the section :ref:`dynamic_relationship` for more details
    on dynamic loading.

    """
    # Build the final option dict with the dynamic strategy forced on,
    # then delegate entirely to relationship().
    options = dict(kw)
    options['lazy'] = 'dynamic'
    return relationship(argument, **options)
|
||||
|
||||
|
||||
# Public factories for column-based and composite mapped properties.
column_property = public_factory(ColumnProperty, ".orm.column_property")
composite = public_factory(CompositeProperty, ".orm.composite")
|
||||
|
||||
|
||||
def backref(name, **kwargs):
    """Create a back reference with explicit keyword arguments, which are the
    same arguments one can send to :func:`relationship`.

    Used with the ``backref`` keyword argument to :func:`relationship` in
    place of a string argument, e.g.::

        'items':relationship(
            SomeItem, backref=backref('parent', lazy='subquery'))

    """
    # Just a (name, options) pair; relationship() unpacks it later.
    return name, kwargs
|
||||
|
||||
|
||||
def deferred(*columns, **kw):
    r"""Indicate a column-based mapped attribute that by default will
    not load unless accessed.

    :param \*columns: columns to be mapped.  This is typically a single
     :class:`.Column` object, however a collection is supported in order
     to support multiple columns mapped under the same attribute.

    :param \**kw: additional keyword arguments passed to
     :class:`.ColumnProperty`.

    .. seealso::

        :ref:`deferred`

    """
    # NOTE: raw docstring (r"") so the ``\*`` reST markup above is not
    # interpreted as (invalid) string escape sequences.
    return ColumnProperty(deferred=True, *columns, **kw)
|
||||
|
||||
|
||||
# Public factories for the core mapper and synonym/comparable properties.
mapper = public_factory(Mapper, ".orm.mapper")

synonym = public_factory(SynonymProperty, ".orm.synonym")

comparable_property = public_factory(ComparableProperty,
                                     ".orm.comparable_property")
|
||||
|
||||
|
||||
@_sa_util.deprecated("0.7", message=":func:`.compile_mappers` "
                     "is renamed to :func:`.configure_mappers`")
def compile_mappers():
    """Initialize the inter-mapper relationships of all mappers that have
    been defined.

    Deprecated alias; delegates to :func:`.configure_mappers`.

    """
    configure_mappers()
|
||||
|
||||
|
||||
def clear_mappers():
    """Remove all mappers from all classes.

    This function removes all instrumentation from classes and disposes
    of their associated mappers.  Once called, the classes are unmapped
    and can be later re-mapped with new mappers.

    :func:`.clear_mappers` is *not* for normal use, as there is literally no
    valid usage for it outside of very specific testing scenarios. Normally,
    mappers are permanent structural components of user-defined classes, and
    are never discarded independently of their class.  If a mapped class
    itself is garbage collected, its mapper is automatically disposed of as
    well. As such, :func:`.clear_mappers` is only for usage in test suites
    that re-use the same classes with different mappings, which is itself an
    extremely rare use case - the only such use case is in fact SQLAlchemy's
    own test suite, and possibly the test suites of other ORM extension
    libraries which intend to test various combinations of mapper construction
    upon a fixed set of classes.

    """
    # ``with`` guarantees the configure mutex is released even if
    # ``mapper.dispose()`` raises (replaces manual acquire/try/finally).
    with mapperlib._CONFIGURE_MUTEX:
        while _mapper_registry:
            try:
                # can't even reliably call list(weakdict) in jython
                mapper, b = _mapper_registry.popitem()
                mapper.dispose()
            except KeyError:
                # entry vanished concurrently (weak registry); keep going
                pass
|
||||
|
||||
from . import strategy_options
|
||||
|
||||
# Module-level loader-option functions, extracted from the
# strategy_options objects: ``_unbound_fn`` is the unbound form,
# ``_unbound_all_fn`` the chained "*_all" form.
joinedload = strategy_options.joinedload._unbound_fn
joinedload_all = strategy_options.joinedload._unbound_all_fn
contains_eager = strategy_options.contains_eager._unbound_fn
defer = strategy_options.defer._unbound_fn
undefer = strategy_options.undefer._unbound_fn
undefer_group = strategy_options.undefer_group._unbound_fn
load_only = strategy_options.load_only._unbound_fn
lazyload = strategy_options.lazyload._unbound_fn
lazyload_all = strategy_options.lazyload_all._unbound_all_fn
subqueryload = strategy_options.subqueryload._unbound_fn
subqueryload_all = strategy_options.subqueryload_all._unbound_all_fn
immediateload = strategy_options.immediateload._unbound_fn
noload = strategy_options.noload._unbound_fn
defaultload = strategy_options.defaultload._unbound_fn
|
||||
|
||||
from .strategy_options import Load
|
||||
|
||||
|
||||
def eagerload(*args, **kwargs):
    """Alias for :func:`joinedload()`, forwarding all arguments
    unchanged.
    """
    result = joinedload(*args, **kwargs)
    return result
|
||||
|
||||
|
||||
def eagerload_all(*args, **kwargs):
    """Alias for :func:`joinedload_all()`, forwarding all arguments
    unchanged.
    """
    result = joinedload_all(*args, **kwargs)
    return result
|
||||
|
||||
|
||||
# Public factory for the AliasOption query option.
contains_alias = public_factory(AliasOption, ".orm.contains_alias")
|
||||
|
||||
|
||||
def __go(lcls):
    """Finish module setup: compute ``__all__`` from the given namespace
    and resolve the package's deferred cross-module dependencies.
    """
    global __all__
    # Imported for their registration side effects during startup.
    from .. import util as sa_util
    from . import dynamic
    from . import events
    import inspect as _inspect

    # Export every public, non-module name from the namespace.
    exported = []
    for name, obj in lcls.items():
        if name.startswith('_') or _inspect.ismodule(obj):
            continue
        exported.append(name)
    __all__ = sorted(exported)

    _sa_util.dependencies.resolve_all("sqlalchemy.orm")

__go(locals())
|
||||
1578
lib/python3.4/site-packages/sqlalchemy/orm/attributes.py
Normal file
1578
lib/python3.4/site-packages/sqlalchemy/orm/attributes.py
Normal file
File diff suppressed because it is too large
Load diff
492
lib/python3.4/site-packages/sqlalchemy/orm/base.py
Normal file
492
lib/python3.4/site-packages/sqlalchemy/orm/base.py
Normal file
|
|
@ -0,0 +1,492 @@
|
|||
# orm/base.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Constants and rudimental functions used throughout the ORM.
|
||||
|
||||
"""
|
||||
|
||||
from .. import util, inspection, exc as sa_exc
|
||||
from ..sql import expression
|
||||
from . import exc
|
||||
import operator
|
||||
|
||||
# ---------------------------------------------------------------------------
# Sentinel symbols returned by loader callables / attribute-history
# operations.

PASSIVE_NO_RESULT = util.symbol(
    'PASSIVE_NO_RESULT',
    """Symbol returned by a loader callable or other attribute/history
    retrieval operation when a value could not be determined, based
    on loader callable flags.
    """
)

ATTR_WAS_SET = util.symbol(
    'ATTR_WAS_SET',
    """Symbol returned by a loader callable to indicate the
    retrieved value, or values, were assigned to their attributes
    on the target object.
    """
)

ATTR_EMPTY = util.symbol(
    'ATTR_EMPTY',
    """Symbol used internally to indicate an attribute had no callable."""
)

NO_VALUE = util.symbol(
    'NO_VALUE',
    """Symbol which may be placed as the 'previous' value of an attribute,
    indicating no value was loaded for an attribute when it was modified,
    and flags indicated we were not to load it.
    """
)

NEVER_SET = util.symbol(
    'NEVER_SET',
    """Symbol which may be placed as the 'previous' value of an attribute
    indicating that the attribute had not been assigned to previously.
    """
)

# ---------------------------------------------------------------------------
# Individual "passive" loading flags.  The ``canonical`` values are powers
# of two so the flags can be combined bitwise into the composite symbols
# further below.

NO_CHANGE = util.symbol(
    "NO_CHANGE",
    """No callables or SQL should be emitted on attribute access
    and no state should change
    """, canonical=0
)

CALLABLES_OK = util.symbol(
    "CALLABLES_OK",
    """Loader callables can be fired off if a value
    is not present.
    """, canonical=1
)

SQL_OK = util.symbol(
    "SQL_OK",
    """Loader callables can emit SQL at least on scalar value attributes.""",
    canonical=2
)

RELATED_OBJECT_OK = util.symbol(
    "RELATED_OBJECT_OK",
    """Callables can use SQL to load related objects as well
    as scalar value attributes.
    """, canonical=4
)

INIT_OK = util.symbol(
    "INIT_OK",
    """Attributes should be initialized with a blank
    value (None or an empty collection) upon get, if no other
    value can be obtained.
    """, canonical=8
)

NON_PERSISTENT_OK = util.symbol(
    "NON_PERSISTENT_OK",
    """Callables can be emitted if the parent is not persistent.""",
    canonical=16
)

LOAD_AGAINST_COMMITTED = util.symbol(
    "LOAD_AGAINST_COMMITTED",
    """Callables should use committed values as primary/foreign keys during a
    load.
    """, canonical=32
)

NO_AUTOFLUSH = util.symbol(
    "NO_AUTOFLUSH",
    """Loader callables should disable autoflush.""",
    canonical=64
)

# pre-packaged sets of flags used as inputs
PASSIVE_OFF = util.symbol(
    "PASSIVE_OFF",
    "Callables can be emitted in all cases.",
    canonical=(RELATED_OBJECT_OK | NON_PERSISTENT_OK |
               INIT_OK | CALLABLES_OK | SQL_OK)
)
PASSIVE_RETURN_NEVER_SET = util.symbol(
    "PASSIVE_RETURN_NEVER_SET",
    """PASSIVE_OFF ^ INIT_OK""",
    canonical=PASSIVE_OFF ^ INIT_OK
)
PASSIVE_NO_INITIALIZE = util.symbol(
    "PASSIVE_NO_INITIALIZE",
    "PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK",
    canonical=PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK
)
PASSIVE_NO_FETCH = util.symbol(
    "PASSIVE_NO_FETCH",
    "PASSIVE_OFF ^ SQL_OK",
    canonical=PASSIVE_OFF ^ SQL_OK
)
PASSIVE_NO_FETCH_RELATED = util.symbol(
    "PASSIVE_NO_FETCH_RELATED",
    "PASSIVE_OFF ^ RELATED_OBJECT_OK",
    canonical=PASSIVE_OFF ^ RELATED_OBJECT_OK
)
PASSIVE_ONLY_PERSISTENT = util.symbol(
    "PASSIVE_ONLY_PERSISTENT",
    "PASSIVE_OFF ^ NON_PERSISTENT_OK",
    canonical=PASSIVE_OFF ^ NON_PERSISTENT_OK
)

# ---------------------------------------------------------------------------
# Attribute names under which instrumentation state is stored on mapped
# classes and instances.
DEFAULT_MANAGER_ATTR = '_sa_class_manager'
DEFAULT_STATE_ATTR = '_sa_instance_state'
_INSTRUMENTOR = ('mapper', 'instrumentor')

# Control-flow symbols returned by (deprecated) extension hook methods.
EXT_CONTINUE = util.symbol('EXT_CONTINUE')
EXT_STOP = util.symbol('EXT_STOP')

# Relationship direction symbols.
ONETOMANY = util.symbol(
    'ONETOMANY',
    """Indicates the one-to-many direction for a :func:`.relationship`.

    This symbol is typically used by the internals but may be exposed within
    certain API features.

    """)

MANYTOONE = util.symbol(
    'MANYTOONE',
    """Indicates the many-to-one direction for a :func:`.relationship`.

    This symbol is typically used by the internals but may be exposed within
    certain API features.

    """)

MANYTOMANY = util.symbol(
    'MANYTOMANY',
    """Indicates the many-to-many direction for a :func:`.relationship`.

    This symbol is typically used by the internals but may be exposed within
    certain API features.

    """)

NOT_EXTENSION = util.symbol(
    'NOT_EXTENSION',
    """Symbol indicating an :class:`_InspectionAttr` that's
    not part of sqlalchemy.ext.

    Is assigned to the :attr:`._InspectionAttr.extension_type`
    attibute.

    """)

# Frozen singleton used for "is every element None" style membership tests.
_none_set = frozenset([None])
|
||||
|
||||
|
||||
def _generative(*assertions):
    """Mark a method as generative, e.g. method-chained.

    Each positional *assertion* is a callable invoked as
    ``assertion(self, fn.__name__)`` against the cloned object before the
    wrapped method runs; presumably each raises if the call is not legal
    at that point — TODO confirm against the assertion helpers callers use.
    The wrapped method is executed against a ``_clone()`` of ``self`` for
    its side effects only; the clone itself is returned, enabling
    ``obj.foo().bar()`` chaining without mutating the original object.
    """

    @util.decorator
    def generate(fn, *args, **kw):
        # args[0] is ``self``; operate on a clone so the original
        # object is left untouched.
        self = args[0]._clone()
        for assertion in assertions:
            assertion(self, fn.__name__)
        # fn's return value is intentionally discarded; the mutated
        # clone is the result of the generative call.
        fn(self, *args[1:], **kw)
        return self
    return generate
|
||||
|
||||
|
||||
# these can be replaced by sqlalchemy.ext.instrumentation
|
||||
# if augmented class instrumentation is enabled.
|
||||
def manager_of_class(cls):
    """Return the class manager stored directly on *cls*, or None.

    Only the class' own namespace is consulted (``vars(cls)``), so a
    manager inherited from a base class is deliberately not returned.
    """
    return vars(cls).get(DEFAULT_MANAGER_ATTR)
|
||||
|
||||
# Fast accessors for per-instance ORM state and the instance __dict__.
# Like manager_of_class above, these can be replaced by
# sqlalchemy.ext.instrumentation when augmented instrumentation is enabled.
instance_state = operator.attrgetter(DEFAULT_STATE_ATTR)

instance_dict = operator.attrgetter('__dict__')
|
||||
|
||||
|
||||
def instance_str(instance):
    """Return a string describing an instance."""
    state = instance_state(instance)
    return state_str(state)
|
||||
|
||||
|
||||
def state_str(state):
    """Return a string describing an instance via its InstanceState."""
    if state is None:
        return "None"
    # Render as "<ClassName at 0x...>" using the tracked instance's id.
    return '<{0} at 0x{1:x}>'.format(state.class_.__name__, id(state.obj()))
|
||||
|
||||
|
||||
def state_class_str(state):
    """Return a string describing an instance's class via its
    InstanceState.
    """
    if state is None:
        return "None"
    return '<{0}>'.format(state.class_.__name__)
|
||||
|
||||
|
||||
def attribute_str(instance, attribute):
    """Return '<instance>.<attribute>' for diagnostics."""
    return "%s.%s" % (instance_str(instance), attribute)
|
||||
|
||||
|
||||
def state_attribute_str(state, attribute):
    """Return '<state>.<attribute>' for diagnostics."""
    return "%s.%s" % (state_str(state), attribute)
|
||||
|
||||
|
||||
def object_mapper(instance):
    """Given an object, return the primary Mapper associated with the object
    instance.

    Raises :class:`sqlalchemy.orm.exc.UnmappedInstanceError`
    if no mapping is configured.

    This function is available via the inspection system as::

        inspect(instance).mapper

    Using the inspection system will raise
    :class:`sqlalchemy.exc.NoInspectionAvailable` if the instance is
    not part of a mapping.

    """
    state = object_state(instance)
    return state.mapper
|
||||
|
||||
|
||||
def object_state(instance):
    """Given an object, return the :class:`.InstanceState`
    associated with the object.

    Raises :class:`sqlalchemy.orm.exc.UnmappedInstanceError`
    if no mapping is configured.

    Equivalent functionality is available via the :func:`.inspect`
    function as::

        inspect(instance)

    Using the inspection system will raise
    :class:`sqlalchemy.exc.NoInspectionAvailable` if the instance is
    not part of a mapping.

    """
    state = _inspect_mapped_object(instance)
    # Guard clause: no state means the object is not instrumented.
    if state is None:
        raise exc.UnmappedInstanceError(instance)
    return state
|
||||
|
||||
|
||||
@inspection._inspects(object)
def _inspect_mapped_object(instance):
    """Return the :class:`.InstanceState` for *instance*, or None if the
    object is not instrumented / not mapped.
    """
    try:
        return instance_state(instance)
    except (exc.UnmappedClassError, ) + exc.NO_STATE:
        # exc.NO_STATE is a tuple of exception classes, so it can be
        # concatenated with UnmappedClassError to catch everything in a
        # single clause (resolves the former py2/py3 TODO here).
        return None
|
||||
|
||||
|
||||
def _class_to_mapper(class_or_mapper):
    """Coerce a class or mapper to its :class:`.Mapper`, raising
    :class:`.UnmappedClassError` if no mapping is available.
    """
    insp = inspection.inspect(class_or_mapper, False)
    if insp is None:
        raise exc.UnmappedClassError(class_or_mapper)
    return insp.mapper
|
||||
|
||||
|
||||
def _mapper_or_none(entity):
    """Return the :class:`.Mapper` for the given class or None if the
    class is not mapped.
    """
    insp = inspection.inspect(entity, False)
    if insp is None:
        return None
    return insp.mapper
|
||||
|
||||
|
||||
def _is_mapped_class(entity):
    """Return True if the given object is a mapped class,
    :class:`.Mapper`, or :class:`.AliasedClass`.
    """
    insp = inspection.inspect(entity, False)
    # Guard clauses replace the original chained boolean expression.
    if insp is None:
        return False
    if not hasattr(insp, "mapper"):
        return False
    return insp.is_mapper or insp.is_aliased_class
|
||||
|
||||
|
||||
def _attr_as_key(attr):
|
||||
if hasattr(attr, 'key'):
|
||||
return attr.key
|
||||
else:
|
||||
return expression._column_as_key(attr)
|
||||
|
||||
|
||||
def _orm_columns(entity):
    """Return the list of columns for *entity*.

    If the inspected entity exposes a ``selectable``, its column
    collection is materialized as a list; otherwise the entity itself is
    returned wrapped in a one-element list.
    """
    insp = inspection.inspect(entity, False)
    if hasattr(insp, 'selectable'):
        # list(...) replaces the redundant identity list comprehension
        return list(insp.selectable.c)
    else:
        return [entity]
|
||||
|
||||
|
||||
def _is_aliased_class(entity):
    """Return True if *entity* inspects as an aliased class."""
    insp = inspection.inspect(entity, False)
    if insp is None:
        return False
    return getattr(insp, "is_aliased_class", False)
|
||||
|
||||
|
||||
def _entity_descriptor(entity, key):
    """Return a class attribute given an entity and string name.

    May return :class:`.InstrumentedAttribute` or user-defined
    attribute.

    """
    insp = inspection.inspect(entity)
    # Branch order is significant: a selectable is unwrapped to its
    # column collection, an aliased class to its underlying entity, and
    # a mapper-bearing inspection to its mapped class; ``description``
    # keeps the original entity for the error message below.
    if insp.is_selectable:
        description = entity
        entity = insp.c
    elif insp.is_aliased_class:
        entity = insp.entity
        description = entity
    elif hasattr(insp, "mapper"):
        description = entity = insp.mapper.class_
    else:
        description = entity

    try:
        return getattr(entity, key)
    except AttributeError:
        raise sa_exc.InvalidRequestError(
            "Entity '%s' has no property '%s'" %
            (description, key)
        )
|
||||
|
||||
# Dotted-path accessor equivalent to ``lambda state: state.manager.mapper``.
_state_mapper = util.dottedgetter('manager.mapper')
|
||||
|
||||
|
||||
@inspection._inspects(type)
def _inspect_mapped_class(class_, configure=False):
    """Return the :class:`.Mapper` for *class_*, or None if the class is
    not mapped.

    When *configure* is True and new mappers are pending, the mapper
    configure step is run before returning.
    """
    try:
        class_manager = manager_of_class(class_)
        if class_manager is None:
            # class is not instrumented at all; previously this
            # presumably surfaced as an AttributeError swallowed by
            # exc.NO_STATE below — make the common case explicit.
            return None
        if not class_manager.is_mapped:
            return None
        mapper = class_manager.mapper
    except exc.NO_STATE:
        return None
    else:
        if configure and mapper._new_mappers:
            mapper._configure_all()
        return mapper
|
||||
|
||||
|
||||
def class_mapper(class_, configure=True):
    """Given a class, return the primary :class:`.Mapper` associated
    with the key.

    Raises :exc:`.UnmappedClassError` if no mapping is configured
    on the given class, or :exc:`.ArgumentError` if a non-class
    object is passed.

    Equivalent functionality is available via the :func:`.inspect`
    function as::

        inspect(some_mapped_class)

    Using the inspection system will raise
    :class:`sqlalchemy.exc.NoInspectionAvailable` if the class is not mapped.

    """
    mapper = _inspect_mapped_class(class_, configure=configure)
    if mapper is not None:
        return mapper
    # Distinguish "not a class at all" from "class without a mapping".
    if not isinstance(class_, type):
        raise sa_exc.ArgumentError(
            "Class object expected, got '%r'." % (class_, ))
    raise exc.UnmappedClassError(class_)
|
||||
|
||||
|
||||
class _InspectionAttr(object):
    """A base class applied to all ORM objects that can be returned
    by the :func:`.inspect` function.

    The attributes defined here allow the usage of simple boolean
    checks to test basic facts about the object returned.

    While the boolean checks here are basically the same as using
    the Python isinstance() function, the flags here can be used without
    the need to import all of these classes, and also such that
    the SQLAlchemy class system can change while leaving the flags
    here intact for forwards-compatibility.

    """
    # All flags default to False here; concrete subclasses flip the one
    # that describes them.

    is_selectable = False
    """Return True if this object is an instance of :class:`.Selectable`."""

    is_aliased_class = False
    """True if this object is an instance of :class:`.AliasedClass`."""

    is_instance = False
    """True if this object is an instance of :class:`.InstanceState`."""

    is_mapper = False
    """True if this object is an instance of :class:`.Mapper`."""

    is_property = False
    """True if this object is an instance of :class:`.MapperProperty`."""

    is_attribute = False
    """True if this object is a Python :term:`descriptor`.

    This can refer to one of many types.   Usually a
    :class:`.QueryableAttribute` which handles attributes events on behalf
    of a :class:`.MapperProperty`.   But can also be an extension type
    such as :class:`.AssociationProxy` or :class:`.hybrid_property`.
    The :attr:`._InspectionAttr.extension_type` will refer to a constant
    identifying the specific subtype.

    .. seealso::

        :attr:`.Mapper.all_orm_descriptors`

    """

    is_clause_element = False
    """True if this object is an instance of :class:`.ClauseElement`."""

    extension_type = NOT_EXTENSION
    """The extension type, if any.
    Defaults to :data:`.interfaces.NOT_EXTENSION`

    .. versionadded:: 0.8.0

    .. seealso::

        :data:`.HYBRID_METHOD`

        :data:`.HYBRID_PROPERTY`

        :data:`.ASSOCIATION_PROXY`

    """
|
||||
|
||||
|
||||
class _MappedAttribute(object):
    """Mixin for attributes which should be replaced by mapper-assigned
    attributes.

    """
    # Pure marker mixin: defines no behavior of its own.
|
||||
1554
lib/python3.4/site-packages/sqlalchemy/orm/collections.py
Normal file
1554
lib/python3.4/site-packages/sqlalchemy/orm/collections.py
Normal file
File diff suppressed because it is too large
Load diff
1172
lib/python3.4/site-packages/sqlalchemy/orm/dependency.py
Normal file
1172
lib/python3.4/site-packages/sqlalchemy/orm/dependency.py
Normal file
File diff suppressed because it is too large
Load diff
|
|
@ -0,0 +1,593 @@
|
|||
# orm/deprecated_interfaces.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
from .. import event, util
|
||||
from .interfaces import EXT_CONTINUE
|
||||
|
||||
|
||||
@util.langhelpers.dependency_for("sqlalchemy.orm.interfaces")
|
||||
class MapperExtension(object):
|
||||
"""Base implementation for :class:`.Mapper` event hooks.
|
||||
|
||||
.. note::
|
||||
|
||||
:class:`.MapperExtension` is deprecated. Please
|
||||
refer to :func:`.event.listen` as well as
|
||||
:class:`.MapperEvents`.
|
||||
|
||||
New extension classes subclass :class:`.MapperExtension` and are specified
|
||||
using the ``extension`` mapper() argument, which is a single
|
||||
:class:`.MapperExtension` or a list of such::
|
||||
|
||||
from sqlalchemy.orm.interfaces import MapperExtension
|
||||
|
||||
class MyExtension(MapperExtension):
|
||||
def before_insert(self, mapper, connection, instance):
|
||||
print "instance %s before insert !" % instance
|
||||
|
||||
m = mapper(User, users_table, extension=MyExtension())
|
||||
|
||||
A single mapper can maintain a chain of ``MapperExtension``
|
||||
objects. When a particular mapping event occurs, the
|
||||
corresponding method on each ``MapperExtension`` is invoked
|
||||
serially, and each method has the ability to halt the chain
|
||||
from proceeding further::
|
||||
|
||||
m = mapper(User, users_table, extension=[ext1, ext2, ext3])
|
||||
|
||||
Each ``MapperExtension`` method returns the symbol
|
||||
EXT_CONTINUE by default. This symbol generally means "move
|
||||
to the next ``MapperExtension`` for processing". For methods
|
||||
that return objects like translated rows or new object
|
||||
instances, EXT_CONTINUE means the result of the method
|
||||
should be ignored. In some cases it's required for a
|
||||
default mapper activity to be performed, such as adding a
|
||||
new instance to a result list.
|
||||
|
||||
The symbol EXT_STOP has significance within a chain
|
||||
of ``MapperExtension`` objects that the chain will be stopped
|
||||
when this symbol is returned. Like EXT_CONTINUE, it also
|
||||
has additional significance in some cases that a default
|
||||
mapper activity will not be performed.
|
||||
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def _adapt_instrument_class(cls, self, listener):
|
||||
cls._adapt_listener_methods(self, listener, ('instrument_class',))
|
||||
|
||||
@classmethod
|
||||
def _adapt_listener(cls, self, listener):
|
||||
cls._adapt_listener_methods(
|
||||
self, listener,
|
||||
(
|
||||
'init_instance',
|
||||
'init_failed',
|
||||
'translate_row',
|
||||
'create_instance',
|
||||
'append_result',
|
||||
'populate_instance',
|
||||
'reconstruct_instance',
|
||||
'before_insert',
|
||||
'after_insert',
|
||||
'before_update',
|
||||
'after_update',
|
||||
'before_delete',
|
||||
'after_delete'
|
||||
))
|
||||
|
||||
@classmethod
|
||||
def _adapt_listener_methods(cls, self, listener, methods):
|
||||
|
||||
for meth in methods:
|
||||
me_meth = getattr(MapperExtension, meth)
|
||||
ls_meth = getattr(listener, meth)
|
||||
|
||||
if not util.methods_equivalent(me_meth, ls_meth):
|
||||
if meth == 'reconstruct_instance':
|
||||
def go(ls_meth):
|
||||
def reconstruct(instance, ctx):
|
||||
ls_meth(self, instance)
|
||||
return reconstruct
|
||||
event.listen(self.class_manager, 'load',
|
||||
go(ls_meth), raw=False, propagate=True)
|
||||
elif meth == 'init_instance':
|
||||
def go(ls_meth):
|
||||
def init_instance(instance, args, kwargs):
|
||||
ls_meth(self, self.class_,
|
||||
self.class_manager.original_init,
|
||||
instance, args, kwargs)
|
||||
return init_instance
|
||||
event.listen(self.class_manager, 'init',
|
||||
go(ls_meth), raw=False, propagate=True)
|
||||
elif meth == 'init_failed':
|
||||
def go(ls_meth):
|
||||
def init_failed(instance, args, kwargs):
|
||||
util.warn_exception(
|
||||
ls_meth, self, self.class_,
|
||||
self.class_manager.original_init,
|
||||
instance, args, kwargs)
|
||||
|
||||
return init_failed
|
||||
event.listen(self.class_manager, 'init_failure',
|
||||
go(ls_meth), raw=False, propagate=True)
|
||||
else:
|
||||
event.listen(self, "%s" % meth, ls_meth,
|
||||
raw=False, retval=True, propagate=True)
|
||||
|
||||
def instrument_class(self, mapper, class_):
|
||||
"""Receive a class when the mapper is first constructed, and has
|
||||
applied instrumentation to the mapped class.
|
||||
|
||||
The return value is only significant within the ``MapperExtension``
|
||||
chain; the parent mapper's behavior isn't modified by this method.
|
||||
|
||||
"""
|
||||
return EXT_CONTINUE
|
||||
|
||||
def init_instance(self, mapper, class_, oldinit, instance, args, kwargs):
|
||||
"""Receive an instance when its constructor is called.
|
||||
|
||||
This method is only called during a userland construction of
|
||||
an object. It is not called when an object is loaded from the
|
||||
database.
|
||||
|
||||
The return value is only significant within the ``MapperExtension``
|
||||
chain; the parent mapper's behavior isn't modified by this method.
|
||||
|
||||
"""
|
||||
return EXT_CONTINUE
|
||||
|
||||
def init_failed(self, mapper, class_, oldinit, instance, args, kwargs):
|
||||
"""Receive an instance when its constructor has been called,
|
||||
and raised an exception.
|
||||
|
||||
This method is only called during a userland construction of
|
||||
an object. It is not called when an object is loaded from the
|
||||
database.
|
||||
|
||||
The return value is only significant within the ``MapperExtension``
|
||||
chain; the parent mapper's behavior isn't modified by this method.
|
||||
|
||||
"""
|
||||
return EXT_CONTINUE
|
||||
|
||||
def translate_row(self, mapper, context, row):
|
||||
"""Perform pre-processing on the given result row and return a
|
||||
new row instance.
|
||||
|
||||
This is called when the mapper first receives a row, before
|
||||
the object identity or the instance itself has been derived
|
||||
from that row. The given row may or may not be a
|
||||
``RowProxy`` object - it will always be a dictionary-like
|
||||
object which contains mapped columns as keys. The
|
||||
returned object should also be a dictionary-like object
|
||||
which recognizes mapped columns as keys.
|
||||
|
||||
If the ultimate return value is EXT_CONTINUE, the row
|
||||
is not translated.
|
||||
|
||||
"""
|
||||
return EXT_CONTINUE
|
||||
|
||||
    def create_instance(self, mapper, selectcontext, row, class_):
        """Receive a row when a new object instance is about to be
        created from that row.

        The method can choose to create the instance itself, or it can return
        EXT_CONTINUE to indicate normal object creation should take place.

        mapper
          The mapper doing the operation

        selectcontext
          The QueryContext generated from the Query.

        row
          The result row from the database

        class\_
          The class we are mapping.

        return value
          A new object instance, or EXT_CONTINUE

        """
        # Default hook defers instance creation to the mapper.
        return EXT_CONTINUE
|
||||
|
||||
    def append_result(self, mapper, selectcontext, row, instance,
                      result, **flags):
        """Receive an object instance before that instance is appended
        to a result list.

        If this method returns EXT_CONTINUE, result appending will proceed
        normally.  if this method returns any other value or None,
        result appending will not proceed for this instance, giving
        this extension an opportunity to do the appending itself, if
        desired.

        mapper
          The mapper doing the operation.

        selectcontext
          The QueryContext generated from the Query.

        row
          The result row from the database.

        instance
          The object instance to be appended to the result.

        result
          List to which results are being appended.

        \**flags
          extra information about the row, same as criterion in
          ``create_row_processor()`` method of
          :class:`~sqlalchemy.orm.interfaces.MapperProperty`
        """
        # Default hook lets the mapper append the instance itself.
        return EXT_CONTINUE
|
||||
|
||||
    def populate_instance(self, mapper, selectcontext, row,
                          instance, **flags):
        """Receive an instance before that instance has
        its attributes populated.

        This usually corresponds to a newly loaded instance but may
        also correspond to an already-loaded instance which has
        unloaded attributes to be populated.  The method may be called
        many times for a single instance, as multiple result rows are
        used to populate eagerly loaded collections.

        If this method returns EXT_CONTINUE, instance population will
        proceed normally.  If any other value or None is returned,
        instance population will not proceed, giving this extension an
        opportunity to populate the instance itself, if desired.

        .. deprecated:: 0.5
            Most usages of this hook are obsolete.  For a
            generic "object has been newly created from a row" hook, use
            ``reconstruct_instance()``, or the ``@orm.reconstructor``
            decorator.

        """
        # Default hook lets the mapper populate the instance itself.
        return EXT_CONTINUE
|
||||
|
||||
    def reconstruct_instance(self, mapper, instance):
        """Receive an object instance after it has been created via
        ``__new__``, and after initial attribute population has
        occurred.

        This typically occurs when the instance is created based on
        incoming result rows, and is only called once for that
        instance's lifetime.

        Note that during a result-row load, this method is called upon
        the first row received for this instance.  Note that some
        attributes and collections may or may not be loaded or even
        initialized, depending on what's present in the result rows.

        The return value is only significant within the ``MapperExtension``
        chain; the parent mapper's behavior isn't modified by this method.

        """
        # Default hook is a no-op.
        return EXT_CONTINUE
|
||||
|
||||
    def before_insert(self, mapper, connection, instance):
        """Receive an object instance before that instance is inserted
        into its table.

        This is a good place to set up primary key values and such
        that aren't handled otherwise.

        Column-based attributes can be modified within this method
        which will result in the new value being inserted.  However
        *no* changes to the overall flush plan can be made, and
        manipulation of the ``Session`` will not have the desired effect.
        To manipulate the ``Session`` within an extension, use
        ``SessionExtension``.

        The return value is only significant within the ``MapperExtension``
        chain; the parent mapper's behavior isn't modified by this method.

        """
        # Default hook is a no-op.
        return EXT_CONTINUE
|
||||
|
||||
    def after_insert(self, mapper, connection, instance):
        """Receive an object instance after that instance is inserted.

        The return value is only significant within the ``MapperExtension``
        chain; the parent mapper's behavior isn't modified by this method.

        """
        # Default hook is a no-op.
        return EXT_CONTINUE
|
||||
|
||||
    def before_update(self, mapper, connection, instance):
        """Receive an object instance before that instance is updated.

        Note that this method is called for all instances that are marked as
        "dirty", even those which have no net changes to their column-based
        attributes.  An object is marked as dirty when any of its column-based
        attributes have a "set attribute" operation called or when any of its
        collections are modified.  If, at update time, no column-based
        attributes have any net changes, no UPDATE statement will be issued.
        This means that an instance being sent to before_update is *not* a
        guarantee that an UPDATE statement will be issued (although you can
        affect the outcome here).

        To detect if the column-based attributes on the object have net
        changes, and will therefore generate an UPDATE statement, use
        ``object_session(instance).is_modified(instance,
        include_collections=False)``.

        Column-based attributes can be modified within this method
        which will result in the new value being updated.  However
        *no* changes to the overall flush plan can be made, and
        manipulation of the ``Session`` will not have the desired effect.
        To manipulate the ``Session`` within an extension, use
        ``SessionExtension``.

        The return value is only significant within the ``MapperExtension``
        chain; the parent mapper's behavior isn't modified by this method.

        """
        # Default hook is a no-op.
        return EXT_CONTINUE
|
||||
|
||||
    def after_update(self, mapper, connection, instance):
        """Receive an object instance after that instance is updated.

        The return value is only significant within the ``MapperExtension``
        chain; the parent mapper's behavior isn't modified by this method.

        """
        # Default hook is a no-op.
        return EXT_CONTINUE
|
||||
|
||||
    def before_delete(self, mapper, connection, instance):
        """Receive an object instance before that instance is deleted.

        Note that *no* changes to the overall flush plan can be made
        here; and manipulation of the ``Session`` will not have the
        desired effect.  To manipulate the ``Session`` within an
        extension, use ``SessionExtension``.

        The return value is only significant within the ``MapperExtension``
        chain; the parent mapper's behavior isn't modified by this method.

        """
        # Default hook is a no-op.
        return EXT_CONTINUE
|
||||
|
||||
    def after_delete(self, mapper, connection, instance):
        """Receive an object instance after that instance is deleted.

        The return value is only significant within the ``MapperExtension``
        chain; the parent mapper's behavior isn't modified by this method.

        """
        # Default hook is a no-op.
        return EXT_CONTINUE
|
||||
|
||||
|
||||
@util.langhelpers.dependency_for("sqlalchemy.orm.interfaces")
class SessionExtension(object):

    """Base implementation for :class:`.Session` event hooks.

    .. note::

       :class:`.SessionExtension` is deprecated.  Please
       refer to :func:`.event.listen` as well as
       :class:`.SessionEvents`.

    An instance of a subclass is installed into a :class:`.Session`
    (or :class:`.sessionmaker`) via the ``extension`` keyword
    argument::

        from sqlalchemy.orm.interfaces import SessionExtension

        class MySessionExtension(SessionExtension):
            def before_commit(self, session):
                print "before commit!"

        Session = sessionmaker(extension=MySessionExtension())

    A single :class:`.SessionExtension` instance may be shared
    across any number of sessions.

    """

    @classmethod
    def _adapt_listener(cls, self, listener):
        # Bridge the legacy extension API onto the event system:
        # register a listener only for those hooks the subclass
        # actually overrides.
        hook_names = (
            'before_commit',
            'after_commit',
            'after_rollback',
            'before_flush',
            'after_flush',
            'after_flush_postexec',
            'after_begin',
            'after_attach',
            'after_bulk_update',
            'after_bulk_delete',
        )
        for hook in hook_names:
            base_impl = getattr(SessionExtension, hook)
            listener_impl = getattr(listener, hook)

            if not util.methods_equivalent(base_impl, listener_impl):
                event.listen(self, hook, listener_impl)

    def before_commit(self, session):
        """Called just before commit proceeds.

        May fire less often than once per flush if a longer
        running transaction is in progress."""

    def after_commit(self, session):
        """Called after a commit has taken place.

        May fire less often than once per flush if a longer
        running transaction is in progress."""

    def after_rollback(self, session):
        """Called after a rollback has taken place.

        May fire less often than once per flush if a longer
        running transaction is in progress."""

    def before_flush(self, session, flush_context, instances):
        """Called before the flush process begins.

        ``instances`` is the optional list of objects that were
        passed to the ``flush()`` method."""

    def after_flush(self, session, flush_context):
        """Called once the flush completes, but prior to commit.

        The session still reflects pre-flush state here: the 'new',
        'dirty' and 'deleted' lists, as well as attribute history,
        show their values from before the flush."""

    def after_flush_postexec(self, session, flush_context):
        """Called after the flush and its post-exec phase complete.

        At this point the 'new', 'dirty' and 'deleted' lists are in
        their final state.  Whether a commit() actually took place
        depends on whether the flush started its own transaction or
        participated in a larger one."""

    def after_begin(self, session, transaction, connection):
        """Called after a transaction begins on a connection.

        ``transaction`` is the SessionTransaction; this fires after
        an engine-level transaction has begun on ``connection``."""

    def after_attach(self, session, instance):
        """Called after an instance becomes attached to a session.

        Fires following an add, delete or merge."""

    def after_bulk_update(self, session, query, query_context, result):
        """Called after a bulk UPDATE against the session.

        Fires after a ``session.query(...).update()``.  ``query`` is
        the query object the update was invoked on, ``query_context``
        the query context, and ``result`` the result object returned
        by the bulk operation.
        """

    def after_bulk_delete(self, session, query, query_context, result):
        """Called after a bulk DELETE against the session.

        Fires after a ``session.query(...).delete()``.  ``query`` is
        the query object the delete was invoked on, ``query_context``
        the query context, and ``result`` the result object returned
        by the bulk operation.
        """
|
||||
|
||||
|
||||
@util.langhelpers.dependency_for("sqlalchemy.orm.interfaces")
class AttributeExtension(object):
    """Base implementation for :class:`.AttributeImpl` event hooks,
    firing upon attribute mutations made in user code.

    .. note::

       :class:`.AttributeExtension` is deprecated.  Please
       refer to :func:`.event.listen` as well as
       :class:`.AttributeEvents`.

    :class:`.AttributeExtension` listens for set, remove and append
    events on an individual mapped attribute.  It is installed on a
    per-attribute basis via the `extension` argument accepted by
    :func:`.column_property`, :func:`.relationship` and others::

        from sqlalchemy.orm.interfaces import AttributeExtension
        from sqlalchemy.orm import mapper, relationship, column_property

        class MyAttrExt(AttributeExtension):
            def append(self, state, value, initiator):
                print "append event !"
                return value

            def set(self, state, value, oldvalue, initiator):
                print "set event !"
                return value

        mapper(SomeClass, sometable, properties={
            'foo':column_property(sometable.c.foo, extension=MyAttrExt()),
            'bar':relationship(Bar, extension=MyAttrExt())
        })

    The :meth:`~.AttributeExtension.append` and
    :meth:`~.AttributeExtension.set` methods must return their
    ``value`` parameter; the returned value becomes the effective
    value, which lets the extension change what is ultimately
    persisted.

    AttributeExtension is assembled within the descriptors associated
    with a mapped class.

    """

    active_history = True
    """indicates that the set() method would like to receive the 'old' value,
    even if it means firing lazy callables.

    Note that ``active_history`` can also be set directly via
    :func:`.column_property` and :func:`.relationship`.

    """

    @classmethod
    def _adapt_listener(cls, self, listener):
        # Re-register each legacy hook through the event system.  The
        # listener's active_history preference is carried over, and
        # retval=True propagates the hook's return value as the
        # effective attribute value.
        for event_name, handler in (
            ('append', listener.append),
            ('remove', listener.remove),
            ('set', listener.set),
        ):
            event.listen(self, event_name, handler,
                         active_history=listener.active_history,
                         raw=True, retval=True)

    def append(self, state, value, initiator):
        """Receive a collection append event.

        The value returned here is used as the actual value to
        be appended.

        """
        return value

    def remove(self, state, value, initiator):
        """Receive a remove event.

        No return value is defined.

        """
        pass

    def set(self, state, value, oldvalue, initiator):
        """Receive a set event.

        The value returned here is used as the actual value to
        be set.

        """
        return value
|
||||
680
lib/python3.4/site-packages/sqlalchemy/orm/descriptor_props.py
Normal file
680
lib/python3.4/site-packages/sqlalchemy/orm/descriptor_props.py
Normal file
|
|
@ -0,0 +1,680 @@
|
|||
# orm/descriptor_props.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Descriptor properties are more "auxiliary" properties
|
||||
that exist as configurational elements, but don't participate
|
||||
as actively in the load/persist ORM loop.
|
||||
|
||||
"""
|
||||
|
||||
from .interfaces import MapperProperty, PropComparator
|
||||
from .util import _none_set
|
||||
from . import attributes
|
||||
from .. import util, sql, exc as sa_exc, event, schema
|
||||
from ..sql import expression
|
||||
from . import properties
|
||||
from . import query
|
||||
|
||||
|
||||
class DescriptorProperty(MapperProperty):
    """:class:`.MapperProperty` which proxies access to a
    user-defined descriptor."""

    # docstring applied to the class-bound descriptor; subclasses and
    # constructors may assign a string here.
    doc = None

    def instrument_class(self, mapper):
        """Install this property's descriptor onto the mapped class,
        wrapped in an instrumented proxy attribute.

        """
        prop = self

        class _ProxyImpl(object):
            # Minimal stand-in for an AttributeImpl: just enough state
            # for the instrumentation machinery to treat the proxied
            # descriptor like a mapped attribute.
            accepts_scalar_loader = False
            expire_missing = True
            collection = False

            def __init__(self, key):
                self.key = key

            # forward history requests to the property when it
            # implements get_history (e.g. CompositeProperty)
            if hasattr(prop, 'get_history'):
                def get_history(self, state, dict_,
                                passive=attributes.PASSIVE_OFF):
                    return prop.get_history(state, dict_, passive)

        if self.descriptor is None:
            # no descriptor supplied; adopt a userland descriptor
            # already present on the class under this key, if any
            desc = getattr(mapper.class_, self.key, None)
            if mapper._is_userland_descriptor(desc):
                self.descriptor = desc

        if self.descriptor is None:
            # still nothing - fall back to a plain property that
            # redirects to the attribute named by self.name
            def fset(obj, value):
                setattr(obj, self.name, value)

            def fdel(obj):
                delattr(obj, self.name)

            def fget(obj):
                return getattr(obj, self.name)

            self.descriptor = property(
                fget=fget,
                fset=fset,
                fdel=fdel,
            )

        # wrap the descriptor in a proxied InstrumentedAttribute and
        # install it on the mapped class
        proxy_attr = attributes.create_proxied_attribute(
            self.descriptor)(
            self.parent.class_,
            self.key,
            self.descriptor,
            lambda: self._comparator_factory(mapper),
            doc=self.doc,
            original_property=self
        )
        proxy_attr.impl = _ProxyImpl(self.key)
        mapper.class_manager.instrument_attribute(self.key, proxy_attr)
|
||||
|
||||
|
||||
@util.langhelpers.dependency_for("sqlalchemy.orm.properties")
class CompositeProperty(DescriptorProperty):
    """Defines a "composite" mapped attribute, representing a collection
    of columns as one attribute.

    :class:`.CompositeProperty` is constructed using the :func:`.composite`
    function.

    .. seealso::

        :ref:`mapper_composite`

    """

    def __init__(self, class_, *attrs, **kwargs):
        """Return a composite column-based property for use with a Mapper.

        See the mapping documentation section :ref:`mapper_composite` for a
        full usage example.

        The :class:`.MapperProperty` returned by :func:`.composite`
        is the :class:`.CompositeProperty`.

        :param class\_:
          The "composite type" class.

        :param \*cols:
          List of Column objects to be mapped.

        :param active_history=False:
          When ``True``, indicates that the "previous" value for a
          scalar attribute should be loaded when replaced, if not
          already loaded.  See the same flag on :func:`.column_property`.

          .. versionchanged:: 0.7
              This flag specifically becomes meaningful
              - previously it was a placeholder.

        :param group:
          A group name for this property when marked as deferred.

        :param deferred:
          When True, the column property is "deferred", meaning that it does
          not load immediately, and is instead loaded when the attribute is
          first accessed on an instance.  See also
          :func:`~sqlalchemy.orm.deferred`.

        :param comparator_factory:  a class which extends
          :class:`.CompositeProperty.Comparator` which provides custom SQL
          clause generation for comparison operations.

        :param doc:
          optional string that will be applied as the doc on the
          class-bound descriptor.

        :param info: Optional data dictionary which will be populated into the
            :attr:`.MapperProperty.info` attribute of this object.

            .. versionadded:: 0.8

        :param extension:
          an :class:`.AttributeExtension` instance,
          or list of extensions, which will be prepended to the list of
          attribute listeners for the resulting descriptor placed on the
          class.  **Deprecated.**  Please see :class:`.AttributeEvents`.

        """

        self.attrs = attrs
        self.composite_class = class_
        self.active_history = kwargs.get('active_history', False)
        self.deferred = kwargs.get('deferred', False)
        self.group = kwargs.get('group', None)
        self.comparator_factory = kwargs.pop('comparator_factory',
                                             self.__class__.Comparator)
        if 'info' in kwargs:
            self.info = kwargs.pop('info')

        util.set_creation_order(self)
        self._create_descriptor()

    def instrument_class(self, mapper):
        # install the descriptor, then hook up load/expire/flush events
        super(CompositeProperty, self).instrument_class(mapper)
        self._setup_event_handlers()

    def do_init(self):
        """Initialization which occurs after the :class:`.CompositeProperty`
        has been associated with its parent mapper.

        """
        self._setup_arguments_on_columns()

    def _create_descriptor(self):
        """Create the Python descriptor that will serve as
        the access point on instances of the mapped class.

        """

        def fget(instance):
            dict_ = attributes.instance_dict(instance)
            state = attributes.instance_state(instance)

            if self.key not in dict_:
                # key not present.  Iterate through related
                # attributes, retrieve their values.  This
                # ensures they all load.
                values = [
                    getattr(instance, key)
                    for key in self._attribute_keys
                ]

                # current expected behavior here is that the composite is
                # created on access if the object is persistent or if
                # col attributes have non-None.  This would be better
                # if the composite were created unconditionally,
                # but that would be a behavioral change.
                if self.key not in dict_ and (
                    state.key is not None or
                    not _none_set.issuperset(values)
                ):
                    dict_[self.key] = self.composite_class(*values)
                    state.manager.dispatch.refresh(state, None, [self.key])

            return dict_.get(self.key, None)

        def fset(instance, value):
            # run 'set' listeners, then propagate the composite's
            # component values onto the underlying mapped attributes
            dict_ = attributes.instance_dict(instance)
            state = attributes.instance_state(instance)
            attr = state.manager[self.key]
            previous = dict_.get(self.key, attributes.NO_VALUE)
            for fn in attr.dispatch.set:
                value = fn(state, value, previous, attr.impl)
            dict_[self.key] = value
            if value is None:
                for key in self._attribute_keys:
                    setattr(instance, key, None)
            else:
                for key, value in zip(
                        self._attribute_keys,
                        value.__composite_values__()):
                    setattr(instance, key, value)

        def fdel(instance):
            # fire the remove event and null out component attributes
            state = attributes.instance_state(instance)
            dict_ = attributes.instance_dict(instance)
            previous = dict_.pop(self.key, attributes.NO_VALUE)
            attr = state.manager[self.key]
            attr.dispatch.remove(state, previous, attr.impl)
            for key in self._attribute_keys:
                setattr(instance, key, None)

        self.descriptor = property(fget, fset, fdel)

    @util.memoized_property
    def _comparable_elements(self):
        # class-bound attributes corresponding to each component prop
        return [
            getattr(self.parent.class_, prop.key)
            for prop in self.props
        ]

    @util.memoized_property
    def props(self):
        # resolve each constructor argument (string name, Column, or
        # InstrumentedAttribute) to its MapperProperty
        props = []
        for attr in self.attrs:
            if isinstance(attr, str):
                prop = self.parent.get_property(
                    attr, _configure_mappers=False)
            elif isinstance(attr, schema.Column):
                prop = self.parent._columntoproperty[attr]
            elif isinstance(attr, attributes.InstrumentedAttribute):
                prop = attr.property
            else:
                raise sa_exc.ArgumentError(
                    "Composite expects Column objects or mapped "
                    "attributes/attribute names as arguments, got: %r"
                    % (attr,))
            props.append(prop)
        return props

    @property
    def columns(self):
        # only those constructor arguments that were Column objects
        return [a for a in self.attrs if isinstance(a, schema.Column)]

    def _setup_arguments_on_columns(self):
        """Propagate configuration arguments made on this composite
        to the target columns, for those that apply.

        """
        for prop in self.props:
            prop.active_history = self.active_history
            if self.deferred:
                prop.deferred = self.deferred
                prop.strategy_class = prop._strategy_lookup(
                    ("deferred", True),
                    ("instrument", True))
            prop.group = self.group

    def _setup_event_handlers(self):
        """Establish events that populate/expire the composite attribute."""

        def load_handler(state, *args):
            dict_ = state.dict

            if self.key in dict_:
                return

            # if column elements aren't loaded, skip.
            # __get__() will initiate a load for those
            # columns
            for k in self._attribute_keys:
                if k not in dict_:
                    return

            # assert self.key not in dict_
            dict_[self.key] = self.composite_class(
                *[state.dict[key] for key in
                  self._attribute_keys]
            )

        def expire_handler(state, keys):
            # drop the cached composite when any component expires
            if keys is None or set(self._attribute_keys).intersection(keys):
                state.dict.pop(self.key, None)

        def insert_update_handler(mapper, connection, state):
            """After an insert or update, some columns may be expired due
            to server side defaults, or re-populated due to client side
            defaults.  Pop out the composite value here so that it
            recreates.

            """

            state.dict.pop(self.key, None)

        event.listen(self.parent, 'after_insert',
                     insert_update_handler, raw=True)
        event.listen(self.parent, 'after_update',
                     insert_update_handler, raw=True)
        event.listen(self.parent, 'load',
                     load_handler, raw=True, propagate=True)
        event.listen(self.parent, 'refresh',
                     load_handler, raw=True, propagate=True)
        event.listen(self.parent, 'expire',
                     expire_handler, raw=True, propagate=True)

        # TODO: need a deserialize hook here

    @util.memoized_property
    def _attribute_keys(self):
        # attribute names of the component properties, in order
        return [
            prop.key for prop in self.props
        ]

    def get_history(self, state, dict_, passive=attributes.PASSIVE_OFF):
        """Provided for userland code that uses attributes.get_history().

        Merges the per-component histories into a single History of
        composite objects.

        """

        added = []
        deleted = []

        has_history = False
        for prop in self.props:
            key = prop.key
            hist = state.manager[key].impl.get_history(state, dict_)
            if hist.has_changes():
                has_history = True

            non_deleted = hist.non_deleted()
            if non_deleted:
                added.extend(non_deleted)
            else:
                added.append(None)
            if hist.deleted:
                deleted.extend(hist.deleted)
            else:
                deleted.append(None)

        if has_history:
            return attributes.History(
                [self.composite_class(*added)],
                (),
                [self.composite_class(*deleted)]
            )
        else:
            return attributes.History(
                (), [self.composite_class(*added)], ()
            )

    def _comparator_factory(self, mapper):
        return self.comparator_factory(self, mapper)

    class CompositeBundle(query.Bundle):
        # Bundle subclass that re-assembles the composite object from
        # the individual column values in each result row.
        def __init__(self, property, expr):
            self.property = property
            super(CompositeProperty.CompositeBundle, self).__init__(
                property.key, *expr)

        def create_row_processor(self, query, procs, labels):
            def proc(row, result):
                return self.property.composite_class(
                    *[proc(row, result) for proc in procs])
            return proc

    class Comparator(PropComparator):
        """Produce boolean, comparison, and other operators for
        :class:`.CompositeProperty` attributes.

        See the example in :ref:`composite_operations` for an overview
        of usage, as well as the documentation for :class:`.PropComparator`.

        See also:

        :class:`.PropComparator`

        :class:`.ColumnOperators`

        :ref:`types_operators`

        :attr:`.TypeEngine.comparator_factory`

        """

        # comparisons yield SQL expressions, not booleans, so the
        # comparator is unhashable
        __hash__ = None

        @property
        def clauses(self):
            return self.__clause_element__()

        def __clause_element__(self):
            return expression.ClauseList(
                group=False, *self._comparable_elements)

        def _query_clause_element(self):
            return CompositeProperty.CompositeBundle(
                self.prop, self.__clause_element__())

        @util.memoized_property
        def _comparable_elements(self):
            if self._adapt_to_entity:
                return [
                    getattr(
                        self._adapt_to_entity.entity,
                        prop.key
                    ) for prop in self.prop._comparable_elements
                ]
            else:
                return self.prop._comparable_elements

        def __eq__(self, other):
            # comparing to None compares every component to NULL
            if other is None:
                values = [None] * len(self.prop._comparable_elements)
            else:
                values = other.__composite_values__()
            comparisons = [
                a == b
                for a, b in zip(self.prop._comparable_elements, values)
            ]
            if self._adapt_to_entity:
                comparisons = [self.adapter(x) for x in comparisons]
            return sql.and_(*comparisons)

        def __ne__(self, other):
            return sql.not_(self.__eq__(other))

    def __str__(self):
        return str(self.parent.class_.__name__) + "." + self.key
|
||||
|
||||
|
||||
@util.langhelpers.dependency_for("sqlalchemy.orm.properties")
class ConcreteInheritedProperty(DescriptorProperty):
    """A 'do nothing' :class:`.MapperProperty` that disables
    an attribute on a concrete subclass that is only present
    on the inherited mapper, not the concrete classes' mapper.

    Cases where this occurs include:

    * When the superclass mapper is mapped against a
      "polymorphic union", which includes all attributes from
      all subclasses.
    * When a relationship() is configured on an inherited mapper,
      but not on the subclass mapper.  Concrete mappers require
      that relationship() is configured explicitly on each
      subclass.

    """

    def _comparator_factory(self, mapper):
        # walk up the inheritance chain to find the first
        # non-disabled property and borrow its comparator factory
        comparator_callable = None

        for m in self.parent.iterate_to_root():
            p = m._props[self.key]
            if not isinstance(p, ConcreteInheritedProperty):
                comparator_callable = p.comparator_factory
                break
        return comparator_callable

    def __init__(self):
        def warn():
            raise AttributeError("Concrete %s does not implement "
                                 "attribute %r at the instance level. Add "
                                 "this property explicitly to %s." %
                                 (self.parent, self.key, self.parent))

        class NoninheritedConcreteProp(object):
            # descriptor that raises on any instance-level access,
            # but still yields the descriptor itself on class access
            # (obj is None)
            def __set__(s, obj, value):
                warn()

            def __delete__(s, obj):
                warn()

            def __get__(s, obj, owner):
                if obj is None:
                    return self.descriptor
                warn()
        self.descriptor = NoninheritedConcreteProp()
|
||||
|
||||
|
||||
@util.langhelpers.dependency_for("sqlalchemy.orm.properties")
class SynonymProperty(DescriptorProperty):

    def __init__(self, name, map_column=None,
                 descriptor=None, comparator_factory=None,
                 doc=None):
        """Denote an attribute name as a synonym to a mapped property,
        in that the attribute will mirror the value and expression behavior
        of another attribute.

        :param name: the name of the existing mapped property.  This
          can refer to the string name of any :class:`.MapperProperty`
          configured on the class, including column-bound attributes
          and relationships.

        :param descriptor: a Python :term:`descriptor` that will be used
          as a getter (and potentially a setter) when this attribute is
          accessed at the instance level.

        :param map_column: if ``True``, the :func:`.synonym` construct will
          locate the existing named :class:`.MapperProperty` based on the
          attribute name of this :func:`.synonym`, and assign it to a new
          attribute linked to the name of this :func:`.synonym`.
          That is, given a mapping like::

              class MyClass(Base):
                  __tablename__ = 'my_table'

                  id = Column(Integer, primary_key=True)
                  job_status = Column(String(50))

                  job_status = synonym("_job_status", map_column=True)

          The above class ``MyClass`` will now have the ``job_status``
          :class:`.Column` object mapped to the attribute named
          ``_job_status``, and the attribute named ``job_status`` will refer
          to the synonym itself.  This feature is typically used in
          conjunction with the ``descriptor`` argument in order to link a
          user-defined descriptor as a "wrapper" for an existing column.

        :param comparator_factory: A subclass of :class:`.PropComparator`
          that will provide custom comparison behavior at the SQL expression
          level.

          .. note::

             For the use case of providing an attribute which redefines both
             Python-level and SQL-expression level behavior of an attribute,
             please refer to the Hybrid attribute introduced at
             :ref:`mapper_hybrids` for a more effective technique.

        .. seealso::

            :ref:`synonyms` - examples of functionality.

            :ref:`mapper_hybrids` - Hybrids provide a better approach for
            more complicated attribute-wrapping schemes than synonyms.

        """
        self.name = name
        self.map_column = map_column
        self.descriptor = descriptor
        self.comparator_factory = comparator_factory
        # Prefer an explicit doc, then the wrapped descriptor's docstring.
        self.doc = doc or (descriptor and descriptor.__doc__) or None

        util.set_creation_order(self)

    # TODO: when initialized, check _proxied_property,
    # emit a warning if its not a column-based property

    @util.memoized_property
    def _proxied_property(self):
        # The MapperProperty this synonym mirrors, resolved lazily from
        # the mapped class by name.
        return getattr(self.parent.class_, self.name).property

    def _comparator_factory(self, mapper):
        # Delegate SQL-expression comparisons to the proxied property,
        # unless a custom comparator_factory was supplied.
        prop = self._proxied_property

        if self.comparator_factory:
            comp = self.comparator_factory(prop, mapper)
        else:
            comp = prop.comparator_factory(prop, mapper)
        return comp

    def set_parent(self, parent, init):
        if self.map_column:
            # implement the 'map_column' option.
            if self.key not in parent.mapped_table.c:
                raise sa_exc.ArgumentError(
                    "Can't compile synonym '%s': no column on table "
                    "'%s' named '%s'"
                    % (self.name, parent.mapped_table.description, self.key))
            elif parent.mapped_table.c[self.key] in \
                    parent._columntoproperty and \
                    parent._columntoproperty[
                        parent.mapped_table.c[self.key]
                    ].key == self.name:
                raise sa_exc.ArgumentError(
                    "Can't call map_column=True for synonym %r=%r, "
                    "a ColumnProperty already exists keyed to the name "
                    "%r for column %r" %
                    (self.key, self.name, self.name, self.key)
                )
            # Map the like-named column under the synonym's target name.
            p = properties.ColumnProperty(parent.mapped_table.c[self.key])
            parent._configure_property(
                self.name, p,
                init=init,
                setparent=True)
            p._mapped_by_synonym = self.key

        self.parent = parent
||||
|
||||
|
||||
@util.langhelpers.dependency_for("sqlalchemy.orm.properties")
class ComparableProperty(DescriptorProperty):
    """Instruments a Python property for use in query expressions."""

    def __init__(self, comparator_factory, descriptor=None, doc=None):
        """Provides a method of applying a :class:`.PropComparator`
        to any Python descriptor attribute.

        .. versionchanged:: 0.7
            :func:`.comparable_property` is superseded by
            the :mod:`~sqlalchemy.ext.hybrid` extension.  See the example
            at :ref:`hybrid_custom_comparators`.

        Allows any Python descriptor to behave like a SQL-enabled
        attribute when used at the class level in queries, allowing
        redefinition of expression operator behavior.

        In the example below we redefine :meth:`.PropComparator.operate`
        to wrap both sides of an expression in ``func.lower()`` to produce
        case-insensitive comparison::

            from sqlalchemy.orm import comparable_property
            from sqlalchemy.orm.interfaces import PropComparator
            from sqlalchemy.sql import func
            from sqlalchemy import Integer, String, Column
            from sqlalchemy.ext.declarative import declarative_base

            class CaseInsensitiveComparator(PropComparator):
                def __clause_element__(self):
                    return self.prop

                def operate(self, op, other):
                    return op(
                        func.lower(self.__clause_element__()),
                        func.lower(other)
                    )

            Base = declarative_base()

            class SearchWord(Base):
                __tablename__ = 'search_word'
                id = Column(Integer, primary_key=True)
                word = Column(String)
                word_insensitive = comparable_property(lambda prop, mapper:
                                       CaseInsensitiveComparator(
                                           mapper.c.word, mapper)
                                   )


        A mapping like the above allows the ``word_insensitive`` attribute
        to render an expression like::

            >>> print SearchWord.word_insensitive == "Trucks"
            lower(search_word.word) = lower(:lower_1)

        :param comparator_factory:
          A PropComparator subclass or factory that defines operator behavior
          for this property.

        :param descriptor:
          Optional when used in a ``properties={}`` declaration.  The Python
          descriptor or property to layer comparison behavior on top of.

          The like-named descriptor will be automatically retrieved from the
          mapped class if left blank in a ``properties`` declaration.

        """
        self.descriptor = descriptor
        self.comparator_factory = comparator_factory
        # Prefer an explicit doc, then the wrapped descriptor's docstring.
        self.doc = doc or (descriptor and descriptor.__doc__) or None
        util.set_creation_order(self)

    def _comparator_factory(self, mapper):
        # The user-supplied factory receives this property and the mapper.
        return self.comparator_factory(self, mapper)
|
||||
372
lib/python3.4/site-packages/sqlalchemy/orm/dynamic.py
Normal file
372
lib/python3.4/site-packages/sqlalchemy/orm/dynamic.py
Normal file
|
|
@ -0,0 +1,372 @@
|
|||
# orm/dynamic.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Dynamic collection API.
|
||||
|
||||
Dynamic collections act like Query() objects for read operations and support
|
||||
basic add/delete mutation.
|
||||
|
||||
"""
|
||||
|
||||
from .. import log, util, exc
|
||||
from ..sql import operators
|
||||
from . import (
|
||||
attributes, object_session, util as orm_util, strategies,
|
||||
object_mapper, exc as orm_exc, properties
|
||||
)
|
||||
from .query import Query
|
||||
|
||||
|
||||
@log.class_logger
@properties.RelationshipProperty.strategy_for(lazy="dynamic")
class DynaLoader(strategies.AbstractRelationshipLoader):
    """Loader strategy for relationship(lazy='dynamic'): installs a
    DynamicAttributeImpl so the collection is queried on access rather
    than loaded into memory.
    """

    def init_class_attribute(self, mapper):
        self.is_class_level = True
        # 'dynamic' only makes sense for collections; scalar relationships
        # are rejected up front.
        if not self.uselist:
            raise exc.InvalidRequestError(
                "On relationship %s, 'dynamic' loaders cannot be used with "
                "many-to-one/one-to-one relationships and/or "
                "uselist=False." % self.parent_property)
        strategies._register_attribute(
            self,
            mapper,
            useobject=True,
            uselist=True,
            impl_class=DynamicAttributeImpl,
            target_mapper=self.parent_property.mapper,
            order_by=self.parent_property.order_by,
            query_class=self.parent_property.query_class,
            backref=self.parent_property.back_populates,
        )
|
||||
|
||||
|
||||
class DynamicAttributeImpl(attributes.AttributeImpl):
    """Attribute implementation for 'dynamic' relationships: reads return
    an AppenderQuery-style query object, while pending add/remove mutations
    are tracked in a per-state CollectionHistory.
    """

    uses_objects = True
    accepts_scalar_loader = False
    supports_population = False
    collection = False

    def __init__(self, class_, key, typecallable,
                 dispatch,
                 target_mapper, order_by, query_class=None, **kw):
        super(DynamicAttributeImpl, self).\
            __init__(class_, key, typecallable, dispatch, **kw)
        self.target_mapper = target_mapper
        self.order_by = order_by
        if not query_class:
            self.query_class = AppenderQuery
        elif AppenderMixin in query_class.mro():
            # user query class already has the appender behavior mixed in
            self.query_class = query_class
        else:
            # wrap the user's query class with AppenderMixin
            self.query_class = mixin_user_query(query_class)

    def get(self, state, dict_, passive=attributes.PASSIVE_OFF):
        if not passive & attributes.SQL_OK:
            # no SQL allowed: return only pending (in-memory) additions
            return self._get_collection_history(
                state, attributes.PASSIVE_NO_INITIALIZE).added_items
        else:
            return self.query_class(self, state)

    def get_collection(self, state, dict_, user_data=None,
                       passive=attributes.PASSIVE_NO_INITIALIZE):
        if not passive & attributes.SQL_OK:
            return self._get_collection_history(state,
                                                passive).added_items
        else:
            history = self._get_collection_history(state, passive)
            return history.added_plus_unchanged

    @util.memoized_property
    def _append_token(self):
        return attributes.Event(self, attributes.OP_APPEND)

    @util.memoized_property
    def _remove_token(self):
        return attributes.Event(self, attributes.OP_REMOVE)

    def fire_append_event(self, state, dict_, value, initiator,
                          collection_history=None):
        if collection_history is None:
            collection_history = self._modified_event(state, dict_)

        collection_history.add_added(value)

        for fn in self.dispatch.append:
            value = fn(state, value, initiator or self._append_token)

        if self.trackparent and value is not None:
            self.sethasparent(attributes.instance_state(value), state, True)

    def fire_remove_event(self, state, dict_, value, initiator,
                          collection_history=None):
        if collection_history is None:
            collection_history = self._modified_event(state, dict_)

        collection_history.add_removed(value)

        if self.trackparent and value is not None:
            self.sethasparent(attributes.instance_state(value), state, False)

        for fn in self.dispatch.remove:
            fn(state, value, initiator or self._remove_token)

    def _modified_event(self, state, dict_):
        """Mark the attribute modified and return the state's
        CollectionHistory, creating it if needed."""

        if self.key not in state.committed_state:
            state.committed_state[self.key] = CollectionHistory(self, state)

        state._modified_event(dict_,
                              self,
                              attributes.NEVER_SET)

        # this is a hack to allow the fixtures.ComparableEntity fixture
        # to work
        dict_[self.key] = True
        return state.committed_state[self.key]

    def set(self, state, dict_, value, initiator,
            passive=attributes.PASSIVE_OFF,
            check_old=None, pop=False):
        # guard against re-entrant events originating from this attribute
        if initiator and initiator.parent_token is self.parent_token:
            return

        if pop and value is None:
            return
        self._set_iterable(state, dict_, value)

    def _set_iterable(self, state, dict_, iterable, adapter=None):
        """Replace the collection contents with *iterable*, firing
        append/remove events for the computed differences only."""
        new_values = list(iterable)
        if state.has_identity:
            old_collection = util.IdentitySet(self.get(state, dict_))

        collection_history = self._modified_event(state, dict_)
        if not state.has_identity:
            # transient object: "old" contents are just pending additions
            old_collection = collection_history.added_items
        else:
            old_collection = old_collection.union(
                collection_history.added_items)

        idset = util.IdentitySet
        constants = old_collection.intersection(new_values)
        additions = idset(new_values).difference(constants)
        removals = old_collection.difference(constants)

        for member in new_values:
            if member in additions:
                self.fire_append_event(state, dict_, member, None,
                                       collection_history=collection_history)

        for member in removals:
            self.fire_remove_event(state, dict_, member, None,
                                   collection_history=collection_history)

    def delete(self, *args, **kwargs):
        raise NotImplementedError()

    def set_committed_value(self, state, dict_, value):
        raise NotImplementedError("Dynamic attributes don't support "
                                  "collection population.")

    def get_history(self, state, dict_, passive=attributes.PASSIVE_OFF):
        c = self._get_collection_history(state, passive)
        return c.as_history()

    def get_all_pending(self, state, dict_,
                        passive=attributes.PASSIVE_NO_INITIALIZE):
        c = self._get_collection_history(
            state, passive)
        return [
            (attributes.instance_state(x), x)
            for x in
            c.all_items
        ]

    def _get_collection_history(self, state, passive=attributes.PASSIVE_OFF):
        if self.key in state.committed_state:
            c = state.committed_state[self.key]
        else:
            c = CollectionHistory(self, state)

        if state.has_identity and (passive & attributes.INIT_OK):
            # reconcile against the database contents via a new history
            return CollectionHistory(self, state, apply_to=c)
        else:
            return c

    def append(self, state, dict_, value, initiator,
               passive=attributes.PASSIVE_OFF):
        if initiator is not self:
            self.fire_append_event(state, dict_, value, initiator)

    def remove(self, state, dict_, value, initiator,
               passive=attributes.PASSIVE_OFF):
        if initiator is not self:
            self.fire_remove_event(state, dict_, value, initiator)

    def pop(self, state, dict_, value, initiator,
            passive=attributes.PASSIVE_OFF):
        self.remove(state, dict_, value, initiator, passive=passive)
|
||||
|
||||
|
||||
class AppenderMixin(object):
    """Query mixin providing collection-like add/remove/iterate behavior
    for a 'dynamic' relationship attribute, bound to one parent instance.
    """

    query_class = None

    def __init__(self, attr, state):
        super(AppenderMixin, self).__init__(attr.target_mapper, None)
        self.instance = instance = state.obj()
        self.attr = attr

        # pre-build the join criterion relating the target rows
        # to this specific parent instance
        mapper = object_mapper(instance)
        prop = mapper._props[self.attr.key]
        self._criterion = prop.compare(
            operators.eq,
            instance,
            value_is_parent=True,
            alias_secondary=False)

        if self.attr.order_by:
            self._order_by = self.attr.order_by

    def session(self):
        sess = object_session(self.instance)
        if sess is not None and self.autoflush and sess.autoflush \
                and self.instance in sess:
            sess.flush()
        if not orm_util.has_identity(self.instance):
            # not yet persisted; no session-backed query is possible
            return None
        else:
            return sess
    # read-only property; assignment is silently ignored
    session = property(session, lambda s, x: None)

    def __iter__(self):
        sess = self.session
        if sess is None:
            # detached/pending parent: iterate pending additions only
            return iter(self.attr._get_collection_history(
                attributes.instance_state(self.instance),
                attributes.PASSIVE_NO_INITIALIZE).added_items)
        else:
            return iter(self._clone(sess))

    def __getitem__(self, index):
        sess = self.session
        if sess is None:
            return self.attr._get_collection_history(
                attributes.instance_state(self.instance),
                attributes.PASSIVE_NO_INITIALIZE).indexed(index)
        else:
            return self._clone(sess).__getitem__(index)

    def count(self):
        sess = self.session
        if sess is None:
            return len(self.attr._get_collection_history(
                attributes.instance_state(self.instance),
                attributes.PASSIVE_NO_INITIALIZE).added_items)
        else:
            return self._clone(sess).count()

    def _clone(self, sess=None):
        # note we're returning an entirely new Query class instance
        # here without any assignment capabilities; the class of this
        # query is determined by the session.
        instance = self.instance
        if sess is None:
            sess = object_session(instance)
            if sess is None:
                raise orm_exc.DetachedInstanceError(
                    "Parent instance %s is not bound to a Session, and no "
                    "contextual session is established; lazy load operation "
                    "of attribute '%s' cannot proceed" % (
                        orm_util.instance_str(instance), self.attr.key))

        if self.query_class:
            query = self.query_class(self.attr.target_mapper, session=sess)
        else:
            query = sess.query(self.attr.target_mapper)

        query._criterion = self._criterion
        query._order_by = self._order_by

        return query

    def extend(self, iterator):
        for item in iterator:
            self.attr.append(
                attributes.instance_state(self.instance),
                attributes.instance_dict(self.instance), item, None)

    def append(self, item):
        self.attr.append(
            attributes.instance_state(self.instance),
            attributes.instance_dict(self.instance), item, None)

    def remove(self, item):
        self.attr.remove(
            attributes.instance_state(self.instance),
            attributes.instance_dict(self.instance), item, None)
|
||||
|
||||
|
||||
class AppenderQuery(AppenderMixin, Query):
    """A dynamic query that supports basic collection storage operations."""
|
||||
|
||||
|
||||
def mixin_user_query(cls):
    """Return a new class with AppenderQuery functionality layered over.

    The returned type subclasses both :class:`.AppenderMixin` and the
    given query class, and records the original class as ``query_class``.
    """
    bases = (AppenderMixin, cls)
    attrs = {'query_class': cls}
    return type('Appender' + cls.__name__, bases, attrs)
|
||||
|
||||
|
||||
class CollectionHistory(object):
    """Overrides AttributeHistory to receive append/remove events directly."""

    def __init__(self, attr, state, apply_to=None):
        if apply_to:
            # snapshot the current database contents (no autoflush) as the
            # "unchanged" baseline, layering the pending changes on top
            coll = AppenderQuery(attr, state).autoflush(False)
            self.unchanged_items = util.OrderedIdentitySet(coll)
            self.added_items = apply_to.added_items
            self.deleted_items = apply_to.deleted_items
            self._reconcile_collection = True
        else:
            self.deleted_items = util.OrderedIdentitySet()
            self.added_items = util.OrderedIdentitySet()
            self.unchanged_items = util.OrderedIdentitySet()
            self._reconcile_collection = False

    @property
    def added_plus_unchanged(self):
        return list(self.added_items.union(self.unchanged_items))

    @property
    def all_items(self):
        return list(self.added_items.union(
            self.unchanged_items).union(self.deleted_items))

    def as_history(self):
        if self._reconcile_collection:
            # drop adds/deletes that the database snapshot already reflects
            added = self.added_items.difference(self.unchanged_items)
            deleted = self.deleted_items.intersection(self.unchanged_items)
            unchanged = self.unchanged_items.difference(deleted)
        else:
            added, unchanged, deleted = self.added_items,\
                self.unchanged_items,\
                self.deleted_items
        return attributes.History(
            list(added),
            list(unchanged),
            list(deleted),
        )

    def indexed(self, index):
        return list(self.added_items)[index]

    def add_added(self, value):
        self.added_items.add(value)

    def add_removed(self, value):
        # removing a pending addition cancels it; otherwise record a delete
        if value in self.added_items:
            self.added_items.remove(value)
        else:
            self.deleted_items.add(value)
|
||||
141
lib/python3.4/site-packages/sqlalchemy/orm/evaluator.py
Normal file
141
lib/python3.4/site-packages/sqlalchemy/orm/evaluator.py
Normal file
|
|
@ -0,0 +1,141 @@
|
|||
# orm/evaluator.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
import operator
|
||||
from ..sql import operators
|
||||
from .. import util
|
||||
|
||||
|
||||
class UnevaluatableError(Exception):
    """Raised when a SQL clause cannot be evaluated in plain Python."""
|
||||
|
||||
# Operator functions that can be applied directly to in-Python values
# (arithmetic and comparison) by EvaluatorCompiler.visit_binary.
_straight_ops = set(getattr(operators, op)
                    for op in ('add', 'mul', 'sub',
                               'div',
                               'mod', 'truediv',
                               'lt', 'le', 'ne', 'gt', 'ge', 'eq'))


# Operators that have no in-Python evaluation; encountering one causes
# the evaluator to raise UnevaluatableError.
_notimplemented_ops = set(getattr(operators, op)
                          for op in ('like_op', 'notlike_op', 'ilike_op',
                                     'notilike_op', 'between_op', 'in_op',
                                     'notin_op', 'endswith_op', 'concat_op'))
|
||||
|
||||
|
||||
class EvaluatorCompiler(object):
    """Compile a SQL clause into a Python callable ``fn(obj) -> value``
    that evaluates the clause against a mapped object in memory.

    Used for the in-Python side of bulk UPDATE/DELETE synchronization.
    Three-valued SQL NULL semantics are preserved: ``None`` operands
    propagate ``None`` results.
    """

    def __init__(self, target_cls=None):
        # target_cls: the mapped class the criteria are expected to
        # apply to; used to warn on cross-class evaluation.
        self.target_cls = target_cls

    def process(self, clause):
        """Dispatch on the clause's ``__visit_name__``; raises
        UnevaluatableError for unsupported clause types."""
        meth = getattr(self, "visit_%s" % clause.__visit_name__, None)
        if not meth:
            raise UnevaluatableError(
                "Cannot evaluate %s" % type(clause).__name__)
        return meth(clause)

    def visit_grouping(self, clause):
        # parentheses have no effect on in-Python evaluation
        return self.process(clause.element)

    def visit_null(self, clause):
        return lambda obj: None

    def visit_false(self, clause):
        return lambda obj: False

    def visit_true(self, clause):
        return lambda obj: True

    def visit_column(self, clause):
        if 'parentmapper' in clause._annotations:
            parentmapper = clause._annotations['parentmapper']
            if self.target_cls and not issubclass(
                    self.target_cls, parentmapper.class_):
                util.warn(
                    "Can't do in-Python evaluation of criteria against "
                    "alternate class %s; "
                    "expiration of objects will not be accurate "
                    "and/or may fail. synchronize_session should be set to "
                    "False or 'fetch'. "
                    "This warning will be an exception "
                    "in 1.0." % parentmapper.class_
                )
            # resolve the column to its mapped attribute key
            key = parentmapper._columntoproperty[clause].key
        else:
            key = clause.key

        get_corresponding_attr = operator.attrgetter(key)
        return lambda obj: get_corresponding_attr(obj)

    def visit_clauselist(self, clause):
        evaluators = list(map(self.process, clause.clauses))
        if clause.operator is operators.or_:
            def evaluate(obj):
                # SQL OR: True wins; otherwise NULL if any operand was
                # NULL, else False.
                has_null = False
                for sub_evaluate in evaluators:
                    value = sub_evaluate(obj)
                    if value:
                        return True
                    has_null = has_null or value is None
                if has_null:
                    return None
                return False
        elif clause.operator is operators.and_:
            def evaluate(obj):
                # SQL AND: the first falsy operand decides; NULL
                # short-circuits to NULL.
                for sub_evaluate in evaluators:
                    value = sub_evaluate(obj)
                    if not value:
                        if value is None:
                            return None
                        return False
                return True
        else:
            raise UnevaluatableError(
                "Cannot evaluate clauselist with operator %s" %
                clause.operator)

        return evaluate

    def visit_binary(self, clause):
        eval_left, eval_right = list(map(self.process,
                                         [clause.left, clause.right]))
        operator = clause.operator
        if operator is operators.is_:
            def evaluate(obj):
                return eval_left(obj) == eval_right(obj)
        elif operator is operators.isnot:
            def evaluate(obj):
                return eval_left(obj) != eval_right(obj)
        elif operator in _straight_ops:
            def evaluate(obj):
                left_val = eval_left(obj)
                right_val = eval_right(obj)
                if left_val is None or right_val is None:
                    return None
                # use the already-computed operands; previously the
                # sub-evaluators were redundantly invoked a second time
                return operator(left_val, right_val)
        else:
            raise UnevaluatableError(
                "Cannot evaluate %s with operator %s" %
                (type(clause).__name__, clause.operator))
        return evaluate

    def visit_unary(self, clause):
        eval_inner = self.process(clause.element)
        if clause.operator is operators.inv:
            def evaluate(obj):
                value = eval_inner(obj)
                if value is None:
                    return None
                return not value
            return evaluate
        raise UnevaluatableError(
            "Cannot evaluate %s with operator %s" %
            (type(clause).__name__, clause.operator))

    def visit_bindparam(self, clause):
        # capture the bound value once; constant per compiled clause
        val = clause.value
        return lambda obj: val
|
||||
1746
lib/python3.4/site-packages/sqlalchemy/orm/events.py
Normal file
1746
lib/python3.4/site-packages/sqlalchemy/orm/events.py
Normal file
File diff suppressed because it is too large
Load diff
165
lib/python3.4/site-packages/sqlalchemy/orm/exc.py
Normal file
165
lib/python3.4/site-packages/sqlalchemy/orm/exc.py
Normal file
|
|
@ -0,0 +1,165 @@
|
|||
# orm/exc.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""SQLAlchemy ORM exceptions."""
|
||||
from .. import exc as sa_exc, util
|
||||
|
||||
NO_STATE = (AttributeError, KeyError)
"""Exception types that may be raised by instrumentation implementations."""
|
||||
|
||||
|
||||
class StaleDataError(sa_exc.SQLAlchemyError):
    """An operation encountered database state that is unaccounted for.

    Conditions which cause this to happen include:

    * A flush may have attempted to update or delete rows
      and an unexpected number of rows were matched during
      the UPDATE or DELETE statement.   Note that when
      version_id_col is used, rows in UPDATE or DELETE statements
      are also matched against the current known version
      identifier.

    * A mapped object with version_id_col was refreshed,
      and the version number coming back from the database does
      not match that of the object itself.

    * A object is detached from its parent object, however
      the object was previously attached to a different parent
      identity which was garbage collected, and a decision
      cannot be made if the new parent was really the most
      recent "parent".

    .. versionadded:: 0.7.4

    """

# legacy alias retained for backwards compatibility
ConcurrentModificationError = StaleDataError
|
||||
|
||||
|
||||
class FlushError(sa_exc.SQLAlchemyError):
    """An invalid condition was detected during flush()."""
|
||||
|
||||
|
||||
class UnmappedError(sa_exc.InvalidRequestError):
    """Base for exceptions that involve expected mappings not present."""
|
||||
|
||||
|
||||
class ObjectDereferencedError(sa_exc.SQLAlchemyError):
    """An operation cannot complete due to an object being garbage
    collected.

    """
|
||||
|
||||
|
||||
class DetachedInstanceError(sa_exc.SQLAlchemyError):
    """An attempt to access unloaded attributes on a
    mapped instance that is detached."""
|
||||
|
||||
|
||||
class UnmappedInstanceError(UnmappedError):
    """A mapping operation was requested for an unknown instance."""

    @util.dependencies("sqlalchemy.orm.base")
    def __init__(self, base, obj, msg=None):
        if not msg:
            try:
                # if the class itself is mapped, the problem is a
                # missing-instrumentation instance
                base.class_mapper(type(obj))
                name = _safe_cls_name(type(obj))
                # note: trailing space inside the literal is required for
                # correct implicit string concatenation ("instance is",
                # previously rendered as "instanceis")
                msg = ("Class %r is mapped, but this instance lacks "
                       "instrumentation. This occurs when the instance "
                       "is created before sqlalchemy.orm.mapper(%s) "
                       "was called." % (name, name))
            except UnmappedClassError:
                msg = _default_unmapped(type(obj))
                if isinstance(obj, type):
                    msg += (
                        '; was a class (%s) supplied where an instance was '
                        'required?' % _safe_cls_name(obj))
        UnmappedError.__init__(self, msg)

    def __reduce__(self):
        # support pickling: rebuild from the formatted message only
        return self.__class__, (None, self.args[0])
|
||||
|
||||
|
||||
class UnmappedClassError(UnmappedError):
    """A mapping operation was requested for an unknown class."""

    def __init__(self, cls, msg=None):
        if not msg:
            msg = _default_unmapped(cls)
        UnmappedError.__init__(self, msg)

    def __reduce__(self):
        # support pickling: rebuild from the formatted message only
        return self.__class__, (None, self.args[0])
|
||||
|
||||
|
||||
class ObjectDeletedError(sa_exc.InvalidRequestError):
    """A refresh operation failed to retrieve the database
    row corresponding to an object's known primary key identity.

    A refresh operation proceeds when an expired attribute is
    accessed on an object, or when :meth:`.Query.get` is
    used to retrieve an object which is, upon retrieval, detected
    as expired.   A SELECT is emitted for the target row
    based on primary key; if no row is returned, this
    exception is raised.

    The true meaning of this exception is simply that
    no row exists for the primary key identifier associated
    with a persistent object.   The row may have been
    deleted, or in some cases the primary key updated
    to a new value, outside of the ORM's management of the target
    object.

    """
    @util.dependencies("sqlalchemy.orm.base")
    def __init__(self, base, state, msg=None):
        if not msg:
            msg = "Instance '%s' has been deleted, or its "\
                "row is otherwise not present." % base.state_str(state)

        sa_exc.InvalidRequestError.__init__(self, msg)

    def __reduce__(self):
        # support pickling: rebuild from the formatted message only
        return self.__class__, (None, self.args[0])
|
||||
|
||||
|
||||
class UnmappedColumnError(sa_exc.InvalidRequestError):
    """Mapping operation was requested on an unknown column."""
|
||||
|
||||
|
||||
class NoResultFound(sa_exc.InvalidRequestError):
    """A database result was required but none was found."""
|
||||
|
||||
|
||||
class MultipleResultsFound(sa_exc.InvalidRequestError):
    """A single database result was required but more than one were found."""
|
||||
|
||||
|
||||
def _safe_cls_name(cls):
|
||||
try:
|
||||
cls_name = '.'.join((cls.__module__, cls.__name__))
|
||||
except AttributeError:
|
||||
cls_name = getattr(cls, '__name__', None)
|
||||
if cls_name is None:
|
||||
cls_name = repr(cls)
|
||||
return cls_name
|
||||
|
||||
|
||||
@util.dependencies("sqlalchemy.orm.base")
def _default_unmapped(base, cls):
    """Return the standard "not mapped" message for *cls*, or None if
    mappers are in fact present for the class."""
    try:
        mappers = base.manager_of_class(cls).mappers
    except NO_STATE:
        mappers = {}
    except TypeError:
        # manager_of_class() can be handed a non-class (e.g. None)
        mappers = {}
    name = _safe_cls_name(cls)

    if not mappers:
        return "Class '%s' is not mapped" % name
|
||||
247
lib/python3.4/site-packages/sqlalchemy/orm/identity.py
Normal file
247
lib/python3.4/site-packages/sqlalchemy/orm/identity.py
Normal file
|
|
@ -0,0 +1,247 @@
|
|||
# orm/identity.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
import weakref
|
||||
from . import attributes
|
||||
from .. import util
|
||||
|
||||
|
||||
class IdentityMap(dict):
    """Base dict mapping identity keys to InstanceState objects.

    Mutation goes through add()/remove() in subclasses; the plain dict
    mutation API is disabled.  Also tracks which contained states are
    modified.
    """

    def __init__(self):
        # set of InstanceStates flagged as modified
        self._modified = set()
        # weakref to self, handed to states as their _instance_dict
        self._wr = weakref.ref(self)

    def replace(self, state):
        raise NotImplementedError()

    def add(self, state):
        raise NotImplementedError()

    def update(self, dict):
        raise NotImplementedError("IdentityMap uses add() to insert data")

    def clear(self):
        raise NotImplementedError("IdentityMap uses remove() to remove data")

    def _manage_incoming_state(self, state):
        # link the state back to this map and record modified status
        state._instance_dict = self._wr

        if state.modified:
            self._modified.add(state)

    def _manage_removed_state(self, state):
        del state._instance_dict
        self._modified.discard(state)

    def _dirty_states(self):
        return self._modified

    def check_modified(self):
        """return True if any InstanceStates present have been marked
        as 'modified'.

        """
        return bool(self._modified)

    def has_key(self, key):
        return key in self

    def popitem(self):
        raise NotImplementedError("IdentityMap uses remove() to remove data")

    def pop(self, key, *args):
        raise NotImplementedError("IdentityMap uses remove() to remove data")

    def setdefault(self, key, default=None):
        raise NotImplementedError("IdentityMap uses add() to insert data")

    def copy(self):
        raise NotImplementedError()

    def __setitem__(self, key, value):
        raise NotImplementedError("IdentityMap uses add() to insert data")

    def __delitem__(self, key):
        raise NotImplementedError("IdentityMap uses remove() to remove data")
|
||||
|
||||
|
||||
class WeakInstanceDict(IdentityMap):
    """Identity map storing InstanceState objects whose instances are
    weakly referenced; entries whose object has been collected behave
    as absent.
    """

    def __init__(self):
        IdentityMap.__init__(self)

    def __getitem__(self, key):
        # Return the live object for *key*; a dead weakref counts as
        # a missing key.
        state = dict.__getitem__(self, key)
        o = state.obj()
        if o is None:
            raise KeyError(key)
        return o

    def __contains__(self, key):
        # True only if the key is present AND its object is still alive.
        try:
            if dict.__contains__(self, key):
                state = dict.__getitem__(self, key)
                o = state.obj()
            else:
                return False
        except KeyError:
            # Entry vanished between the check and the fetch.
            return False
        else:
            return o is not None

    def contains_state(self, state):
        return dict.get(self, state.key) is state

    def replace(self, state):
        # Install *state*, displacing (and unlinking) any different
        # existing state under the same key.
        if dict.__contains__(self, state.key):
            existing = dict.__getitem__(self, state.key)
            if existing is not state:
                self._manage_removed_state(existing)
            else:
                return

        dict.__setitem__(self, state.key, state)
        self._manage_incoming_state(state)

    def add(self, state):
        key = state.key
        # inline of self.__contains__
        if dict.__contains__(self, key):
            try:
                existing_state = dict.__getitem__(self, key)
                if existing_state is not state:
                    o = existing_state.obj()
                    if o is not None:
                        # A different, still-live state occupies this
                        # key: refuse to silently overwrite it.
                        raise AssertionError(
                            "A conflicting state is already "
                            "present in the identity map for key %r"
                            % (key, ))
                else:
                    return
            except KeyError:
                pass
        dict.__setitem__(self, key, state)
        self._manage_incoming_state(state)

    def get(self, key, default=None):
        # dict.get returning *default* itself means "absent"; a dead
        # weakref is also reported as *default*.
        state = dict.get(self, key, default)
        if state is default:
            return default
        o = state.obj()
        if o is None:
            return default
        return o

    def _items(self):
        # (key, live object) pairs; dead entries are skipped.
        values = self.all_states()
        result = []
        for state in values:
            value = state.obj()
            if value is not None:
                result.append((state.key, value))
        return result

    def _values(self):
        # Live objects only; dead entries are skipped.
        values = self.all_states()
        result = []
        for state in values:
            value = state.obj()
            if value is not None:
                result.append(value)

        return result

    # Present Python 2 vs 3 iteration APIs from the same helpers.
    if util.py2k:
        items = _items
        values = _values

        def iteritems(self):
            return iter(self.items())

        def itervalues(self):
            return iter(self.values())
    else:
        def items(self):
            return iter(self._items())

        def values(self):
            return iter(self._values())

    def all_states(self):
        # On py3k dict.values() is a view; materialize it so callers can
        # mutate the map while iterating the snapshot.
        if util.py2k:
            return dict.values(self)
        else:
            return list(dict.values(self))

    def discard(self, state):
        # Remove *state* only if it is the one actually stored.
        st = dict.get(self, state.key, None)
        if st is state:
            dict.pop(self, state.key, None)
            self._manage_removed_state(state)

    def prune(self):
        # Weak references prune themselves; nothing removable here.
        return 0
|
||||
|
||||
|
||||
class StrongInstanceDict(IdentityMap):
    """Identity map storing the mapped objects themselves (strong
    references); states are recovered via attributes.instance_state().
    """

    def all_states(self):
        return [attributes.instance_state(o) for o in self.values()]

    def contains_state(self, state):
        return (
            state.key in self and
            attributes.instance_state(self[state.key]) is state)

    def replace(self, state):
        # Install state.obj(), displacing (and unlinking) any different
        # existing state under the same key.
        if dict.__contains__(self, state.key):
            existing = dict.__getitem__(self, state.key)
            existing = attributes.instance_state(existing)
            if existing is not state:
                self._manage_removed_state(existing)
            else:
                return

        dict.__setitem__(self, state.key, state.obj())
        self._manage_incoming_state(state)

    def add(self, state):
        if state.key in self:
            if attributes.instance_state(
                    dict.__getitem__(
                        self,
                        state.key)) is not state:
                # A different state occupies this key: refuse to
                # silently overwrite it.
                raise AssertionError('A conflicting state is already '
                                     'present in the identity map for key %r'
                                     % (state.key, ))
        else:
            dict.__setitem__(self, state.key, state.obj())
            self._manage_incoming_state(state)

    def discard(self, state):
        # Remove the entry only if it maps to this exact state.
        obj = dict.get(self, state.key, None)
        if obj is not None:
            st = attributes.instance_state(obj)
            if st is state:
                dict.pop(self, state.key, None)
                self._manage_removed_state(state)

    def prune(self):
        """prune unreferenced, non-dirty states."""

        ref_count = len(self)
        # Keep strong refs to dirty objects alive for the duration of
        # the rebuild below.
        dirty = [s.obj() for s in self.all_states() if s.modified]

        # work around http://bugs.python.org/issue6149
        keepers = weakref.WeakValueDictionary()
        keepers.update(self)

        dict.clear(self)
        dict.update(self, keepers)
        # NOTE(review): this assigns a plain ``modified`` attribute while
        # the tracking collection on IdentityMap is ``_modified`` — looks
        # suspicious; confirm against upstream before changing.
        self.modified = bool(dirty)
        return ref_count - len(self)
|
||||
507
lib/python3.4/site-packages/sqlalchemy/orm/instrumentation.py
Normal file
507
lib/python3.4/site-packages/sqlalchemy/orm/instrumentation.py
Normal file
|
|
@ -0,0 +1,507 @@
|
|||
# orm/instrumentation.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Defines SQLAlchemy's system of class instrumentation.
|
||||
|
||||
This module is usually not directly visible to user applications, but
|
||||
defines a large part of the ORM's interactivity.
|
||||
|
||||
instrumentation.py deals with registration of end-user classes
|
||||
for state tracking. It interacts closely with state.py
|
||||
and attributes.py which establish per-instance and per-class-attribute
|
||||
instrumentation, respectively.
|
||||
|
||||
The class instrumentation system can be customized on a per-class
|
||||
or global basis using the :mod:`sqlalchemy.ext.instrumentation`
|
||||
module, which provides the means to build and specify
|
||||
alternate instrumentation forms.
|
||||
|
||||
.. versionchanged: 0.8
|
||||
The instrumentation extension system was moved out of the
|
||||
ORM and into the external :mod:`sqlalchemy.ext.instrumentation`
|
||||
package. When that package is imported, it installs
|
||||
itself within sqlalchemy.orm so that its more comprehensive
|
||||
resolution mechanics take effect.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
from . import exc, collections, interfaces, state
|
||||
from .. import util
|
||||
from . import base
|
||||
|
||||
|
||||
class ClassManager(dict):

    """tracks state information at the class level.

    Maps attribute key -> instrumented attribute for one mapped class,
    and installs/removes the class-level instrumentation hooks.
    """

    # Class attribute names used for the manager / per-instance state.
    MANAGER_ATTR = base.DEFAULT_MANAGER_ATTR
    STATE_ATTR = base.DEFAULT_STATE_ATTR

    deferred_scalar_loader = None

    original_init = object.__init__

    # ClassManager (sub)class that produced this instance; assigned by
    # InstrumentationFactory.create_manager_for_cls().
    factory = None

    def __init__(self, class_):
        self.class_ = class_
        self.info = {}
        self.new_init = None
        # Attributes instrumented directly on this class (vs inherited).
        self.local_attrs = {}
        # Original class members displaced by install_member().
        self.originals = {}

        # Managers of instrumented base classes, if any.
        self._bases = [mgr for mgr in [
            manager_of_class(base)
            for base in self.class_.__bases__
            if isinstance(base, type)
        ] if mgr is not None]

        # Inherit the base classes' instrumented attributes.
        for base in self._bases:
            self.update(base)

        self.dispatch._events._new_classmanager_instance(class_, self)
        # events._InstanceEventsHold.populate(class_, self)

        # Merge event dispatchers from the full MRO.
        for basecls in class_.__mro__:
            mgr = manager_of_class(basecls)
            if mgr is not None:
                self.dispatch._update(mgr.dispatch)
        self.manage()
        self._instrument_init()

        if '__del__' in class_.__dict__:
            util.warn("__del__() method on class %s will "
                      "cause unreachable cycles and memory leaks, "
                      "as SQLAlchemy instrumentation often creates "
                      "reference cycles. Please remove this method." %
                      class_)

    def __hash__(self):
        # Identity-based hashing/equality: the dict base's value-based
        # semantics are not wanted here.
        return id(self)

    def __eq__(self, other):
        return other is self

    @property
    def is_mapped(self):
        return 'mapper' in self.__dict__

    @util.memoized_property
    def mapper(self):
        # raises unless self.mapper has been assigned
        raise exc.UnmappedClassError(self.class_)

    def _all_sqla_attributes(self, exclude=None):
        """return an iterator of all classbound attributes that
        implement :class:`._InspectionAttr`.

        This includes :class:`.QueryableAttribute` as well as extension
        types such as :class:`.hybrid_property` and
        :class:`.AssociationProxy`.

        """
        if exclude is None:
            exclude = set()
        for supercls in self.class_.__mro__:
            for key in set(supercls.__dict__).difference(exclude):
                # First definition in MRO order wins.
                exclude.add(key)
                val = supercls.__dict__[key]
                if isinstance(val, interfaces._InspectionAttr):
                    yield key, val

    def _attr_has_impl(self, key):
        """Return True if the given attribute is fully initialized.

        i.e. has an impl.
        """

        return key in self and self[key].impl is not None

    def _subclass_manager(self, cls):
        """Create a new ClassManager for a subclass of this ClassManager's
        class.

        This is called automatically when attributes are instrumented so that
        the attributes can be propagated to subclasses against their own
        class-local manager, without the need for mappers etc. to have already
        pre-configured managers for the full class hierarchy.   Mappers
        can post-configure the auto-generated ClassManager when needed.

        """
        manager = manager_of_class(cls)
        if manager is None:
            manager = _instrumentation_factory.create_manager_for_cls(cls)
        return manager

    def _instrument_init(self):
        # TODO: self.class_.__init__ is often the already-instrumented
        # __init__ from an instrumented superclass.  We still need to make
        # our own wrapper, but it would
        # be nice to wrap the original __init__ and not our existing wrapper
        # of such, since this adds method overhead.
        self.original_init = self.class_.__init__
        self.new_init = _generate_init(self.class_, self)
        self.install_member('__init__', self.new_init)

    def _uninstrument_init(self):
        if self.new_init:
            self.uninstall_member('__init__')
            self.new_init = None

    @util.memoized_property
    def _state_constructor(self):
        # first_init fires exactly once, on first instance construction.
        self.dispatch.first_init(self, self.class_)
        return state.InstanceState

    def manage(self):
        """Mark this instance as the manager for its class."""

        setattr(self.class_, self.MANAGER_ATTR, self)

    def dispose(self):
        """Disassociate this manager from its class."""

        delattr(self.class_, self.MANAGER_ATTR)

    @util.hybridmethod
    def manager_getter(self):
        return _default_manager_getter

    @util.hybridmethod
    def state_getter(self):
        """Return a (instance) -> InstanceState callable.

        "state getter" callables should raise either KeyError or
        AttributeError if no InstanceState could be found for the
        instance.
        """

        return _default_state_getter

    @util.hybridmethod
    def dict_getter(self):
        return _default_dict_getter

    def instrument_attribute(self, key, inst, propagated=False):
        if propagated:
            if key in self.local_attrs:
                return  # don't override local attr with inherited attr
        else:
            self.local_attrs[key] = inst
            self.install_descriptor(key, inst)
        self[key] = inst

        # Push the attribute down to already-instrumented subclasses.
        for cls in self.class_.__subclasses__():
            manager = self._subclass_manager(cls)
            manager.instrument_attribute(key, inst, True)

    def subclass_managers(self, recursive):
        # Yield managers of direct (and, if recursive, indirect)
        # subclasses that already have instrumentation.
        for cls in self.class_.__subclasses__():
            mgr = manager_of_class(cls)
            if mgr is not None and mgr is not self:
                yield mgr
                if recursive:
                    for m in mgr.subclass_managers(True):
                        yield m

    def post_configure_attribute(self, key):
        _instrumentation_factory.dispatch.\
            attribute_instrument(self.class_, key, self[key])

    def uninstrument_attribute(self, key, propagated=False):
        if key not in self:
            return
        if propagated:
            if key in self.local_attrs:
                return  # don't get rid of local attr
        else:
            del self.local_attrs[key]
            self.uninstall_descriptor(key)
        del self[key]
        for cls in self.class_.__subclasses__():
            manager = manager_of_class(cls)
            if manager:
                manager.uninstrument_attribute(key, True)

    def unregister(self):
        """remove all instrumentation established by this ClassManager."""

        self._uninstrument_init()

        self.mapper = self.dispatch = None
        self.info.clear()

        for key in list(self):
            if key in self.local_attrs:
                self.uninstrument_attribute(key)

    def install_descriptor(self, key, inst):
        if key in (self.STATE_ATTR, self.MANAGER_ATTR):
            raise KeyError("%r: requested attribute name conflicts with "
                           "instrumentation attribute of the same name." %
                           key)
        setattr(self.class_, key, inst)

    def uninstall_descriptor(self, key):
        delattr(self.class_, key)

    def install_member(self, key, implementation):
        if key in (self.STATE_ATTR, self.MANAGER_ATTR):
            raise KeyError("%r: requested attribute name conflicts with "
                           "instrumentation attribute of the same name." %
                           key)
        # Remember the displaced member (setdefault: only the first
        # original is kept) so uninstall_member() can restore it.
        self.originals.setdefault(key, getattr(self.class_, key, None))
        setattr(self.class_, key, implementation)

    def uninstall_member(self, key):
        original = self.originals.pop(key, None)
        if original is not None:
            setattr(self.class_, key, original)

    def instrument_collection_class(self, key, collection_class):
        return collections.prepare_instrumentation(collection_class)

    def initialize_collection(self, key, state, factory):
        user_data = factory()
        adapter = collections.CollectionAdapter(
            self.get_impl(key), state, user_data)
        return adapter, user_data

    def is_instrumented(self, key, search=False):
        # search=True also matches attributes inherited from bases.
        if search:
            return key in self
        else:
            return key in self.local_attrs

    def get_impl(self, key):
        return self[key].impl

    @property
    def attributes(self):
        return iter(self.values())

    # InstanceState management

    def new_instance(self, state=None):
        # __new__ bypasses the (instrumented) __init__ entirely.
        instance = self.class_.__new__(self.class_)
        setattr(instance, self.STATE_ATTR,
                state or self._state_constructor(instance, self))
        return instance

    def setup_instance(self, instance, state=None):
        setattr(instance, self.STATE_ATTR,
                state or self._state_constructor(instance, self))

    def teardown_instance(self, instance):
        delattr(instance, self.STATE_ATTR)

    def _serialize(self, state, state_dict):
        return _SerializeManager(state, state_dict)

    def _new_state_if_none(self, instance):
        """Install a default InstanceState if none is present.

        A private convenience method used by the __init__ decorator.

        """
        if hasattr(instance, self.STATE_ATTR):
            return False
        elif self.class_ is not instance.__class__ and \
                self.is_mapped:
            # this will create a new ClassManager for the
            # subclass, without a mapper.  This is likely a
            # user error situation but allow the object
            # to be constructed, so that it is usable
            # in a non-ORM context at least.
            return self._subclass_manager(instance.__class__).\
                _new_state_if_none(instance)
        else:
            state = self._state_constructor(instance, self)
            setattr(instance, self.STATE_ATTR, state)
            return state

    def has_state(self, instance):
        return hasattr(instance, self.STATE_ATTR)

    def has_parent(self, state, key, optimistic=False):
        """TODO"""
        return self.get_impl(key).hasparent(state, optimistic=optimistic)

    def __bool__(self):
        """All ClassManagers are non-zero regardless of attribute state."""
        return True

    # Python 2 truth-value protocol alias.
    __nonzero__ = __bool__

    def __repr__(self):
        return '<%s of %r at %x>' % (
            self.__class__.__name__, self.class_, id(self))
|
||||
|
||||
|
||||
class _SerializeManager(object):

    """Provide serialization of a :class:`.ClassManager`.

    The :class:`.InstanceState` uses ``__init__()`` on serialize
    and ``__call__()`` on deserialize.

    """

    def __init__(self, state, d):
        # Record only the class; the manager itself is looked up again
        # on deserialize in the target process.
        self.class_ = state.class_
        manager = state.manager
        manager.dispatch.pickle(state, d)

    def __call__(self, state, inst, state_dict):
        state.manager = manager = manager_of_class(self.class_)
        if manager is None:
            raise exc.UnmappedInstanceError(
                inst,
                "Cannot deserialize object of type %r - "
                "no mapper() has "
                "been configured for this class within the current "
                "Python process!" %
                self.class_)
        elif manager.is_mapped and not manager.mapper.configured:
            manager.mapper._configure_all()

        # setup _sa_instance_state ahead of time so that
        # unpickle events can access the object normally.
        # see [ticket:2362]
        if inst is not None:
            manager.setup_instance(inst, state)
        manager.dispatch.unpickle(state, state_dict)
|
||||
|
||||
|
||||
class InstrumentationFactory(object):

    """Factory for new ClassManager instances."""

    def create_manager_for_cls(self, class_):
        # Precondition: class exists and is not yet instrumented.
        assert class_ is not None
        assert manager_of_class(class_) is None

        # give a more complicated subclass
        # a chance to do what it wants here
        manager, factory = self._locate_extended_factory(class_)

        if factory is None:
            factory = ClassManager
            manager = factory(class_)

        self._check_conflicts(class_, factory)

        manager.factory = factory

        self.dispatch.class_instrument(class_)
        return manager

    def _locate_extended_factory(self, class_):
        """Overridden by a subclass to do an extended lookup."""
        return None, None

    def _check_conflicts(self, class_, factory):
        """Overridden by a subclass to test for conflicting factories."""
        return

    def unregister(self, class_):
        # Tear down attribute instrumentation, detach the manager, and
        # ensure the manager attribute itself is gone from the class.
        manager = manager_of_class(class_)
        manager.unregister()
        manager.dispose()
        self.dispatch.class_uninstrument(class_)
        if ClassManager.MANAGER_ATTR in class_.__dict__:
            delattr(class_, ClassManager.MANAGER_ATTR)
|
||||
|
||||
# this attribute is replaced by sqlalchemy.ext.instrumentation
# when imported.
_instrumentation_factory = InstrumentationFactory()

# these attributes are replaced by sqlalchemy.ext.instrumentation
# when a non-standard InstrumentationManager class is first
# used to instrument a class.
instance_state = _default_state_getter = base.instance_state
instance_dict = _default_dict_getter = base.instance_dict
manager_of_class = _default_manager_getter = base.manager_of_class
|
||||
|
||||
|
||||
def register_class(class_):
    """Register class instrumentation.

    Returns the existing or newly created class manager.

    """
    existing = manager_of_class(class_)
    if existing is not None:
        # Already instrumented; reuse the current manager.
        return existing
    return _instrumentation_factory.create_manager_for_cls(class_)
|
||||
|
||||
|
||||
def unregister_class(class_):
    """Unregister class instrumentation."""

    # Delegates to the module-level factory (which ext.instrumentation
    # may have replaced).
    _instrumentation_factory.unregister(class_)
|
||||
|
||||
|
||||
def is_instrumented(instance, key):
    """Return True if the given attribute on the given instance is
    instrumented by the attributes package.

    This function may be used regardless of instrumentation
    applied directly to the class, i.e. no descriptors are required.

    """
    # search=True also matches attributes inherited from base classes.
    manager = manager_of_class(instance.__class__)
    return manager.is_instrumented(key, search=True)
|
||||
|
||||
|
||||
def _generate_init(class_, class_manager):
    """Build an __init__ decorator that triggers ClassManager events."""

    # TODO: we should use the ClassManager's notion of the
    # original '__init__' method, once ClassManager is fixed
    # to always reference that.
    original__init__ = class_.__init__
    assert original__init__

    # Go through some effort here and don't change the user's __init__
    # calling signature, including the unlikely case that it has
    # a return value.
    # FIXME: need to juggle local names to avoid constructor argument
    # clashes.
    func_body = """\
def __init__(%(apply_pos)s):
    new_state = class_manager._new_state_if_none(%(self_arg)s)
    if new_state:
        return new_state._initialize_instance(%(apply_kw)s)
    else:
        return original__init__(%(apply_kw)s)
"""
    # format_argspec_init supplies apply_pos/self_arg/apply_kw so the
    # generated wrapper mirrors the user's exact signature.
    func_vars = util.format_argspec_init(original__init__, grouped=False)
    func_text = func_body % func_vars

    if util.py2k:
        # Unwrap the bound/unbound method to reach the function object.
        func = getattr(original__init__, 'im_func', original__init__)
        func_defaults = getattr(func, 'func_defaults', None)
    else:
        func_defaults = getattr(original__init__, '__defaults__', None)
        func_kw_defaults = getattr(original__init__, '__kwdefaults__', None)

    # exec against locals() so the generated code closes over
    # class_manager and original__init__ via its globals.
    env = locals().copy()
    exec(func_text, env)
    __init__ = env['__init__']
    __init__.__doc__ = original__init__.__doc__

    # Carry over default values so introspection and calls behave
    # like the original.
    if func_defaults:
        __init__.__defaults__ = func_defaults
    if not util.py2k and func_kw_defaults:
        __init__.__kwdefaults__ = func_kw_defaults

    return __init__
|
||||
578
lib/python3.4/site-packages/sqlalchemy/orm/interfaces.py
Normal file
578
lib/python3.4/site-packages/sqlalchemy/orm/interfaces.py
Normal file
|
|
@ -0,0 +1,578 @@
|
|||
# orm/interfaces.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""
|
||||
|
||||
Contains various base classes used throughout the ORM.
|
||||
|
||||
Defines the now deprecated ORM extension classes as well
|
||||
as ORM internals.
|
||||
|
||||
Other than the deprecated extensions, this module and the
|
||||
classes within should be considered mostly private.
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from .. import exc as sa_exc, util, inspect
|
||||
from ..sql import operators
|
||||
from collections import deque
|
||||
from .base import (ONETOMANY, MANYTOONE, MANYTOMANY,
|
||||
EXT_CONTINUE, EXT_STOP, NOT_EXTENSION)
|
||||
from .base import _InspectionAttr, _MappedAttribute
|
||||
from .path_registry import PathRegistry
|
||||
import collections
|
||||
|
||||
|
||||
__all__ = (
|
||||
'AttributeExtension',
|
||||
'EXT_CONTINUE',
|
||||
'EXT_STOP',
|
||||
'ONETOMANY',
|
||||
'MANYTOMANY',
|
||||
'MANYTOONE',
|
||||
'NOT_EXTENSION',
|
||||
'LoaderStrategy',
|
||||
'MapperExtension',
|
||||
'MapperOption',
|
||||
'MapperProperty',
|
||||
'PropComparator',
|
||||
'SessionExtension',
|
||||
'StrategizedProperty',
|
||||
)
|
||||
|
||||
|
||||
class MapperProperty(_MappedAttribute, _InspectionAttr):
    """Manage the relationship of a ``Mapper`` to a single class
    attribute, as well as that attribute as it appears on individual
    instances of the class, including attribute instrumentation,
    attribute access, loading behavior, and dependency calculations.

    The most common occurrences of :class:`.MapperProperty` are the
    mapped :class:`.Column`, which is represented in a mapping as
    an instance of :class:`.ColumnProperty`,
    and a reference to another class produced by :func:`.relationship`,
    represented in the mapping as an instance of
    :class:`.RelationshipProperty`.

    """

    cascade = frozenset()
    """The set of 'cascade' attribute names.

    This collection is checked before the 'cascade_iterator' method is called.

    """

    is_property = True

    def setup(self, context, entity, path, adapter, **kwargs):
        """Called by Query for the purposes of constructing a SQL statement.

        Each MapperProperty associated with the target mapper processes the
        statement referenced by the query context, adding columns and/or
        criterion as appropriate.
        """

        pass

    def create_row_processor(self, context, path,
                             mapper, row, adapter):
        """Return a 3-tuple consisting of three row processing functions.

        """
        # Base implementation contributes no row processors.
        return None, None, None

    def cascade_iterator(self, type_, state, visited_instances=None,
                         halt_on=None):
        """Iterate through instances related to the given instance for
        a particular 'cascade', starting with this MapperProperty.

        Return an iterator of 3-tuples (instance, mapper, state).

        Note that the 'cascade' collection on this MapperProperty is
        checked first for the given type before cascade_iterator is called.

        See PropertyLoader for the related instance implementation.
        """

        return iter(())

    def set_parent(self, parent, init):
        # Associate this property with its owning Mapper.
        self.parent = parent

    def instrument_class(self, mapper):  # pragma: no-coverage
        raise NotImplementedError()

    @util.memoized_property
    def info(self):
        """Info dictionary associated with the object, allowing user-defined
        data to be associated with this :class:`.MapperProperty`.

        The dictionary is generated when first accessed.  Alternatively,
        it can be specified as a constructor argument to the
        :func:`.column_property`, :func:`.relationship`, or
        :func:`.composite`
        functions.

        .. versionadded:: 0.8  Added support for .info to all
           :class:`.MapperProperty` subclasses.

        .. seealso::

            :attr:`.QueryableAttribute.info`

            :attr:`.SchemaItem.info`

        """
        return {}

    _configure_started = False
    _configure_finished = False

    def init(self):
        """Called after all mappers are created to assemble
        relationships between mappers and perform other post-mapper-creation
        initialization steps.

        """
        self._configure_started = True
        self.do_init()
        self._configure_finished = True

    @property
    def class_attribute(self):
        """Return the class-bound descriptor corresponding to this
        :class:`.MapperProperty`.

        This is basically a ``getattr()`` call::

            return getattr(self.parent.class_, self.key)

        I.e. if this :class:`.MapperProperty` were named ``addresses``,
        and the class to which it is mapped is ``User``, this sequence
        is possible::

            >>> from sqlalchemy import inspect
            >>> mapper = inspect(User)
            >>> addresses_property = mapper.attrs.addresses
            >>> addresses_property.class_attribute is User.addresses
            True
            >>> User.addresses.property is addresses_property
            True


        """

        return getattr(self.parent.class_, self.key)

    def do_init(self):
        """Perform subclass-specific initialization post-mapper-creation
        steps.

        This is a template method called by the ``MapperProperty``
        object's init() method.

        """

        pass

    def post_instrument_class(self, mapper):
        """Perform instrumentation adjustments that need to occur
        after init() has completed.

        """
        pass

    def is_primary(self):
        """Return True if this ``MapperProperty``'s mapper is the
        primary mapper for its class.

        This flag is used to indicate that the ``MapperProperty`` can
        define attribute instrumentation for the class at the class
        level (as opposed to the individual instance level).
        """

        return not self.parent.non_primary

    def merge(self, session, source_state, source_dict, dest_state,
              dest_dict, load, _recursive):
        """Merge the attribute represented by this ``MapperProperty``
        from source to destination object"""

        pass

    def compare(self, operator, value, **kw):
        """Return a compare operation for the columns represented by
        this ``MapperProperty`` to the given value, which may be a
        column value or an instance.  'operator' is an operator from
        the operators module, or from sql.Comparator.

        By default uses the PropComparator attached to this MapperProperty
        under the attribute name "comparator".
        """

        return operator(self.comparator, value)

    def __repr__(self):
        return '<%s at 0x%x; %s>' % (
            self.__class__.__name__,
            id(self), getattr(self, 'key', 'no key'))
|
||||
|
||||
|
||||
class PropComparator(operators.ColumnOperators):
    """Defines boolean, comparison, and other operators for
    :class:`.MapperProperty` objects.

    SQLAlchemy allows for operators to be redefined at both the Core
    and ORM level.  :class:`.PropComparator` is the base class of
    operator redefinition for ORM-level operations, including those of
    :class:`.ColumnProperty`, :class:`.RelationshipProperty`, and
    :class:`.CompositeProperty`.

    .. note:: With the advent of Hybrid properties introduced in
       SQLAlchemy 0.7, as well as Core-level operator redefinition in
       SQLAlchemy 0.8, the use case for user-defined
       :class:`.PropComparator` instances is extremely rare.  See
       :ref:`hybrids_toplevel` as well as :ref:`types_operators`.

    User-defined subclasses of :class:`.PropComparator` may be created.
    The built-in Python comparison and math operator methods, such as
    :meth:`.operators.ColumnOperators.__eq__`,
    :meth:`.operators.ColumnOperators.__lt__`, and
    :meth:`.operators.ColumnOperators.__add__`, can be overridden to
    provide new operator behavior.  The custom :class:`.PropComparator`
    is passed to the :class:`.MapperProperty` instance via the
    ``comparator_factory`` argument, using the appropriate subclass
    in each case::

        # definition of custom PropComparator subclasses

        from sqlalchemy.orm.properties import \\
            ColumnProperty,\\
            CompositeProperty,\\
            RelationshipProperty

        class MyColumnComparator(ColumnProperty.Comparator):
            def __eq__(self, other):
                return self.__clause_element__() == other

        class MyRelationshipComparator(RelationshipProperty.Comparator):
            def any(self, expression):
                "define the 'any' operation"
                # ...

        class MyCompositeComparator(CompositeProperty.Comparator):
            def __gt__(self, other):
                "redefine the 'greater than' operation"

                return sql.and_(*[a>b for a, b in
                                  zip(self.__clause_element__().clauses,
                                      other.__composite_values__())])

        # application of custom PropComparator subclasses

        from sqlalchemy.orm import column_property, relationship, composite
        from sqlalchemy import Column, String

        class SomeMappedClass(Base):
            some_column = column_property(Column("some_column", String),
                                comparator_factory=MyColumnComparator)

            some_relationship = relationship(SomeOtherClass,
                                comparator_factory=MyRelationshipComparator)

            some_composite = composite(
                    Column("a", String), Column("b", String),
                    comparator_factory=MyCompositeComparator
                )

    Note that for column-level operator redefinition, it's usually
    simpler to define the operators at the Core level, using the
    :attr:`.TypeEngine.comparator_factory` attribute.  See
    :ref:`types_operators` for more detail.

    See also:

    :class:`.ColumnProperty.Comparator`

    :class:`.RelationshipProperty.Comparator`

    :class:`.CompositeProperty.Comparator`

    :class:`.ColumnOperators`

    :ref:`types_operators`

    :attr:`.TypeEngine.comparator_factory`

    """

    def __init__(self, prop, parentmapper, adapt_to_entity=None):
        # .prop / .property are synonyms for the owning MapperProperty
        self.prop = self.property = prop
        self._parentmapper = parentmapper
        # optional AliasedInsp against which expressions are adapted
        self._adapt_to_entity = adapt_to_entity

    def __clause_element__(self):
        # subclasses must supply the Core SQL element this comparator
        # stands for
        raise NotImplementedError("%r" % self)

    def _query_clause_element(self):
        # hook allowing subclasses to return a differently-adapted
        # element in a Query context; defaults to __clause_element__()
        return self.__clause_element__()

    def adapt_to_entity(self, adapt_to_entity):
        """Return a copy of this PropComparator which will use the given
        :class:`.AliasedInsp` to produce corresponding expressions.
        """
        return self.__class__(self.prop, self._parentmapper, adapt_to_entity)

    @property
    def adapter(self):
        """Produce a callable that adapts column expressions
        to suit an aliased version of this comparator.

        Returns ``None`` when no aliasing is in effect.
        """
        if self._adapt_to_entity is None:
            return None
        else:
            return self._adapt_to_entity._adapt_element

    @util.memoized_property
    def info(self):
        # delegate to the owning MapperProperty's .info dictionary
        return self.property.info

    @staticmethod
    def any_op(a, b, **kwargs):
        # operator function form of any(), usable with operate()
        return a.any(b, **kwargs)

    @staticmethod
    def has_op(a, b, **kwargs):
        # operator function form of has(), usable with operate()
        return a.has(b, **kwargs)

    @staticmethod
    def of_type_op(a, class_):
        # operator function form of of_type(), usable with operate()
        return a.of_type(class_)

    def of_type(self, class_):
        """Redefine this object in terms of a polymorphic subclass.

        Returns a new PropComparator from which further criterion can be
        evaluated.

        e.g.::

            query.join(Company.employees.of_type(Engineer)).\\
               filter(Engineer.name=='foo')

        :param \class_: a class or mapper indicating that criterion will be
            against this specific subclass.


        """

        return self.operate(PropComparator.of_type_op, class_)

    def any(self, criterion=None, **kwargs):
        """Return true if this collection contains any member that meets the
        given criterion.

        The usual implementation of ``any()`` is
        :meth:`.RelationshipProperty.Comparator.any`.

        :param criterion: an optional ClauseElement formulated against the
          member class' table or attributes.

        :param \**kwargs: key/value pairs corresponding to member class
          attribute names which will be compared via equality to the
          corresponding values.

        """

        return self.operate(PropComparator.any_op, criterion, **kwargs)

    def has(self, criterion=None, **kwargs):
        """Return true if this element references a member which meets the
        given criterion.

        The usual implementation of ``has()`` is
        :meth:`.RelationshipProperty.Comparator.has`.

        :param criterion: an optional ClauseElement formulated against the
          member class' table or attributes.

        :param \**kwargs: key/value pairs corresponding to member class
          attribute names which will be compared via equality to the
          corresponding values.

        """

        return self.operate(PropComparator.has_op, criterion, **kwargs)
|
||||
|
||||
|
||||
class StrategizedProperty(MapperProperty):
    """A MapperProperty which uses selectable strategies to affect
    loading behavior.

    There is a single strategy selected by default.  Alternate
    strategies can be selected at Query time through the usage of
    ``StrategizedOption`` objects via the Query.options() method.

    """

    # token used to build "attr.*" wildcard loader-option keys;
    # set by subclasses (e.g. "column", "relationship")
    strategy_wildcard_key = None

    def _get_context_loader(self, context, path):
        """Locate the loader option in ``context.attributes`` that
        applies to this property along the given path, or None."""
        load = None

        # use EntityRegistry.__getitem__()->PropRegistry here so
        # that the path is stated in terms of our base
        search_path = dict.__getitem__(path, self)

        # search among: exact match, "attr.*", "default" strategy
        # if any.
        for path_key in (
            search_path._loader_key,
            search_path._wildcard_path_loader_key,
            search_path._default_path_loader_key
        ):
            if path_key in context.attributes:
                load = context.attributes[path_key]
                break

        return load

    def _get_strategy(self, key):
        """Return (constructing and caching if needed) the strategy
        instance for the given strategy key tuple."""
        try:
            return self._strategies[key]
        except KeyError:
            cls = self._strategy_lookup(*key)
            # cache under both the key tuple and the strategy class
            self._strategies[key] = self._strategies[
                cls] = strategy = cls(self)
            return strategy

    def _get_strategy_by_cls(self, cls):
        # a strategy class's first registered key is its canonical key
        return self._get_strategy(cls._strategy_keys[0])

    def setup(self, context, entity, path, adapter, **kwargs):
        # per-query loader option, if present, overrides the default
        # strategy for statement construction
        loader = self._get_context_loader(context, path)
        if loader and loader.strategy:
            strat = self._get_strategy(loader.strategy)
        else:
            strat = self.strategy
        strat.setup_query(context, entity, path, loader, adapter, **kwargs)

    def create_row_processor(self, context, path, mapper, row, adapter):
        # same override resolution as setup(), applied at row
        # processing time
        loader = self._get_context_loader(context, path)
        if loader and loader.strategy:
            strat = self._get_strategy(loader.strategy)
        else:
            strat = self.strategy
        return strat.create_row_processor(context, path, loader,
                                          mapper, row, adapter)

    def do_init(self):
        # NOTE: this instance-level dict shadows the class-level
        # _strategies registry defined below; per-instance it caches
        # constructed strategy objects, while the class-level mapping
        # registers strategy *classes*.
        self._strategies = {}
        self.strategy = self._get_strategy_by_cls(self.strategy_class)

    def post_instrument_class(self, mapper):
        # establish class-level attribute behavior only for the primary
        # mapper and only if no implementation is already in place
        if self.is_primary() and \
                not mapper.class_manager._attr_has_impl(self.key):
            self.strategy.init_class_attribute(mapper)

    # class-level registry: {property class: {strategy key: strategy cls}}
    _strategies = collections.defaultdict(dict)

    @classmethod
    def strategy_for(cls, **kw):
        """Class decorator registering a LoaderStrategy subclass for the
        strategy key derived from the given keyword arguments."""
        def decorate(dec_cls):
            dec_cls._strategy_keys = []
            # sorted items give a deterministic, hashable key
            key = tuple(sorted(kw.items()))
            cls._strategies[cls][key] = dec_cls
            dec_cls._strategy_keys.append(key)
            return dec_cls
        return decorate

    @classmethod
    def _strategy_lookup(cls, *key):
        # walk the MRO so that strategies registered against a base
        # property class apply to subclasses as well
        for prop_cls in cls.__mro__:
            if prop_cls in cls._strategies:
                strategies = cls._strategies[prop_cls]
                try:
                    return strategies[key]
                except KeyError:
                    pass
        raise Exception("can't locate strategy for %s %s" % (cls, key))
|
||||
|
||||
|
||||
class MapperOption(object):
    """Describe a modification to a Query."""

    propagate_to_loaders = False
    """if True, indicate this option should be carried along
    to the Query object generated by scalar or object lazy loaders.
    """

    def process_query(self, query):
        # no-op hook; subclasses mutate the Query in place here
        pass

    def process_query_conditionally(self, query):
        """same as process_query(), except that this option may not
        apply to the given query.

        Used when secondary loaders resend existing options to a new
        Query."""

        self.process_query(query)
|
||||
|
||||
|
||||
class LoaderStrategy(object):
    """Describe the loading behavior of a StrategizedProperty object.

    The ``LoaderStrategy`` interacts with the querying process in three
    ways:

    * it controls the configuration of the ``InstrumentedAttribute``
      placed on a class to handle the behavior of the attribute.  this
      may involve setting up class-level callable functions to fire
      off a select operation when the attribute is first accessed
      (i.e. a lazy load)

    * it processes the ``QueryContext`` at statement construction time,
      where it can modify the SQL statement that is being produced.
      Simple column attributes may add their represented column to the
      list of selected columns, *eager loading* properties may add
      ``LEFT OUTER JOIN`` clauses to the statement.

    * It produces "row processor" functions at result fetching time.
      These "row processor" functions populate a particular attribute
      on a particular mapped instance.

    """

    def __init__(self, parent):
        # the StrategizedProperty that owns this strategy
        self.parent_property = parent
        # True once class-level (vs. instance-level) behavior is set up
        self.is_class_level = False
        # owning Mapper and the attribute key being loaded
        self.parent = self.parent_property.parent
        self.key = self.parent_property.key

    def init_class_attribute(self, mapper):
        # no-op hook; subclasses establish the InstrumentedAttribute here
        pass

    def setup_query(self, context, entity, path, loadopt, adapter, **kwargs):
        # no-op hook; subclasses modify the in-progress SELECT here
        pass

    def create_row_processor(self, context, path, loadopt, mapper,
                             row, adapter):
        """Return row processing functions which fulfill the contract
        specified by MapperProperty.create_row_processor.

        StrategizedProperty delegates its create_row_processor method
        directly to this method. """

        # (new-value populator, existing-value populator, eager populator)
        return None, None, None

    def __str__(self):
        return str(self.parent_property)
|
||||
--- new file: lib/python3.4/site-packages/sqlalchemy/orm/loading.py (614 lines) @@ -0,0 +1,614 @@ ---
|
|||
# orm/loading.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""private module containing functions used to convert database
|
||||
rows into object instances and associated state.
|
||||
|
||||
the functions here are called primarily by Query, Mapper,
|
||||
as well as some of the attribute loading strategies.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
from .. import util
|
||||
from . import attributes, exc as orm_exc, state as statelib
|
||||
from .interfaces import EXT_CONTINUE
|
||||
from ..sql import util as sql_util
|
||||
from .util import _none_set, state_str
|
||||
from .. import exc as sa_exc
|
||||
|
||||
_new_runid = util.counter()  # monotonically increasing id assigned to each query execution
|
||||
|
||||
|
||||
def instances(query, cursor, context):
    """Return an ORM result as an iterator.

    Generator which fetches rows from ``cursor`` (all at once, or in
    chunks when ``query._yield_per`` is set), runs each row through the
    per-entity row processors, commits loaded attribute state, and
    yields result rows.
    """
    session = query.session

    # stamp this execution so row processors can tell whether an
    # identity-map hit was populated during this same run
    context.runid = _new_runid()

    filter_fns = [ent.filter_fn
                  for ent in query._entities]
    # the builtin `id` is used as the filter_fn sentinel meaning
    # "unique by object identity"; its presence enables de-duplication
    filtered = id in filter_fns

    single_entity = len(query._entities) == 1 and \
        query._entities[0].supports_single_entity

    if filtered:
        if single_entity:
            filter_fn = id
        else:
            def filter_fn(row):
                return tuple(fn(x) for x, fn in zip(row, filter_fns))

    custom_rows = single_entity and \
        query._entities[0].custom_rows

    (process, labels) = \
        list(zip(*[
            query_entity.row_processor(query,
                                       context, custom_rows)
            for query_entity in query._entities
        ]))

    while True:
        # per-chunk collections of fully-loaded and partially-refreshed
        # instance states
        context.progress = {}
        context.partials = {}

        if query._yield_per:
            fetch = cursor.fetchmany(query._yield_per)
            if not fetch:
                break
        else:
            fetch = cursor.fetchall()

        if custom_rows:
            # legacy append_result protocol: processor appends into rows
            rows = []
            for row in fetch:
                process[0](row, rows)
        elif single_entity:
            rows = [process[0](row, None) for row in fetch]
        else:
            rows = [util.KeyedTuple([proc(row, None) for proc in process],
                                    labels) for row in fetch]

        if filtered:
            rows = util.unique_list(rows, filter_fn)

        # a partial-attribute refresh commits only the requested
        # attributes on the refresh target
        if context.refresh_state and query._only_load_props \
                and context.refresh_state in context.progress:
            context.refresh_state._commit(
                context.refresh_state.dict, query._only_load_props)
            context.progress.pop(context.refresh_state)

        statelib.InstanceState._commit_all_states(
            list(context.progress.items()),
            session.identity_map
        )

        for state, (dict_, attrs) in context.partials.items():
            state._commit(dict_, attrs)

        for row in rows:
            yield row

        if not query._yield_per:
            break
|
||||
|
||||
|
||||
@util.dependencies("sqlalchemy.orm.query")
def merge_result(querylib, query, iterator, load=True):
    """Merge a result into this :class:`.Query` object's Session.

    ``querylib`` is injected by the ``@util.dependencies`` decorator
    (the ``sqlalchemy.orm.query`` module); callers pass only
    ``(query, iterator, load)``.  Returns an iterator of merged
    instances / KeyedTuples.
    """

    session = query.session
    if load:
        # flush current contents if we expect to load data
        session._autoflush()

    # temporarily disable autoflush so the merges themselves don't
    # trigger flushes; restored in the finally block
    autoflush = session.autoflush
    try:
        session.autoflush = False
        single_entity = len(query._entities) == 1
        if single_entity:
            if isinstance(query._entities[0], querylib._MapperEntity):
                result = [session._merge(
                    attributes.instance_state(instance),
                    attributes.instance_dict(instance),
                    load=load, _recursive={})
                    for instance in iterator]
            else:
                # scalar (non-mapped) entity; nothing to merge
                result = list(iterator)
        else:
            # merge only the mapped positions of each row tuple,
            # passing other columns through unchanged
            mapped_entities = [i for i, e in enumerate(query._entities)
                               if isinstance(e, querylib._MapperEntity)]
            result = []
            keys = [ent._label_name for ent in query._entities]
            for row in iterator:
                newrow = list(row)
                for i in mapped_entities:
                    if newrow[i] is not None:
                        newrow[i] = session._merge(
                            attributes.instance_state(newrow[i]),
                            attributes.instance_dict(newrow[i]),
                            load=load, _recursive={})
                result.append(util.KeyedTuple(newrow, keys))

        return iter(result)
    finally:
        session.autoflush = autoflush
|
||||
|
||||
|
||||
def get_from_identity(session, key, passive):
    """Look up the given key in the given session's identity map,
    check the object for expired state if found.

    :param session: the owning :class:`.Session`.
    :param key: identity key tuple to look up.
    :param passive: bit flags from the ``attributes`` module governing
     whether SQL may be emitted to un-expire the object.
    :return: the mapped instance, ``attributes.PASSIVE_NO_RESULT`` when
     SQL would be required but is not allowed, or ``None`` when the key
     is absent or the row has been deleted.
    """
    instance = session.identity_map.get(key)
    if instance is not None:

        state = attributes.instance_state(instance)

        # expired - ensure it still exists
        if state.expired:
            if not passive & attributes.SQL_OK:
                # TODO: no coverage here
                return attributes.PASSIVE_NO_RESULT
            elif not passive & attributes.RELATED_OBJECT_OK:
                # this mode is used within a flush and the instance's
                # expired state will be checked soon enough, if necessary
                return instance
            try:
                # BUGFIX: was ``state(state, passive)`` — an
                # InstanceState is not callable and that line would
                # raise TypeError.  The expired-attribute refresh is
                # InstanceState._load_expired(), which raises
                # ObjectDeletedError if the row is gone.
                state._load_expired(state, passive)
            except orm_exc.ObjectDeletedError:
                session._remove_newly_deleted([state])
                return None
        return instance
    else:
        return None
|
||||
|
||||
|
||||
def load_on_ident(query, key,
                  refresh_state=None, lockmode=None,
                  only_load_props=None):
    """Load the given identity key from the database.

    Builds a criterion against the mapper's primary key columns out of
    ``key``, applies lock/refresh options, and returns the single
    matching instance or ``None`` if no row was found.
    """

    if key is not None:
        # identity keys are (class, pk_value_tuple); extract the values
        ident = key[1]
    else:
        ident = None

    if refresh_state is None:
        q = query._clone()
        q._get_condition()
    else:
        q = query._clone()

    if ident is not None:
        mapper = query._mapper_zero()

        # precomputed "pk_col = :param" clause plus its bind params
        (_get_clause, _get_params) = mapper._get_clause

        # None present in ident - turn those comparisons
        # into "IS NULL"
        if None in ident:
            nones = set([
                _get_params[col].key for col, value in
                zip(mapper.primary_key, ident) if value is None
            ])
            _get_clause = sql_util.adapt_criterion_to_null(
                _get_clause, nones)

        _get_clause = q._adapt_clause(_get_clause, True, False)
        q._criterion = _get_clause

        params = dict([
            (_get_params[primary_key].key, id_val)
            for id_val, primary_key in zip(ident, mapper.primary_key)
        ])

        q._params = params

    # any form of row locking implies a version check on refresh
    if lockmode is not None:
        version_check = True
        q = q.with_lockmode(lockmode)
    elif query._for_update_arg is not None:
        version_check = True
        q._for_update_arg = query._for_update_arg
    else:
        version_check = False

    q._get_options(
        populate_existing=bool(refresh_state),
        version_check=version_check,
        only_load_props=only_load_props,
        refresh_state=refresh_state)
    # ordering is meaningless for a single-row primary-key lookup
    q._order_by = None

    try:
        return q.one()
    except orm_exc.NoResultFound:
        return None
|
||||
|
||||
|
||||
def instance_processor(mapper, context, path, adapter,
                       polymorphic_from=None,
                       only_load_props=None,
                       refresh_state=None,
                       polymorphic_discriminator=None):
    """Produce a mapper level row processor callable
    which processes rows into mapped instances.

    Returns the closure ``_instance(row, result)`` which, for each
    database row, resolves the identity key, locates or creates the
    mapped instance, populates its attributes, and returns it.
    """

    # note that this method, most of which exists in a closure
    # called _instance(), resists being broken out, as
    # attempts to do so tend to add significant function
    # call overhead.  _instance() is the most
    # performance-critical section in the whole ORM.

    pk_cols = mapper.primary_key

    if polymorphic_from or refresh_state:
        # already dispatched to a subclass processor, or refreshing a
        # known instance: no discriminator handling needed
        polymorphic_on = None
    else:
        if polymorphic_discriminator is not None:
            polymorphic_on = polymorphic_discriminator
        else:
            polymorphic_on = mapper.polymorphic_on
        # lazily-built map of discriminator value -> subclass processor
        polymorphic_instances = util.PopulateDict(
            _configure_subclass_mapper(
                mapper,
                context, path, adapter)
        )

    version_id_col = mapper.version_id_col

    if adapter:
        # translate columns through the aliasing adapter, if any
        pk_cols = [adapter.columns[c] for c in pk_cols]
        if polymorphic_on is not None:
            polymorphic_on = adapter.columns[polymorphic_on]
        if version_id_col is not None:
            version_id_col = adapter.columns[version_id_col]

    identity_class = mapper._identity_class

    # attribute-level populators, built lazily on first row
    new_populators = []
    existing_populators = []
    eager_populators = []

    load_path = context.query._current_path + path \
        if context.query._current_path.path \
        else path

    def populate_state(state, dict_, row, isnew, only_load_props):
        # apply attribute populators to one instance's state/dict
        if isnew:
            if context.propagate_options:
                state.load_options = context.propagate_options
            if state.load_options:
                state.load_path = load_path

        if not new_populators:
            _populators(mapper, context, path, row, adapter,
                        new_populators,
                        existing_populators,
                        eager_populators
                        )

        if isnew:
            populators = new_populators
        else:
            populators = existing_populators

        if only_load_props is None:
            for key, populator in populators:
                populator(state, dict_, row)
        elif only_load_props:
            for key, populator in populators:
                if key in only_load_props:
                    populator(state, dict_, row)

    session_identity_map = context.session.identity_map

    listeners = mapper.dispatch

    # legacy events - I'd very much like to yank these totally
    translate_row = listeners.translate_row or None
    create_instance = listeners.create_instance or None
    populate_instance = listeners.populate_instance or None
    append_result = listeners.append_result or None
    ####

    populate_existing = context.populate_existing or mapper.always_refresh
    invoke_all_eagers = context.invoke_all_eagers

    # pick the "row has no usable primary key" test depending on
    # whether partial (some-NULL) primary keys are allowed
    if mapper.allow_partial_pks:
        is_not_primary_key = _none_set.issuperset
    else:
        is_not_primary_key = _none_set.issubset

    def _instance(row, result):
        # build populators up front when all eager loaders must fire
        if not new_populators and invoke_all_eagers:
            _populators(mapper, context, path, row, adapter,
                        new_populators,
                        existing_populators,
                        eager_populators
                        )

        if translate_row:
            for fn in translate_row:
                ret = fn(mapper, context, row)
                if ret is not EXT_CONTINUE:
                    row = ret
                    break

        if polymorphic_on is not None:
            # delegate to the subclass's processor when the row's
            # discriminator indicates a more specific mapper
            discriminator = row[polymorphic_on]
            if discriminator is not None:
                _instance = polymorphic_instances[discriminator]
                if _instance:
                    return _instance(row, result)

        # determine identity key
        if refresh_state:
            identitykey = refresh_state.key
            if identitykey is None:
                # super-rare condition; a refresh is being called
                # on a non-instance-key instance; this is meant to only
                # occur within a flush()
                identitykey = mapper._identity_key_from_state(refresh_state)
        else:
            identitykey = (
                identity_class,
                tuple([row[column] for column in pk_cols])
            )

        instance = session_identity_map.get(identitykey)

        if instance is not None:
            # identity-map hit
            state = attributes.instance_state(instance)
            dict_ = attributes.instance_dict(instance)

            # loaded earlier in this same execution?
            isnew = state.runid != context.runid
            currentload = not isnew
            loaded_instance = False

            if not currentload and \
                    version_id_col is not None and \
                    context.version_check and \
                    mapper._get_state_attr_by_column(
                        state,
                        dict_,
                        mapper.version_id_col) != \
                    row[version_id_col]:

                raise orm_exc.StaleDataError(
                    "Instance '%s' has version id '%s' which "
                    "does not match database-loaded version id '%s'."
                    % (state_str(state),
                       mapper._get_state_attr_by_column(
                           state, dict_,
                           mapper.version_id_col),
                       row[version_id_col]))
        elif refresh_state:
            # out of band refresh_state detected (i.e. its not in the
            # session.identity_map) honor it anyway.  this can happen
            # if a _get() occurs within save_obj(), such as
            # when eager_defaults is True.
            state = refresh_state
            instance = state.obj()
            dict_ = attributes.instance_dict(instance)
            isnew = state.runid != context.runid
            currentload = True
            loaded_instance = False
        else:
            # check for non-NULL values in the primary key columns,
            # else no entity is returned for the row
            if is_not_primary_key(identitykey[1]):
                return None

            isnew = True
            currentload = True
            loaded_instance = True

            if create_instance:
                for fn in create_instance:
                    instance = fn(mapper, context,
                                  row, mapper.class_)
                    if instance is not EXT_CONTINUE:
                        manager = attributes.manager_of_class(
                            instance.__class__)
                        # TODO: if manager is None, raise a friendly error
                        # about returning instances of unmapped types
                        manager.setup_instance(instance)
                        break
                else:
                    # every listener returned EXT_CONTINUE
                    instance = mapper.class_manager.new_instance()
            else:
                instance = mapper.class_manager.new_instance()

            dict_ = attributes.instance_dict(instance)
            state = attributes.instance_state(instance)
            state.key = identitykey

            # attach instance to session.
            state.session_id = context.session.hash_key
            session_identity_map.add(state)

        if currentload or populate_existing:
            # state is being fully loaded, so populate.
            # add to the "context.progress" collection.
            if isnew:
                state.runid = context.runid
                context.progress[state] = dict_

            if populate_instance:
                for fn in populate_instance:
                    ret = fn(mapper, context, row, state,
                             only_load_props=only_load_props,
                             instancekey=identitykey, isnew=isnew)
                    if ret is not EXT_CONTINUE:
                        break
                else:
                    populate_state(state, dict_, row, isnew, only_load_props)
            else:
                populate_state(state, dict_, row, isnew, only_load_props)

            if loaded_instance:
                state.manager.dispatch.load(state, context)
            elif isnew:
                state.manager.dispatch.refresh(state, context, only_load_props)

        elif state in context.partials or state.unloaded or eager_populators:
            # state is having a partial set of its attributes
            # refreshed.  Populate those attributes,
            # and add to the "context.partials" collection.
            if state in context.partials:
                isnew = False
                (d_, attrs) = context.partials[state]
            else:
                isnew = True
                attrs = state.unloaded
                context.partials[state] = (dict_, attrs)

            if populate_instance:
                for fn in populate_instance:
                    ret = fn(mapper, context, row, state,
                             only_load_props=attrs,
                             instancekey=identitykey, isnew=isnew)
                    if ret is not EXT_CONTINUE:
                        break
                else:
                    populate_state(state, dict_, row, isnew, attrs)
            else:
                populate_state(state, dict_, row, isnew, attrs)

            # eager loaders fire for attributes not flagged as unloaded
            for key, pop in eager_populators:
                if key not in state.unloaded:
                    pop(state, dict_, row)

            if isnew:
                state.manager.dispatch.refresh(state, context, attrs)

        if result is not None:
            if append_result:
                for fn in append_result:
                    if fn(mapper, context, row, state,
                          result, instancekey=identitykey,
                          isnew=isnew) is not EXT_CONTINUE:
                        break
                else:
                    result.append(instance)
            else:
                result.append(instance)

        return instance
    return _instance
|
||||
|
||||
|
||||
def _populators(mapper, context, path, row, adapter,
                new_populators, existing_populators, eager_populators):
    """Produce a collection of attribute level row processor
    callables.

    Each mapped property contributes up to four processors (new,
    existing, delayed, eager); non-None processors are appended to the
    corresponding output list as ``(key, processor)`` pairs.  Delayed
    populators are folded onto the end of ``new_populators``.
    """

    delayed_populators = []
    targets = (new_populators, existing_populators,
               delayed_populators, eager_populators)

    for prop in mapper._props.values():
        processors = prop.create_row_processor(
            context, path, mapper, row, adapter)
        for target, processor in zip(targets, processors):
            if processor is not None:
                target.append((prop.key, processor))

    if delayed_populators:
        new_populators.extend(delayed_populators)
|
||||
|
||||
|
||||
def _configure_subclass_mapper(mapper, context, path, adapter):
    """Produce a mapper level row processor callable factory for mappers
    inheriting this one.

    The returned factory is used as the creation function of a
    ``util.PopulateDict`` keyed on discriminator values.
    """

    def configure_subclass_mapper(discriminator):
        try:
            sub_mapper = mapper.polymorphic_map[discriminator]
        except KeyError:
            raise AssertionError(
                "No such polymorphic_identity %r is defined" %
                discriminator)
        if sub_mapper is mapper:
            # row belongs to this mapper itself; no redirection needed
            return None

        return instance_processor(
            sub_mapper,
            context,
            path,
            adapter,
            polymorphic_from=mapper)
    return configure_subclass_mapper
|
||||
|
||||
|
||||
def load_scalar_attributes(mapper, state, attribute_names):
    """initiate a column-based attribute refresh operation.

    Emits SQL to reload the given ``attribute_names`` for ``state``,
    raising :class:`.DetachedInstanceError` when there is no session
    and :class:`.ObjectDeletedError` when the underlying row is gone.
    """

    # assert mapper is _state_mapper(state)
    session = state.session
    if not session:
        raise orm_exc.DetachedInstanceError(
            "Instance %s is not bound to a Session; "
            "attribute refresh operation cannot proceed" %
            (state_str(state)))

    has_key = bool(state.key)

    result = False

    if mapper.inherits and not mapper.concrete:
        # inheritance scenario: try an optimized statement limited to
        # the tables actually needed; may come back None
        statement = mapper._optimized_get_statement(state, attribute_names)
        if statement is not None:
            result = load_on_ident(
                session.query(mapper).from_statement(statement),
                None,
                only_load_props=attribute_names,
                refresh_state=state
            )

    if result is False:
        if has_key:
            identity_key = state.key
        else:
            # this codepath is rare - only valid when inside a flush, and the
            # object is becoming persistent but hasn't yet been assigned
            # an identity_key.

            # check here to ensure we have the attrs we need.
            pk_attrs = [mapper._columntoproperty[col].key
                        for col in mapper.primary_key]
            if state.expired_attributes.intersection(pk_attrs):
                raise sa_exc.InvalidRequestError(
                    "Instance %s cannot be refreshed - it's not "
                    " persistent and does not "
                    "contain a full primary key." % state_str(state))
            identity_key = mapper._identity_key_from_state(state)

        # refuse to refresh on a fully- or (when disallowed)
        # partially-NULL primary key
        if (_none_set.issubset(identity_key) and
                not mapper.allow_partial_pks) or \
                _none_set.issuperset(identity_key):
            util.warn("Instance %s to be refreshed doesn't "
                      "contain a full primary key - can't be refreshed "
                      "(and shouldn't be expired, either)."
                      % state_str(state))
            return

        result = load_on_ident(
            session.query(mapper),
            identity_key,
            refresh_state=state,
            only_load_props=attribute_names)

    # if instance is pending, a refresh operation
    # may not complete (even if PK attributes are assigned)
    if has_key and result is None:
        raise orm_exc.ObjectDeletedError(state)
|
||||
--- new file: lib/python3.4/site-packages/sqlalchemy/orm/mapper.py (2733 lines) — diff suppressed because it is too large ---
--- new file: lib/python3.4/site-packages/sqlalchemy/orm/path_registry.py (267 lines) @@ -0,0 +1,267 @@ ---
|
|||
# orm/path_registry.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
"""Path tracking utilities, representing mapper graph traversals.
|
||||
|
||||
"""
|
||||
|
||||
from .. import inspection
|
||||
from .. import util
|
||||
from .. import exc
|
||||
from itertools import chain
|
||||
from .base import class_mapper
|
||||
|
||||
|
||||
def _unreduce_path(path):
    """Reconstruct a :class:`.PathRegistry` from its serialized form.

    Used as the reconstruction callable in ``PathRegistry.__reduce__``
    so that path objects can round-trip through pickle.
    """
    return PathRegistry.deserialize(path)
|
||||
|
||||
|
||||
# Suffix token used by wildcard loader options, e.g. "relationship:*".
_WILDCARD_TOKEN = "*"

# Suffix token marking the "default strategy" path for an attribute.
_DEFAULT_TOKEN = "_sa_default"
|
||||
|
||||
|
||||
class PathRegistry(object):
    """Represent query load paths and registry functions.

    Basically represents structures like:

    (<User mapper>, "orders", <Order mapper>, "items", <Item mapper>)

    These structures are generated by things like
    query options (joinedload(), subqueryload(), etc.) and are
    used to compose keys stored in the query._attributes dictionary
    for various options.

    They are then re-composed at query compile/result row time as
    the query is formed and as rows are fetched, where they again
    serve to compose keys to look up options in the context.attributes
    dictionary, which is copied from query._attributes.

    The path structure has a limited amount of caching, where each
    "root" ultimately pulls from a fixed registry associated with
    the first mapper, that also contains elements for each of its
    property keys. However paths longer than two elements, which
    are the exception rather than the rule, are generated on an
    as-needed basis.

    """

    def __eq__(self, other):
        # equality is based solely on the path tuple.
        # NOTE(review): assumes "other" exposes a .path attribute when it
        # is not None — confirm against callers.
        return other is not None and \
            self.path == other.path

    def set(self, attributes, key, value):
        # store "value" keyed by (key, this path) in the given dictionary
        attributes[(key, self.path)] = value

    def setdefault(self, attributes, key, value):
        attributes.setdefault((key, self.path), value)

    def get(self, attributes, key, value=None):
        # (key, path) lookup with an explicit default, analogous to
        # dict.get() but composing the compound key first
        key = (key, self.path)
        if key in attributes:
            return attributes[key]
        else:
            return value

    def __len__(self):
        return len(self.path)

    @property
    def length(self):
        # alias for len(self); kept as a property for attribute-style access
        return len(self.path)

    def pairs(self):
        # yield the path as (entity, property) 2-tuples, i.e. the
        # even-indexed element paired with the following odd-indexed one
        path = self.path
        for i in range(0, len(path), 2):
            yield path[i], path[i + 1]

    def contains_mapper(self, mapper):
        # walk only the even-indexed path elements (the entities) and
        # test inheritance against the given mapper
        for path_mapper in [
            self.path[i] for i in range(0, len(self.path), 2)
        ]:
            if path_mapper.is_mapper and \
                    path_mapper.isa(mapper):
                return True
        else:
            return False

    def contains(self, attributes, key):
        return (key, self.path) in attributes

    def __reduce__(self):
        # pickle via the module-level _unreduce_path() helper
        return _unreduce_path, (self.serialize(), )

    def serialize(self):
        # reduce the path to a list of (class, property-key) pairs;
        # a trailing None pads the key list when the path ends on an entity
        path = self.path
        return list(zip(
            [m.class_ for m in [path[i] for i in range(0, len(path), 2)]],
            [path[i].key for i in range(1, len(path), 2)] + [None]
        ))

    @classmethod
    def deserialize(cls, path):
        """Rebuild a registry from the output of :meth:`.serialize`."""
        if path is None:
            return None

        # re-resolve each (class, key) pair back into
        # (mapper, MapperProperty); a None key produces a None placeholder
        p = tuple(chain(*[(class_mapper(mcls),
                           class_mapper(mcls).attrs[key]
                           if key is not None else None)
                          for mcls, key in path]))
        if p and p[-1] is None:
            # strip the padding None added by serialize()
            p = p[0:-1]
        return cls.coerce(p)

    @classmethod
    def per_mapper(cls, mapper):
        # entry point for the cached, per-mapper registry under the root
        return EntityRegistry(
            cls.root, mapper
        )

    @classmethod
    def coerce(cls, raw):
        # fold a raw path tuple through __getitem__, starting at the root
        return util.reduce(lambda prev, next: prev[next], raw, cls.root)

    def token(self, token):
        # wildcard tokens extend this path; default-strategy tokens are
        # always anchored at the root
        if token.endswith(':' + _WILDCARD_TOKEN):
            return TokenRegistry(self, token)
        elif token.endswith(":" + _DEFAULT_TOKEN):
            return TokenRegistry(self.root, token)
        else:
            raise exc.ArgumentError("invalid token: %s" % token)

    def __add__(self, other):
        # concatenate by re-indexing each element of "other" onto self
        return util.reduce(
            lambda prev, next: prev[next],
            other.path, self)

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.path, )
|
||||
|
||||
|
||||
class RootRegistry(PathRegistry):
    """The empty root of every load path.

    Indexing the root with a mapped entity defers to that entity's own
    ``_path_registry``, so caches are maintained per-root-mapper rather
    than in one global structure.
    """

    # the root carries no elements and no entity of its own
    path = ()
    has_entity = False

    def __getitem__(self, entity):
        # delegate to the entity's dedicated registry
        return entity._path_registry

# install the shared singleton root onto the base class
PathRegistry.root = RootRegistry()
|
||||
|
||||
|
||||
class TokenRegistry(PathRegistry):
    """A path element that is a plain string token — the wildcard or
    default-strategy markers — rather than an entity or property."""

    has_entity = False

    def __init__(self, parent, token):
        # append the raw string token onto the parent's path tuple
        self.token = token
        self.parent = parent
        self.path = parent.path + (token,)

    def __getitem__(self, entity):
        # token paths are terminal; they cannot be extended further
        raise NotImplementedError()
|
||||
|
||||
|
||||
class PropRegistry(PathRegistry):
    """Path element representing a :class:`.MapperProperty` (a mapped
    attribute) beneath an entity element."""

    def __init__(self, parent, prop):
        # restate this path in terms of the
        # given MapperProperty's parent.
        insp = inspection.inspect(parent[-1])
        if not insp.is_aliased_class or insp._use_mapper_path:
            # plain mapper (or an alias that opts into mapper paths):
            # re-anchor on the property's owning mapper
            parent = parent.parent[prop.parent]
        elif insp.is_aliased_class and insp.with_polymorphic_mappers:
            # with_polymorphic alias: if the property belongs to one of
            # the polymorphic sub-mappers, re-anchor on the alias entity
            # that corresponds to that sub-mapper
            if prop.parent is not insp.mapper and \
                    prop.parent in insp.with_polymorphic_mappers:
                subclass_entity = parent[-1]._entity_for_mapper(prop.parent)
                parent = parent.parent[subclass_entity]

        self.prop = prop
        self.parent = parent
        self.path = parent.path + (prop,)

    @util.memoized_property
    def has_entity(self):
        # only relationship-like properties carry a target mapper
        return hasattr(self.prop, "mapper")

    @util.memoized_property
    def entity(self):
        return self.prop.mapper

    @util.memoized_property
    def _wildcard_path_loader_key(self):
        """Given a path (mapper A, prop X), replace the prop with the wildcard,
        e.g. (mapper A, 'relationship:.*') or (mapper A, 'column:.*'), then
        return within the ("loader", path) structure.

        """
        return ("loader",
                self.parent.token(
                    "%s:%s" % (
                        self.prop.strategy_wildcard_key, _WILDCARD_TOKEN)
                ).path
                )

    @util.memoized_property
    def _default_path_loader_key(self):
        # like _wildcard_path_loader_key, but using the default-strategy
        # token, which anchors at the path root
        return ("loader",
                self.parent.token(
                    "%s:%s" % (self.prop.strategy_wildcard_key,
                               _DEFAULT_TOKEN)
                ).path
                )

    @util.memoized_property
    def _loader_key(self):
        # exact (non-wildcard) loader option key for this path
        return ("loader", self.path)

    @property
    def mapper(self):
        return self.entity

    @property
    def entity_path(self):
        # extend this property path with its own target entity
        return self[self.entity]

    def __getitem__(self, entity):
        # integer/slice indexing reads the raw path tuple; anything else
        # extends the path with an entity element
        if isinstance(entity, (int, slice)):
            return self.path[entity]
        else:
            return EntityRegistry(
                self, entity
            )
|
||||
|
||||
|
||||
class EntityRegistry(PathRegistry, dict):
    """Path element for a mapper or aliased entity.

    Doubles as a dict that caches the :class:`.PropRegistry` built for
    each mapped attribute accessed beneath this entity.
    """

    is_aliased_class = False
    has_entity = True

    def __init__(self, parent, entity):
        self.key = self.entity = entity
        self.parent = parent
        self.is_aliased_class = entity.is_aliased_class
        self.path = parent.path + (entity,)
        # an entity node serves as its own "entity path"
        self.entity_path = self

    @property
    def mapper(self):
        # resolve aliased classes down to the underlying Mapper
        return inspection.inspect(self.entity).mapper

    def __bool__(self):
        # always truthy, even while the caching dict is empty
        return True
    __nonzero__ = __bool__

    def __getitem__(self, entity):
        # integer/slice indexing reads the raw path tuple; anything else
        # is a cache lookup that builds a PropRegistry on a miss
        if isinstance(entity, (int, slice)):
            return self.path[entity]
        return dict.__getitem__(self, entity)

    def __missing__(self, key):
        # populate the cache lazily, one property at a time
        registry = self[key] = PropRegistry(self, key)
        return registry
|
||||
1117
lib/python3.4/site-packages/sqlalchemy/orm/persistence.py
Normal file
1117
lib/python3.4/site-packages/sqlalchemy/orm/persistence.py
Normal file
File diff suppressed because it is too large
Load diff
259
lib/python3.4/site-packages/sqlalchemy/orm/properties.py
Normal file
259
lib/python3.4/site-packages/sqlalchemy/orm/properties.py
Normal file
|
|
@ -0,0 +1,259 @@
|
|||
# orm/properties.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""MapperProperty implementations.
|
||||
|
||||
This is a private module which defines the behavior of invidual ORM-
|
||||
mapped attributes.
|
||||
|
||||
"""
|
||||
from __future__ import absolute_import
|
||||
|
||||
from .. import util, log
|
||||
from ..sql import expression
|
||||
from . import attributes
|
||||
from .util import _orm_full_deannotate
|
||||
|
||||
from .interfaces import PropComparator, StrategizedProperty
|
||||
|
||||
__all__ = ['ColumnProperty', 'CompositeProperty', 'SynonymProperty',
|
||||
'ComparableProperty', 'RelationshipProperty']
|
||||
|
||||
|
||||
@log.class_logger
class ColumnProperty(StrategizedProperty):
    """Describes an object attribute that corresponds to a table column.

    Public constructor is the :func:`.orm.column_property` function.

    """

    # loader-option wildcard category for column attributes
    strategy_wildcard_key = 'column'

    def __init__(self, *columns, **kwargs):
        """Provide a column-level property for use with a Mapper.

        Column-based properties can normally be applied to the mapper's
        ``properties`` dictionary using the :class:`.Column` element directly.
        Use this function when the given column is not directly present within
        the mapper's selectable; examples include SQL expressions, functions,
        and scalar SELECT queries.

        Columns that aren't present in the mapper's selectable won't be
        persisted by the mapper and are effectively "read-only" attributes.

        :param \*cols:
            list of Column objects to be mapped.

        :param active_history=False:
            When ``True``, indicates that the "previous" value for a
            scalar attribute should be loaded when replaced, if not
            already loaded. Normally, history tracking logic for
            simple non-primary-key scalar values only needs to be
            aware of the "new" value in order to perform a flush. This
            flag is available for applications that make use of
            :func:`.attributes.get_history` or :meth:`.Session.is_modified`
            which also need to know
            the "previous" value of the attribute.

            .. versionadded:: 0.6.6

        :param comparator_factory: a class which extends
            :class:`.ColumnProperty.Comparator` which provides custom SQL
            clause generation for comparison operations.

        :param group:
            a group name for this property when marked as deferred.

        :param deferred:
            when True, the column property is "deferred", meaning that
            it does not load immediately, and is instead loaded when the
            attribute is first accessed on an instance. See also
            :func:`~sqlalchemy.orm.deferred`.

        :param doc:
            optional string that will be applied as the doc on the
            class-bound descriptor.

        :param expire_on_flush=True:
            Disable expiry on flush. A column_property() which refers
            to a SQL expression (and not a single table-bound column)
            is considered to be a "read only" property; populating it
            has no effect on the state of data, and it can only return
            database state. For this reason a column_property()'s value
            is expired whenever the parent object is involved in a
            flush, that is, has any kind of "dirty" state within a flush.
            Setting this parameter to ``False`` will have the effect of
            leaving any existing value present after the flush proceeds.
            Note however that the :class:`.Session` with default expiration
            settings still expires
            all attributes after a :meth:`.Session.commit` call, however.

            .. versionadded:: 0.7.3

        :param info: Optional data dictionary which will be populated into the
            :attr:`.MapperProperty.info` attribute of this object.

            .. versionadded:: 0.8

        :param extension:
            an
            :class:`.AttributeExtension`
            instance, or list of extensions, which will be prepended
            to the list of attribute listeners for the resulting
            descriptor placed on the class.
            **Deprecated.** Please see :class:`.AttributeEvents`.

        """
        # keep the original (possibly annotated) columns separately from
        # the de-annotated working copies
        self._orig_columns = [expression._labeled(c) for c in columns]
        self.columns = [expression._labeled(_orm_full_deannotate(c))
                        for c in columns]
        self.group = kwargs.pop('group', None)
        self.deferred = kwargs.pop('deferred', False)
        self.instrument = kwargs.pop('_instrument', True)
        self.comparator_factory = kwargs.pop('comparator_factory',
                                             self.__class__.Comparator)
        self.descriptor = kwargs.pop('descriptor', None)
        self.extension = kwargs.pop('extension', None)
        self.active_history = kwargs.pop('active_history', False)
        self.expire_on_flush = kwargs.pop('expire_on_flush', True)

        if 'info' in kwargs:
            self.info = kwargs.pop('info')

        if 'doc' in kwargs:
            self.doc = kwargs.pop('doc')
        else:
            # no explicit doc: inherit from a mapped column, scanning from
            # the last column backwards and taking the first .doc found
            for col in reversed(self.columns):
                doc = getattr(col, 'doc', None)
                if doc is not None:
                    self.doc = doc
                    break
            else:
                self.doc = None

        # any keywords left over at this point were not recognized
        if kwargs:
            raise TypeError(
                "%s received unexpected keyword argument(s): %s" % (
                    self.__class__.__name__,
                    ', '.join(sorted(kwargs.keys()))))

        util.set_creation_order(self)

        # choose the loader strategy based on the deferred/instrument flags
        self.strategy_class = self._strategy_lookup(
            ("deferred", self.deferred),
            ("instrument", self.instrument)
        )

    @property
    def expression(self):
        """Return the primary column or expression for this ColumnProperty.

        """
        return self.columns[0]

    def instrument_class(self, mapper):
        # install the class-bound descriptor, unless instrumentation was
        # disabled via _instrument=False
        if not self.instrument:
            return

        attributes.register_descriptor(
            mapper.class_,
            self.key,
            comparator=self.comparator_factory(self, mapper),
            parententity=mapper,
            doc=self.doc
        )

    def do_init(self):
        super(ColumnProperty, self).do_init()
        # warn when multiple distinct primary key columns were merged into
        # a single attribute, which usually indicates a mapping mistake
        if len(self.columns) > 1 and \
                set(self.parent.primary_key).issuperset(self.columns):
            util.warn(
                ("On mapper %s, primary key column '%s' is being combined "
                 "with distinct primary key column '%s' in attribute '%s'. "
                 "Use explicit properties to give each column its own mapped "
                 "attribute name.") % (self.parent, self.columns[1],
                                       self.columns[0], self.key))

    def copy(self):
        # shallow re-construction; note that not all constructor options
        # (e.g. comparator_factory) are carried over here
        return ColumnProperty(
            deferred=self.deferred,
            group=self.group,
            active_history=self.active_history,
            *self.columns)

    def _getcommitted(self, state, dict_, column,
                      passive=attributes.PASSIVE_OFF):
        # return the last-flushed value for this attribute
        return state.get_impl(self.key).\
            get_committed_value(state, dict_, passive=passive)

    def merge(self, session, source_state, source_dict, dest_state,
              dest_dict, load, _recursive):
        """Merge the attribute's value from *source* onto *dest*, as used
        by :meth:`.Session.merge`."""
        if not self.instrument:
            return
        elif self.key in source_dict:
            value = source_dict[self.key]

            if not load:
                # "load=False" merge: copy the raw value without events
                dest_dict[self.key] = value
            else:
                impl = dest_state.get_impl(self.key)
                impl.set(dest_state, dest_dict, value, None)
        elif dest_state.has_identity and self.key not in dest_dict:
            # attribute absent on the source: expire it on the destination
            # so it reloads from the database on next access
            dest_state._expire_attributes(dest_dict, [self.key])

    class Comparator(PropComparator):
        """Produce boolean, comparison, and other operators for
        :class:`.ColumnProperty` attributes.

        See the documentation for :class:`.PropComparator` for a brief
        overview.

        See also:

        :class:`.PropComparator`

        :class:`.ColumnOperators`

        :ref:`types_operators`

        :attr:`.TypeEngine.comparator_factory`

        """
        @util.memoized_instancemethod
        def __clause_element__(self):
            # produce the SQL expression for this attribute, adapted to an
            # alias when one is in play, otherwise annotated with the
            # owning mapper
            if self.adapter:
                return self.adapter(self.prop.columns[0])
            else:
                return self.prop.columns[0]._annotate({
                    "parententity": self._parentmapper,
                    "parentmapper": self._parentmapper})

        @util.memoized_property
        def info(self):
            # prefer the column expression's .info, falling back to the
            # property's own info dictionary
            ce = self.__clause_element__()
            try:
                return ce.info
            except AttributeError:
                return self.prop.info

        def __getattr__(self, key):
            """proxy attribute access down to the mapped column.

            this allows user-defined comparison methods to be accessed.
            """
            return getattr(self.__clause_element__(), key)

        def operate(self, op, *other, **kwargs):
            return op(self.__clause_element__(), *other, **kwargs)

        def reverse_operate(self, op, other, **kwargs):
            # reflected operation: the column becomes the right operand
            col = self.__clause_element__()
            return op(col._bind_param(op, other), col, **kwargs)

    def __str__(self):
        return str(self.parent.class_.__name__) + "." + self.key
|
||||
3577
lib/python3.4/site-packages/sqlalchemy/orm/query.py
Normal file
3577
lib/python3.4/site-packages/sqlalchemy/orm/query.py
Normal file
File diff suppressed because it is too large
Load diff
2714
lib/python3.4/site-packages/sqlalchemy/orm/relationships.py
Normal file
2714
lib/python3.4/site-packages/sqlalchemy/orm/relationships.py
Normal file
File diff suppressed because it is too large
Load diff
177
lib/python3.4/site-packages/sqlalchemy/orm/scoping.py
Normal file
177
lib/python3.4/site-packages/sqlalchemy/orm/scoping.py
Normal file
|
|
@ -0,0 +1,177 @@
|
|||
# orm/scoping.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
from .. import exc as sa_exc
|
||||
from ..util import ScopedRegistry, ThreadLocalRegistry, warn
|
||||
from . import class_mapper, exc as orm_exc
|
||||
from .session import Session
|
||||
|
||||
|
||||
__all__ = ['scoped_session']
|
||||
|
||||
|
||||
class scoped_session(object):
    """Provides scoped management of :class:`.Session` objects.

    See :ref:`unitofwork_contextual` for a tutorial.

    """

    def __init__(self, session_factory, scopefunc=None):
        """Construct a new :class:`.scoped_session`.

        :param session_factory: a factory to create new :class:`.Session`
         instances. This is usually, but not necessarily, an instance
         of :class:`.sessionmaker`.
        :param scopefunc: optional function which defines
         the current scope. If not passed, the :class:`.scoped_session`
         object assumes "thread-local" scope, and will use
         a Python ``threading.local()`` in order to maintain the current
         :class:`.Session`. If passed, the function should return
         a hashable token; this token will be used as the key in a
         dictionary in order to store and retrieve the current
         :class:`.Session`.

        """
        self.session_factory = session_factory
        if scopefunc:
            self.registry = ScopedRegistry(session_factory, scopefunc)
        else:
            # default: one Session per thread
            self.registry = ThreadLocalRegistry(session_factory)

    def __call__(self, **kw):
        """Return the current :class:`.Session`, creating it
        using the session factory if not present.

        :param \**kw: Keyword arguments will be passed to the
         session factory callable, if an existing :class:`.Session`
         is not present. If the :class:`.Session` is present and
         keyword arguments have been passed,
         :exc:`~sqlalchemy.exc.InvalidRequestError` is raised.

        """
        if kw:
            # "scope" is consumed here and never passed to the factory;
            # passing scope=None explicitly requests a brand-new Session
            # that is NOT stored in the registry
            scope = kw.pop('scope', False)
            if scope is not None:
                if self.registry.has():
                    # keyword args can't be applied to an already-created
                    # Session
                    raise sa_exc.InvalidRequestError(
                        "Scoped session is already present; "
                        "no new arguments may be specified.")
                else:
                    sess = self.session_factory(**kw)
                    self.registry.set(sess)
                    return sess
            else:
                return self.session_factory(**kw)
        else:
            return self.registry()

    def remove(self):
        """Dispose of the current :class:`.Session`, if present.

        This will first call :meth:`.Session.close` method
        on the current :class:`.Session`, which releases any existing
        transactional/connection resources still being held; transactions
        specifically are rolled back. The :class:`.Session` is then
        discarded. Upon next usage within the same scope,
        the :class:`.scoped_session` will produce a new
        :class:`.Session` object.

        """

        if self.registry.has():
            self.registry().close()
        self.registry.clear()

    def configure(self, **kwargs):
        """reconfigure the :class:`.sessionmaker` used by this
        :class:`.scoped_session`.

        See :meth:`.sessionmaker.configure`.

        """

        if self.registry.has():
            # configuration only affects Sessions created afterwards
            warn('At least one scoped session is already present. '
                 ' configure() can not affect sessions that have '
                 'already been created.')

        self.session_factory.configure(**kwargs)

    def query_property(self, query_cls=None):
        """return a class property which produces a :class:`.Query` object
        against the class and the current :class:`.Session` when called.

        e.g.::

            Session = scoped_session(sessionmaker())

            class MyClass(object):
                query = Session.query_property()

            # after mappers are defined
            result = MyClass.query.filter(MyClass.name=='foo').all()

        Produces instances of the session's configured query class by
        default. To override and use a custom implementation, provide
        a ``query_cls`` callable. The callable will be invoked with
        the class's mapper as a positional argument and a session
        keyword argument.

        There is no limit to the number of query properties placed on
        a class.

        """
        # "s" is the descriptor instance; "self" (closed over) is this
        # scoped_session
        class query(object):
            def __get__(s, instance, owner):
                try:
                    mapper = class_mapper(owner)
                    if mapper:
                        if query_cls:
                            # custom query class
                            return query_cls(mapper, session=self.registry())
                        else:
                            # session's configured query class
                            return self.registry().query(mapper)
                except orm_exc.UnmappedClassError:
                    # accessing .query on an unmapped class yields None
                    return None
        return query()
|
||||
|
||||
# Legacy alias; new code should use ``scoped_session`` directly.
ScopedSession = scoped_session
"""Old name for backwards compatibility."""
|
||||
|
||||
|
||||
def instrument(name):
    """Build a proxy method that forwards *name* to the current Session.

    The returned function, when attached to :class:`.scoped_session`,
    resolves the session for the current scope via ``self.registry()``
    and invokes the named method on it.
    """
    def do(self, *args, **kwargs):
        # resolve the scoped Session first, then dispatch the call
        session = self.registry()
        return getattr(session, name)(*args, **kwargs)
    return do
|
||||
|
||||
# Proxy every public Session method onto scoped_session, so the scoped
# object can be used interchangeably with a plain Session instance.
for meth in Session.public_methods:
    setattr(scoped_session, meth, instrument(meth))
|
||||
|
||||
|
||||
def makeprop(name):
    """Create a property that proxies attribute *name* on the Session
    held in the current scope's registry.

    Reads and writes both pass straight through to that Session.
    """

    def get(self):
        return getattr(self.registry(), name)

    def set(self, attr):
        # write through to the current scoped Session
        setattr(self.registry(), name, attr)

    return property(get, set)
|
||||
|
||||
# Proxy commonly-used Session attributes onto scoped_session; reads and
# writes go through to the Session in the current scope's registry.
for prop in ('bind', 'dirty', 'deleted', 'new', 'identity_map',
             'is_active', 'autoflush', 'no_autoflush', 'info'):
    setattr(scoped_session, prop, makeprop(prop))
|
||||
|
||||
|
||||
def clslevel(name):
    """Create a classmethod that forwards *name* to the ``Session`` class
    itself, bypassing the per-scope registry entirely."""
    def do(cls, *args, **kwargs):
        target = getattr(Session, name)
        return target(*args, **kwargs)
    return classmethod(do)
|
||||
|
||||
# Class-level Session methods are exposed as classmethods on
# scoped_session; these do not touch the per-scope registry.
for prop in ('close_all', 'object_session', 'identity_key'):
    setattr(scoped_session, prop, clslevel(prop))
|
||||
2458
lib/python3.4/site-packages/sqlalchemy/orm/session.py
Normal file
2458
lib/python3.4/site-packages/sqlalchemy/orm/session.py
Normal file
File diff suppressed because it is too large
Load diff
682
lib/python3.4/site-packages/sqlalchemy/orm/state.py
Normal file
682
lib/python3.4/site-packages/sqlalchemy/orm/state.py
Normal file
|
|
@ -0,0 +1,682 @@
|
|||
# orm/state.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Defines instrumentation of instances.
|
||||
|
||||
This module is usually not directly visible to user applications, but
|
||||
defines a large part of the ORM's interactivity.
|
||||
|
||||
"""
|
||||
|
||||
import weakref
|
||||
from .. import util
|
||||
from . import exc as orm_exc, interfaces
|
||||
from .path_registry import PathRegistry
|
||||
from .base import PASSIVE_NO_RESULT, SQL_OK, NEVER_SET, ATTR_WAS_SET, \
|
||||
NO_VALUE, PASSIVE_NO_INITIALIZE, INIT_OK, PASSIVE_OFF
|
||||
from . import base
|
||||
|
||||
|
||||
class InstanceState(interfaces._InspectionAttr):
|
||||
"""tracks state information at the instance level.
|
||||
|
||||
The :class:`.InstanceState` is a key object used by the
|
||||
SQLAlchemy ORM in order to track the state of an object;
|
||||
it is created the moment an object is instantiated, typically
|
||||
as a result of :term:`instrumentation` which SQLAlchemy applies
|
||||
to the ``__init__()`` method of the class.
|
||||
|
||||
:class:`.InstanceState` is also a semi-public object,
|
||||
available for runtime inspection as to the state of a
|
||||
mapped instance, including information such as its current
|
||||
status within a particular :class:`.Session` and details
|
||||
about data on individual attributes. The public API
|
||||
in order to acquire a :class:`.InstanceState` object
|
||||
is to use the :func:`.inspect` system::
|
||||
|
||||
>>> from sqlalchemy import inspect
|
||||
>>> insp = inspect(some_mapped_object)
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`core_inspection_toplevel`
|
||||
|
||||
"""
|
||||
|
||||
session_id = None
|
||||
key = None
|
||||
runid = None
|
||||
load_options = util.EMPTY_SET
|
||||
load_path = ()
|
||||
insert_order = None
|
||||
_strong_obj = None
|
||||
modified = False
|
||||
expired = False
|
||||
deleted = False
|
||||
_load_pending = False
|
||||
|
||||
is_instance = True
|
||||
|
||||
def __init__(self, obj, manager):
|
||||
self.class_ = obj.__class__
|
||||
self.manager = manager
|
||||
self.obj = weakref.ref(obj, self._cleanup)
|
||||
self.callables = {}
|
||||
self.committed_state = {}
|
||||
|
||||
@util.memoized_property
|
||||
def attrs(self):
|
||||
"""Return a namespace representing each attribute on
|
||||
the mapped object, including its current value
|
||||
and history.
|
||||
|
||||
The returned object is an instance of :class:`.AttributeState`.
|
||||
This object allows inspection of the current data
|
||||
within an attribute as well as attribute history
|
||||
since the last flush.
|
||||
|
||||
"""
|
||||
return util.ImmutableProperties(
|
||||
dict(
|
||||
(key, AttributeState(self, key))
|
||||
for key in self.manager
|
||||
)
|
||||
)
|
||||
|
||||
@property
|
||||
def transient(self):
|
||||
"""Return true if the object is :term:`transient`.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`session_object_states`
|
||||
|
||||
"""
|
||||
return self.key is None and \
|
||||
not self._attached
|
||||
|
||||
@property
|
||||
def pending(self):
|
||||
"""Return true if the object is :term:`pending`.
|
||||
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`session_object_states`
|
||||
|
||||
"""
|
||||
return self.key is None and \
|
||||
self._attached
|
||||
|
||||
@property
|
||||
def persistent(self):
|
||||
"""Return true if the object is :term:`persistent`.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`session_object_states`
|
||||
|
||||
"""
|
||||
return self.key is not None and \
|
||||
self._attached
|
||||
|
||||
@property
|
||||
def detached(self):
|
||||
"""Return true if the object is :term:`detached`.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`session_object_states`
|
||||
|
||||
"""
|
||||
return self.key is not None and \
|
||||
not self._attached
|
||||
|
||||
@property
|
||||
@util.dependencies("sqlalchemy.orm.session")
|
||||
def _attached(self, sessionlib):
|
||||
return self.session_id is not None and \
|
||||
self.session_id in sessionlib._sessions
|
||||
|
||||
@property
|
||||
@util.dependencies("sqlalchemy.orm.session")
|
||||
def session(self, sessionlib):
|
||||
"""Return the owning :class:`.Session` for this instance,
|
||||
or ``None`` if none available."""
|
||||
return sessionlib._state_session(self)
|
||||
|
||||
@property
|
||||
def object(self):
|
||||
"""Return the mapped object represented by this
|
||||
:class:`.InstanceState`."""
|
||||
return self.obj()
|
||||
|
||||
@property
|
||||
def identity(self):
|
||||
"""Return the mapped identity of the mapped object.
|
||||
This is the primary key identity as persisted by the ORM
|
||||
which can always be passed directly to
|
||||
:meth:`.Query.get`.
|
||||
|
||||
Returns ``None`` if the object has no primary key identity.
|
||||
|
||||
.. note::
|
||||
An object which is transient or pending
|
||||
does **not** have a mapped identity until it is flushed,
|
||||
even if its attributes include primary key values.
|
||||
|
||||
"""
|
||||
if self.key is None:
|
||||
return None
|
||||
else:
|
||||
return self.key[1]
|
||||
|
||||
@property
|
||||
def identity_key(self):
|
||||
"""Return the identity key for the mapped object.
|
||||
|
||||
This is the key used to locate the object within
|
||||
the :attr:`.Session.identity_map` mapping. It contains
|
||||
the identity as returned by :attr:`.identity` within it.
|
||||
|
||||
|
||||
"""
|
||||
# TODO: just change .key to .identity_key across
|
||||
# the board ? probably
|
||||
return self.key
|
||||
|
||||
@util.memoized_property
|
||||
def parents(self):
|
||||
return {}
|
||||
|
||||
@util.memoized_property
|
||||
def _pending_mutations(self):
|
||||
return {}
|
||||
|
||||
@util.memoized_property
|
||||
def mapper(self):
|
||||
"""Return the :class:`.Mapper` used for this mapepd object."""
|
||||
return self.manager.mapper
|
||||
|
||||
@property
|
||||
def has_identity(self):
|
||||
"""Return ``True`` if this object has an identity key.
|
||||
|
||||
This should always have the same value as the
|
||||
expression ``state.persistent or state.detached``.
|
||||
|
||||
"""
|
||||
return bool(self.key)
|
||||
|
||||
def _detach(self):
|
||||
self.session_id = self._strong_obj = None
|
||||
|
||||
def _dispose(self):
|
||||
self._detach()
|
||||
del self.obj
|
||||
|
||||
def _cleanup(self, ref):
|
||||
instance_dict = self._instance_dict()
|
||||
if instance_dict:
|
||||
instance_dict.discard(self)
|
||||
|
||||
self.callables = {}
|
||||
self.session_id = self._strong_obj = None
|
||||
del self.obj
|
||||
|
||||
def obj(self):
|
||||
return None
|
||||
|
||||
@property
|
||||
def dict(self):
|
||||
"""Return the instance dict used by the object.
|
||||
|
||||
Under normal circumstances, this is always synonymous
|
||||
with the ``__dict__`` attribute of the mapped object,
|
||||
unless an alternative instrumentation system has been
|
||||
configured.
|
||||
|
||||
In the case that the actual object has been garbage
|
||||
collected, this accessor returns a blank dictionary.
|
||||
|
||||
"""
|
||||
o = self.obj()
|
||||
if o is not None:
|
||||
return base.instance_dict(o)
|
||||
else:
|
||||
return {}
|
||||
|
||||
def _initialize_instance(*mixed, **kwargs):
    """Invoke the class' original ``__init__``, wrapped in init events.

    ``*mixed`` is used instead of named parameters so that user-supplied
    keyword arguments named ``self`` or ``instance`` cannot collide.
    """
    self, instance, args = mixed[0], mixed[1], mixed[2:]
    manager = self.manager

    # fire the "init" event before the constructor runs
    manager.dispatch.init(self, args, kwargs)

    try:
        return manager.original_init(*mixed[1:], **kwargs)
    except:
        # bare except is deliberate: fire init_failure for *any*
        # exception (including BaseException), then re-raise unchanged
        manager.dispatch.init_failure(self, args, kwargs)
        raise
|
||||
|
||||
def get_history(self, key, passive):
    # Delegate to the attribute impl; ``passive`` controls whether
    # loader callables may be emitted.
    return self.manager[key].impl.get_history(self, self.dict, passive)
|
||||
|
||||
def get_impl(self, key):
    # Return the AttributeImpl for the given attribute key.
    return self.manager[key].impl
|
||||
|
||||
def _get_pending_mutation(self, key):
    """Return the :class:`.PendingCollection` for *key*, creating one
    on first access."""
    pending = self._pending_mutations
    try:
        return pending[key]
    except KeyError:
        collection = pending[key] = PendingCollection()
        return collection
|
||||
|
||||
def __getstate__(self):
    """Pickle support: reduce this state to a plain dict.

    Captures the mapped instance itself plus whichever bookkeeping
    attributes are actually present in ``__dict__`` (memoized/lazy
    attributes may be absent).
    """
    state_dict = {'instance': self.obj()}
    state_dict.update(
        (k, self.__dict__[k]) for k in (
            'committed_state', '_pending_mutations', 'modified',
            'expired', 'callables', 'key', 'parents', 'load_options',
            'class_',
        ) if k in self.__dict__
    )
    if self.load_path:
        # PathRegistry objects aren't directly picklable
        state_dict['load_path'] = self.load_path.serialize()

    # the manager contributes its own serialized form / restore hook
    state_dict['manager'] = self.manager._serialize(self, state_dict)

    return state_dict
|
||||
|
||||
def __setstate__(self, state_dict):
    """Pickle support: rebuild this state from ``__getstate__`` output."""
    inst = state_dict['instance']
    if inst is not None:
        # re-establish the weakref with the cleanup callback
        self.obj = weakref.ref(inst, self._cleanup)
        self.class_ = inst.__class__
    else:
        # None being possible here generally new as of 0.7.4
        # due to storage of state in "parents".  "class_"
        # also new.
        self.obj = None
        self.class_ = state_dict['class_']

    # restore bookkeeping, defaulting anything that was absent
    self.committed_state = state_dict.get('committed_state', {})
    self._pending_mutations = state_dict.get('_pending_mutations', {})
    self.parents = state_dict.get('parents', {})
    self.modified = state_dict.get('modified', False)
    self.expired = state_dict.get('expired', False)
    self.callables = state_dict.get('callables', {})

    # only restore these if they were present in the pickle
    self.__dict__.update([
        (k, state_dict[k]) for k in (
            'key', 'load_options',
        ) if k in state_dict
    ])

    if 'load_path' in state_dict:
        self.load_path = PathRegistry.\
            deserialize(state_dict['load_path'])

    # the manager entry is a callable restore hook (see
    # ClassManager._serialize) which finishes reattachment
    state_dict['manager'](self, inst, state_dict)
|
||||
|
||||
def _initialize(self, key):
    """Set this attribute to an empty value or collection,
    based on the AttributeImpl in use."""

    self.manager.get_impl(key).initialize(self, self.dict)
|
||||
|
||||
def _reset(self, dict_, key):
    """Remove the given attribute and any
    callables associated with it."""

    old = dict_.pop(key, None)
    # a removed collection must be invalidated so stale references
    # to it cannot continue mutating state
    if old is not None and self.manager[key].impl.collection:
        self.manager[key].impl._invalidate_collection(old)
    self.callables.pop(key, None)
|
||||
|
||||
def _expire_attribute_pre_commit(self, dict_, key):
    """a fast expire that can be called by column loaders during a load.

    The additional bookkeeping is finished up in commit_all().

    Should only be called for scalar attributes.

    This method is actually called a lot with joined-table
    loading, when the second table isn't present in the result.

    """
    dict_.pop(key, None)
    # the state itself is used as the "expired" marker; it is callable
    # (see __call__) and will load the attribute on access
    self.callables[key] = self
|
||||
|
||||
@classmethod
def _row_processor(cls, manager, fn, key):
    """Return a per-row callable that installs ``fn`` as the deferred
    loader for ``key`` on a state during result processing."""
    impl = manager[key].impl
    if impl.collection:
        # collection attribute: any existing collection must be popped
        # and invalidated before the loader callable is installed
        def _set_callable(state, dict_, row):
            old = dict_.pop(key, None)
            if old is not None:
                impl._invalidate_collection(old)
            state.callables[key] = fn
    else:
        # scalar attribute: just install the loader
        def _set_callable(state, dict_, row):
            state.callables[key] = fn
    return _set_callable
|
||||
|
||||
def _expire(self, dict_, modified_set):
    """Expire all attributes of this state.

    ``modified_set`` is the identity map's modified collection, from
    which this state is removed if present.
    """
    self.expired = True
    if self.modified:
        modified_set.discard(self)

    self.modified = False
    self._strong_obj = None

    self.committed_state.clear()

    # reset the memoized _pending_mutations dict back to unloaded
    InstanceState._pending_mutations._reset(self)

    # clear out 'parents' collection.  not
    # entirely clear how we can best determine
    # which to remove, or not.
    InstanceState.parents._reset(self)

    for key in self.manager:
        impl = self.manager[key].impl
        if impl.accepts_scalar_loader and \
                (impl.expire_missing or key in dict_):
            # mark attribute expired; the state itself acts as the
            # deferred loader callable (see __call__)
            self.callables[key] = self
            old = dict_.pop(key, None)
            if impl.collection and old is not None:
                impl._invalidate_collection(old)

    self.manager.dispatch.expire(self, None)
|
||||
|
||||
def _expire_attributes(self, dict_, attribute_names):
    """Expire only the named attributes of this state."""
    # use __dict__.get so we don't trigger the memoized creation of
    # _pending_mutations when it was never populated
    pending = self.__dict__.get('_pending_mutations', None)

    for key in attribute_names:
        impl = self.manager[key].impl
        if impl.accepts_scalar_loader:
            # install the state itself as the deferred loader marker
            self.callables[key] = self
            old = dict_.pop(key, None)
            if impl.collection and old is not None:
                impl._invalidate_collection(old)

        self.committed_state.pop(key, None)
        if pending:
            pending.pop(key, None)

    self.manager.dispatch.expire(self, attribute_names)
|
||||
|
||||
def __call__(self, state, passive):
    """__call__ allows the InstanceState to act as a deferred
    callable for loading expired attributes, which is also
    serializable (picklable).

    """

    # refuse to load unless SQL emission is permitted
    if not passive & SQL_OK:
        return PASSIVE_NO_RESULT

    # load only attributes that are expired AND have no pending changes
    toload = self.expired_attributes.\
        intersection(self.unmodified)

    self.manager.deferred_scalar_loader(self, toload)

    # if the loader failed, or this
    # instance state didn't have an identity,
    # the attributes still might be in the callables
    # dict.  ensure they are removed.
    for k in toload.intersection(self.callables):
        del self.callables[k]

    return ATTR_WAS_SET
|
||||
|
||||
@property
def unmodified(self):
    """Return the set of keys which have no uncommitted changes"""

    committed = self.committed_state
    return {key for key in self.manager if key not in committed}
|
||||
|
||||
def unmodified_intersection(self, keys):
    """Return self.unmodified.intersection(keys)."""

    candidates = set(keys) & set(self.manager)
    return candidates - set(self.committed_state)
|
||||
|
||||
@property
def unloaded(self):
    """Return the set of keys which do not have a loaded value.

    This includes expired attributes and any other attribute that
    was never populated or modified.

    """
    committed = self.committed_state
    loaded = self.dict
    return {
        key for key in self.manager
        if key not in committed and key not in loaded
    }
|
||||
|
||||
@property
def _unloaded_non_object(self):
    # subset of .unloaded restricted to attributes whose impl accepts
    # the deferred scalar loader (i.e. non-object/column attributes)
    return self.unloaded.intersection(
        attr for attr in self.manager
        if self.manager[attr].impl.accepts_scalar_loader
    )
|
||||
|
||||
@property
def expired_attributes(self):
    """Return the set of keys which are 'expired' to be loaded by
    the manager's deferred scalar loader, assuming no pending
    changes.

    see also the ``unmodified`` collection which is intersected
    against this set when a refresh operation occurs.

    """
    # an attribute is "expired" when this state itself was installed
    # as its loader callable (see _expire / _expire_attribute_pre_commit)
    return {
        key for key, loader in self.callables.items()
        if loader is self
    }
|
||||
|
||||
def _instance_dict(self):
    # Base implementation: no identity map is associated.  Overridden
    # elsewhere when the state is attached to a session's identity map
    # — callers always guard against a None/empty return.
    return None
|
||||
|
||||
def _modified_event(
        self, dict_, attr, previous, collection=False, force=False):
    """Record a change event for ``attr``, capturing the pre-change
    value into ``committed_state`` and flagging this state modified.

    :param previous: value prior to the change; ``NEVER_SET`` means
        "look it up in ``dict_``".
    :param collection: True for collection attributes, whose previous
        value must be copied before it is mutated.
    :param force: record even if a committed value is already present.
    """
    if not attr.send_modified_events:
        return
    # only capture the first pre-change value (unless forced)
    if attr.key not in self.committed_state or force:
        if collection:
            if previous is NEVER_SET:
                if attr.key in dict_:
                    previous = dict_[attr.key]

            # copy the collection so later mutations don't alter the
            # recorded "previous" value
            if previous not in (None, NO_VALUE, NEVER_SET):
                previous = attr.copy(previous)

        self.committed_state[attr.key] = previous

    # assert self._strong_obj is None or self.modified

    if (self.session_id and self._strong_obj is None) \
            or not self.modified:
        instance_dict = self._instance_dict()
        if instance_dict:
            instance_dict._modified.add(self)

        # only create _strong_obj link if attached
        # to a session

        inst = self.obj()
        if self.session_id:
            # keep the dirty object alive until flushed
            self._strong_obj = inst

        if inst is None:
            raise orm_exc.ObjectDereferencedError(
                "Can't emit change event for attribute '%s' - "
                "parent object of type %s has been garbage "
                "collected."
                % (
                    self.manager[attr.key],
                    base.state_class_str(self)
                ))
        self.modified = True
|
||||
|
||||
def _commit(self, dict_, keys):
    """Commit attributes.

    This is used by a partial-attribute load operation to mark committed
    those attributes which were refreshed from the database.

    Attributes marked as "expired" can potentially remain "expired" after
    this step if a value was not populated in state.dict.

    """
    for key in keys:
        self.committed_state.pop(key, None)

    self.expired = False

    # drop loader callables only for keys that actually received a
    # value (present in dict_)
    for key in set(self.callables).\
            intersection(keys).\
            intersection(dict_):
        del self.callables[key]
|
||||
|
||||
def _commit_all(self, dict_, instance_dict=None):
    """commit all attributes unconditionally.

    This is used after a flush() or a full load/refresh
    to remove all pending state from the instance.

    - all attributes are marked as "committed"
    - the "strong dirty reference" is removed
    - the "modified" flag is set to False
    - any "expired" markers/callables for attributes loaded are removed.

    Attributes marked as "expired" can potentially remain
    "expired" after this step if a value was not populated in state.dict.

    """
    # single-state convenience front end for _commit_all_states()
    self._commit_all_states([(self, dict_)], instance_dict)
|
||||
|
||||
@classmethod
def _commit_all_states(cls, iter, instance_dict=None):
    """Mass version of commit_all().

    :param iter: iterable of ``(state, dict_)`` pairs.  (The parameter
        name shadows the builtin ``iter``; kept as-is for call
        compatibility, and the builtin is not needed here.)
    :param instance_dict: optional identity map whose ``_modified``
        collection is pruned of states that were flagged modified.
    """
    for state, dict_ in iter:
        # discard all pre-change bookkeeping for this state
        state.committed_state.clear()
        InstanceState._pending_mutations._reset(state)

        # remove "expired" markers for keys that now have a loaded
        # value; the state itself serves as the expired marker
        callables = state.callables
        for key in list(callables):
            if key in dict_ and callables[key] is state:
                del callables[key]

        if instance_dict and state.modified:
            instance_dict._modified.discard(state)

        state.modified = state.expired = False
        state._strong_obj = None
|
||||
|
||||
|
||||
class AttributeState(object):
    """Provide an inspection interface corresponding
    to a particular attribute on a particular mapped object.

    The :class:`.AttributeState` object is accessed
    via the :attr:`.InstanceState.attrs` collection
    of a particular :class:`.InstanceState`::

        from sqlalchemy import inspect

        insp = inspect(some_mapped_object)
        attr_state = insp.attrs.some_attribute

    """

    def __init__(self, state, key):
        # state: the owning InstanceState; key: the attribute name
        self.state = state
        self.key = key

    @property
    def loaded_value(self):
        """The current value of this attribute as loaded from the database.

        If the value has not been loaded, or is otherwise not present
        in the object's dictionary, returns NO_VALUE.

        """
        return self.state.dict.get(self.key, NO_VALUE)

    @property
    def value(self):
        """Return the value of this attribute.

        This operation is equivalent to accessing the object's
        attribute directly or via ``getattr()``, and will fire
        off any pending loader callables if needed.

        """
        # invoke the instrumented descriptor's __get__ directly
        return self.state.manager[self.key].__get__(
            self.state.obj(), self.state.class_)

    @property
    def history(self):
        """Return the current pre-flush change history for
        this attribute, via the :class:`.History` interface.

        This method will **not** emit loader callables if the value of the
        attribute is unloaded.

        .. seealso::

            :meth:`.AttributeState.load_history` - retrieve history
            using loader callables if the value is not locally present.

            :func:`.attributes.get_history` - underlying function

        """
        # PASSIVE_NO_INITIALIZE suppresses any loading side effects
        return self.state.get_history(self.key,
                                      PASSIVE_NO_INITIALIZE)

    def load_history(self):
        """Return the current pre-flush change history for
        this attribute, via the :class:`.History` interface.

        This method **will** emit loader callables if the value of the
        attribute is unloaded.

        .. seealso::

            :attr:`.AttributeState.history`

            :func:`.attributes.get_history` - underlying function

        .. versionadded:: 0.9.0

        """
        # PASSIVE_OFF ^ INIT_OK: allow loading but not initialization
        return self.state.get_history(self.key,
                                      PASSIVE_OFF ^ INIT_OK)
|
||||
|
||||
|
||||
class PendingCollection(object):
    """A writable placeholder for an unloaded collection.

    Stores items appended to and removed from a collection that has not yet
    been loaded. When the collection is loaded, the changes stored in
    PendingCollection are applied to it to produce the final result.

    """

    def __init__(self):
        # identity-based sets: membership is by object identity,
        # and insertion order of additions is preserved
        self.deleted_items = util.IdentitySet()
        self.added_items = util.OrderedIdentitySet()

    def append(self, value):
        """Record a pending add; a prior pending removal is undone."""
        deleted = self.deleted_items
        if value in deleted:
            deleted.remove(value)
            return
        self.added_items.add(value)

    def remove(self, value):
        """Record a pending removal; a prior pending add is undone."""
        added = self.added_items
        if value in added:
            added.remove(value)
            return
        self.deleted_items.add(value)
|
||||
1524
lib/python3.4/site-packages/sqlalchemy/orm/strategies.py
Normal file
1524
lib/python3.4/site-packages/sqlalchemy/orm/strategies.py
Normal file
File diff suppressed because it is too large
Load diff
991
lib/python3.4/site-packages/sqlalchemy/orm/strategy_options.py
Normal file
991
lib/python3.4/site-packages/sqlalchemy/orm/strategy_options.py
Normal file
|
|
@ -0,0 +1,991 @@
|
|||
# orm/strategy_options.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""
|
||||
|
||||
"""
|
||||
|
||||
from .interfaces import MapperOption, PropComparator
|
||||
from .. import util
|
||||
from ..sql.base import _generative, Generative
|
||||
from .. import exc as sa_exc, inspect
|
||||
from .base import _is_aliased_class, _class_to_mapper
|
||||
from . import util as orm_util
|
||||
from .path_registry import PathRegistry, TokenRegistry, \
|
||||
_WILDCARD_TOKEN, _DEFAULT_TOKEN
|
||||
|
||||
|
||||
class Load(Generative, MapperOption):
    """Represents loader options which modify the state of a
    :class:`.Query` in order to affect how various mapped attributes are
    loaded.

    .. versionadded:: 0.9.0 The :meth:`.Load` system is a new foundation for
       the existing system of loader options, including options such as
       :func:`.orm.joinedload`, :func:`.orm.defer`, and others.   In
       particular, it introduces a new method-chained system that replaces the
       need for dot-separated paths as well as "_all()" options such as
       :func:`.orm.joinedload_all`.

    A :class:`.Load` object can be used directly or indirectly.  To use one
    directly, instantiate given the parent class.  This style of usage is
    useful when dealing with a :class:`.Query` that has multiple entities,
    or when producing a loader option that can be applied generically to
    any style of query::

        myopt = Load(MyClass).joinedload("widgets")

    The above ``myopt`` can now be used with :meth:`.Query.options`::

        session.query(MyClass).options(myopt)

    The :class:`.Load` construct is invoked indirectly whenever one makes use
    of the various loader options that are present in ``sqlalchemy.orm``,
    including options such as :func:`.orm.joinedload`, :func:`.orm.defer`,
    :func:`.orm.subqueryload`, and all the rest.  These constructs produce an
    "anonymous" form of the :class:`.Load` object which tracks attributes and
    options, but is not linked to a parent class until it is associated with a
    parent :class:`.Query`::

        # produce "unbound" Load object
        myopt = joinedload("widgets")

        # when applied using options(), the option is "bound" to the
        # class observed in the given query, e.g. MyClass
        session.query(MyClass).options(myopt)

    Whether the direct or indirect style is used, the :class:`.Load` object
    returned now represents a specific "path" along the entities of a
    :class:`.Query`.  This path can be traversed using a standard
    method-chaining approach.  Supposing a class hierarchy such as ``User``,
    ``User.addresses -> Address``, ``User.orders -> Order`` and
    ``Order.items -> Item``, we can specify a variety of loader options along
    each element in the "path"::

        session.query(User).options(
            joinedload("addresses"),
            subqueryload("orders").joinedload("items")
        )

    Where above, the ``addresses`` collection will be joined-loaded, the
    ``orders`` collection will be subquery-loaded, and within that subquery
    load the ``items`` collection will be joined-loaded.


    """

    def __init__(self, entity):
        # root the option at the given entity's path registry
        insp = inspect(entity)
        self.path = insp._path_registry
        self.context = {}
        self.local_opts = {}

    def _generate(self):
        # generative clone; each clone gets its own local_opts
        cloned = super(Load, self)._generate()
        cloned.local_opts = {}
        return cloned

    # class-level defaults, overridden per instance by the setters below
    strategy = None
    propagate_to_loaders = False

    def process_query(self, query):
        self._process(query, True)

    def process_query_conditionally(self, query):
        self._process(query, False)

    def _process(self, query, raiseerr):
        """Transfer this option's context entries onto the query."""
        current_path = query._current_path
        if current_path:
            # secondary (lazy/subquery) load: re-anchor each stored
            # path relative to the query's current path
            for (token, start_path), loader in self.context.items():
                chopped_start_path = self._chop_path(start_path, current_path)
                if chopped_start_path is not None:
                    query._attributes[(token, chopped_start_path)] = loader
        else:
            query._attributes.update(self.context)

    def _generate_path(self, path, attr, wildcard_key, raiseerr=True):
        """Extend ``path`` by ``attr`` (a string name, wildcard token,
        or instrumented attribute), returning the new path or None."""
        if raiseerr and not path.has_entity:
            if isinstance(path, TokenRegistry):
                raise sa_exc.ArgumentError(
                    "Wildcard token cannot be followed by another entity")
            else:
                raise sa_exc.ArgumentError(
                    "Attribute '%s' of entity '%s' does not "
                    "refer to a mapped entity" %
                    (path.prop.key, path.parent.entity)
                )

        if isinstance(attr, util.string_types):
            default_token = attr.endswith(_DEFAULT_TOKEN)
            if attr.endswith(_WILDCARD_TOKEN) or default_token:
                if default_token:
                    self.propagate_to_loaders = False
                if wildcard_key:
                    attr = "%s:%s" % (wildcard_key, attr)
                return path.token(attr)

            try:
                # use getattr on the class to work around
                # synonyms, hybrids, etc.
                attr = getattr(path.entity.class_, attr)
            except AttributeError:
                if raiseerr:
                    raise sa_exc.ArgumentError(
                        "Can't find property named '%s' on the "
                        "mapped entity %s in this Query. " % (
                            attr, path.entity)
                    )
                else:
                    return None
            else:
                attr = attr.property

            path = path[attr]
        else:
            prop = attr.property

            if not prop.parent.common_parent(path.mapper):
                if raiseerr:
                    raise sa_exc.ArgumentError(
                        "Attribute '%s' does not "
                        "link from element '%s'" % (attr, path.entity))
                else:
                    return None

            if getattr(attr, '_of_type', None):
                # attribute was qualified with of_type(); target the
                # specific subtype's mapper in the path
                ac = attr._of_type
                ext_info = inspect(ac)

                path_element = ext_info.mapper
                if not ext_info.is_aliased_class:
                    ac = orm_util.with_polymorphic(
                        ext_info.mapper.base_mapper,
                        ext_info.mapper, aliased=True,
                        _use_mapper_path=True)
                path.entity_path[prop].set(
                    self.context, "path_with_polymorphic", inspect(ac))
                path = path[prop][path_element]
            else:
                path = path[prop]

        if path.has_entity:
            path = path.entity_path
        return path

    def _coerce_strat(self, strategy):
        # normalize a strategy dict to a hashable, order-independent key
        if strategy is not None:
            strategy = tuple(sorted(strategy.items()))
        return strategy

    @_generative
    def set_relationship_strategy(
            self, attr, strategy, propagate_to_loaders=True):
        strategy = self._coerce_strat(strategy)

        self.propagate_to_loaders = propagate_to_loaders
        # if the path is a wildcard, this will set propagate_to_loaders=False
        self.path = self._generate_path(self.path, attr, "relationship")
        self.strategy = strategy
        if strategy is not None:
            self._set_path_strategy()

    @_generative
    def set_column_strategy(self, attrs, strategy, opts=None):
        strategy = self._coerce_strat(strategy)

        # one cloned Load per attribute, each registered on its own path
        for attr in attrs:
            path = self._generate_path(self.path, attr, "column")
            cloned = self._generate()
            cloned.strategy = strategy
            cloned.path = path
            cloned.propagate_to_loaders = True
            if opts:
                cloned.local_opts.update(opts)
            cloned._set_path_strategy()

    def _set_path_strategy(self):
        # register this option as the "loader" for its (non-entity) path
        if self.path.has_entity:
            self.path.parent.set(self.context, "loader", self)
        else:
            self.path.set(self.context, "loader", self)

    def __getstate__(self):
        # PathRegistry is not directly picklable; serialize it
        d = self.__dict__.copy()
        d["path"] = self.path.serialize()
        return d

    def __setstate__(self, state):
        self.__dict__.update(state)
        self.path = PathRegistry.deserialize(self.path)

    def _chop_path(self, to_chop, path):
        """Return ``to_chop`` with the leading portion matching ``path``
        removed, or None if the two don't correspond."""
        i = -1

        for i, (c_token, p_token) in enumerate(zip(to_chop, path.path)):
            if isinstance(c_token, util.string_types):
                # TODO: this is approximated from the _UnboundLoad
                # version and probably has issues, not fully covered.

                if i == 0 and c_token.endswith(':' + _DEFAULT_TOKEN):
                    return to_chop
                elif c_token != 'relationship:%s' % (_WILDCARD_TOKEN,) and \
                        c_token != p_token.key:
                    return None

            if c_token is p_token:
                continue
            else:
                return None
        return to_chop[i + 1:]
|
||||
|
||||
|
||||
class _UnboundLoad(Load):
    """Represent a loader option that isn't tied to a root entity.

    The loader option will produce an entity-linked :class:`.Load`
    object when it is passed :meth:`.Query.options`.

    This provides compatibility with the traditional system
    of freestanding options, e.g. ``joinedload('x.y.z')``.

    """

    def __init__(self):
        # path is a plain tuple of tokens until bound to an entity
        self.path = ()
        self._to_bind = set()
        self.local_opts = {}

    # True for intermediate links created by _from_keys chaining
    _is_chain_link = False

    def _set_path_strategy(self):
        # defer registration: remember this option until bind time
        self._to_bind.add(self)

    def _generate_path(self, path, attr, wildcard_key):
        # unbound form: just append tokens; wildcard/default tokens get
        # prefixed with their key ("relationship:*" etc.)
        if wildcard_key and isinstance(attr, util.string_types) and \
                attr in (_WILDCARD_TOKEN, _DEFAULT_TOKEN):
            if attr == _DEFAULT_TOKEN:
                self.propagate_to_loaders = False
            attr = "%s:%s" % (wildcard_key, attr)

        return path + (attr, )

    def __getstate__(self):
        # replace instrumented attributes in the path with
        # picklable (class, key) pairs
        d = self.__dict__.copy()
        d['path'] = ret = []
        for token in util.to_list(self.path):
            if isinstance(token, PropComparator):
                ret.append((token._parentmapper.class_, token.key))
            else:
                ret.append(token)
        return d

    def __setstate__(self, state):
        # rebuild instrumented attributes from (class, key) pairs
        ret = []
        for key in state['path']:
            if isinstance(key, tuple):
                cls, propkey = key
                ret.append(getattr(cls, propkey))
            else:
                ret.append(key)
        state['path'] = tuple(ret)
        self.__dict__ = state

    def _process(self, query, raiseerr):
        # bind every deferred option to the query's actual entities
        for val in self._to_bind:
            val._bind_loader(query, query._attributes, raiseerr)

    @classmethod
    def _from_keys(self, meth, keys, chained, kw):
        """Build an option chain from dotted-string / attribute keys.

        ``meth`` is applied to every token when ``chained`` (the
        "_all()" style), otherwise only to the last token with
        defaultload() used for the intermediate links.
        """
        opt = _UnboundLoad()

        def _split_key(key):
            if isinstance(key, util.string_types):
                # coerce fooload('*') into "default loader strategy"
                if key == _WILDCARD_TOKEN:
                    return (_DEFAULT_TOKEN, )
                # coerce fooload(".*") into "wildcard on default entity"
                elif key.startswith("." + _WILDCARD_TOKEN):
                    key = key[1:]
                return key.split(".")
            else:
                return (key,)
        all_tokens = [token for key in keys for token in _split_key(key)]

        for token in all_tokens[0:-1]:
            if chained:
                opt = meth(opt, token, **kw)
            else:
                opt = opt.defaultload(token)
            opt._is_chain_link = True

        opt = meth(opt, all_tokens[-1], **kw)
        opt._is_chain_link = False

        return opt

    def _chop_path(self, to_chop, path):
        i = -1
        for i, (c_token, (p_mapper, p_prop)) in enumerate(
                zip(to_chop, path.pairs())):
            if isinstance(c_token, util.string_types):
                if i == 0 and c_token.endswith(':' + _DEFAULT_TOKEN):
                    return to_chop
                elif c_token != 'relationship:%s' % (
                        _WILDCARD_TOKEN,) and c_token != p_prop.key:
                    return None
            elif isinstance(c_token, PropComparator):
                if c_token.property is not p_prop:
                    return None
        else:
            # for/else: loop completed without mismatch; advance past
            # the last compared token
            i += 1

        return to_chop[i:]

    def _bind_loader(self, query, context, raiseerr):
        """Convert this unbound option into a bound :class:`.Load`
        registered in ``context`` against one of the query's entities."""
        start_path = self.path
        # _current_path implies we're in a
        # secondary load with an existing path

        current_path = query._current_path
        if current_path:
            start_path = self._chop_path(start_path, current_path)

        if not start_path:
            return None

        # locate the target entity from the first token
        token = start_path[0]
        if isinstance(token, util.string_types):
            entity = self._find_entity_basestring(query, token, raiseerr)
        elif isinstance(token, PropComparator):
            prop = token.property
            entity = self._find_entity_prop_comparator(
                query,
                prop.key,
                token._parententity,
                raiseerr)

        else:
            raise sa_exc.ArgumentError(
                "mapper option expects "
                "string key or list of attributes")

        if not entity:
            return

        path_element = entity.entity_zero

        # transfer our entity-less state into a Load() object
        # with a real entity path.
        loader = Load(path_element)
        loader.context = context
        loader.strategy = self.strategy

        path = loader.path
        for token in start_path:
            loader.path = path = loader._generate_path(
                loader.path, token, None, raiseerr)
            if path is None:
                return

        loader.local_opts.update(self.local_opts)

        if loader.path.has_entity:
            effective_path = loader.path.parent
        else:
            effective_path = loader.path

        # prioritize "first class" options over those
        # that were "links in the chain", e.g. "x" and "y" in
        # someload("x.y.z") versus someload("x") / someload("x.y")
        if self._is_chain_link:
            effective_path.setdefault(context, "loader", loader)
        else:
            effective_path.set(context, "loader", loader)

    def _find_entity_prop_comparator(self, query, token, mapper, raiseerr):
        """Locate the query entity corresponding to an attribute's
        parent mapper/aliased class, or None/raise."""
        if _is_aliased_class(mapper):
            searchfor = mapper
        else:
            searchfor = _class_to_mapper(mapper)
        for ent in query._mapper_entities:
            if ent.corresponds_to(searchfor):
                return ent
        else:
            if raiseerr:
                if not list(query._mapper_entities):
                    raise sa_exc.ArgumentError(
                        "Query has only expression-based entities - "
                        "can't find property named '%s'."
                        % (token, )
                    )
                else:
                    raise sa_exc.ArgumentError(
                        "Can't find property '%s' on any entity "
                        "specified in this Query.  Note the full path "
                        "from root (%s) to target entity must be specified."
                        % (token, ",".join(str(x) for
                           x in query._mapper_entities))
                    )
            else:
                return None

    def _find_entity_basestring(self, query, token, raiseerr):
        """Locate the query entity for a string token; wildcard tokens
        require exactly one entity."""
        if token.endswith(':' + _WILDCARD_TOKEN):
            if len(list(query._mapper_entities)) != 1:
                if raiseerr:
                    raise sa_exc.ArgumentError(
                        "Wildcard loader can only be used with exactly "
                        "one entity.  Use Load(ent) to specify "
                        "specific entities.")
        elif token.endswith(_DEFAULT_TOKEN):
            raiseerr = False

        for ent in query._mapper_entities:
            # return only the first _MapperEntity when searching
            # based on string prop name.   Ideally object
            # attributes are used to specify more exactly.
            return ent
        else:
            if raiseerr:
                raise sa_exc.ArgumentError(
                    "Query has only expression-based entities - "
                    "can't find property named '%s'."
                    % (token, )
                )
            else:
                return None
|
||||
|
||||
|
||||
class loader_option(object):
    """Decorator that installs a function as a :class:`.Load` method and
    optionally registers unbound / "_all()" standalone variants."""

    def __init__(self):
        pass

    def __call__(self, fn):
        # attach fn as a method on Load, guarding against collisions
        self.name = name = fn.__name__
        self.fn = fn
        if hasattr(Load, name):
            raise TypeError("Load class already has a %s method." % (name))
        setattr(Load, name, fn)

        return self

    def _add_unbound_fn(self, fn):
        """Register the standalone (unbound) variant; its docstring is
        swapped onto the method version and vice versa."""
        self._unbound_fn = fn
        fn_doc = self.fn.__doc__
        self.fn.__doc__ = """Produce a new :class:`.Load` object with the
:func:`.orm.%(name)s` option applied.

See :func:`.orm.%(name)s` for usage examples.

""" % {"name": self.name}

        fn.__doc__ = fn_doc
        return self

    def _add_unbound_all_fn(self, fn):
        """Register the deprecated standalone "_all()" variant with a
        generated deprecation docstring."""
        self._unbound_all_fn = fn
        fn.__doc__ = """Produce a standalone "all" option for :func:`.orm.%(name)s`.

.. deprecated:: 0.9.0

    The "_all()" style is replaced by method chaining, e.g.::

        session.query(MyClass).options(
            %(name)s("someattribute").%(name)s("anotherattribute")
        )

""" % {"name": self.name}
        return self
|
||||
|
||||
|
||||
@loader_option()
def contains_eager(loadopt, attr, alias=None):
    """Indicate that the given attribute should be eagerly loaded from
    columns stated manually in the query.

    This function is part of the :class:`.Load` interface and supports
    both method-chained and standalone operation.

    The option is used in conjunction with an explicit join that loads
    the desired rows, i.e.::

        sess.query(Order).\\
                join(Order.user).\\
                options(contains_eager(Order.user))

    The above query would join from the ``Order`` entity to its related
    ``User`` entity, and the returned ``Order`` objects would have the
    ``Order.user`` attribute pre-populated.

    :func:`contains_eager` also accepts an `alias` argument, which is the
    string name of an alias, an :func:`~sqlalchemy.sql.expression.alias`
    construct, or an :func:`~sqlalchemy.orm.aliased` construct. Use this when
    the eagerly-loaded rows are to come from an aliased table::

        user_alias = aliased(User)
        sess.query(Order).\\
                join((user_alias, Order.user)).\\
                options(contains_eager(Order.user, alias=user_alias))

    .. seealso::

        :ref:`contains_eager`

    """
    if alias is not None:
        # a non-string alias (aliased() / alias() construct) is resolved
        # to its selectable; string names pass through unchanged
        if not isinstance(alias, str):
            info = inspect(alias)
            alias = info.selectable

    # contains_eager is the "joined" strategy with propagation disabled,
    # since the rows come from the user's explicit join
    cloned = loadopt.set_relationship_strategy(
        attr,
        {"lazy": "joined"},
        propagate_to_loaders=False
    )
    cloned.local_opts['eager_from_alias'] = alias
    return cloned
|
||||
|
||||
|
||||
@contains_eager._add_unbound_fn
|
||||
def contains_eager(*keys, **kw):
|
||||
return _UnboundLoad()._from_keys(
|
||||
_UnboundLoad.contains_eager, keys, True, kw)
|
||||
|
||||
|
||||
@loader_option()
def load_only(loadopt, *attrs):
    """Indicate that for a particular entity, only the given list
    of column-based attribute names should be loaded; all others will
    be deferred.

    This function is part of the :class:`.Load` interface and supports
    both method-chained and standalone operation.

    Example - load only the ``name`` and ``fullname`` attributes of
    ``User``::

        session.query(User).options(load_only("name", "fullname"))

    Example - subquery-load ``User.addresses``, but on each ``Address``
    load only the ``email_address`` attribute::

        session.query(User).options(
                subqueryload("addresses").load_only("email_address")
        )

    For a :class:`.Query` with multiple entities, target a specific
    entity via the :class:`.Load` constructor::

        session.query(User, Address).join(User.addresses).options(
                    Load(User).load_only("name", "fullname"),
                    Load(Address).load_only("email_address")
                )

    .. versionadded:: 0.9.0

    """
    # instrument the named attributes as undeferred...
    opt = loadopt.set_column_strategy(
        attrs,
        {"deferred": False, "instrument": True}
    )
    # ...and defer everything else, keeping primary key columns loaded
    opt.set_column_strategy(
        "*",
        {"deferred": True, "instrument": True},
        {"undefer_pks": True}
    )
    return opt


@load_only._add_unbound_fn
def load_only(*attrs):
    return _UnboundLoad().load_only(*attrs)
|
||||
|
||||
|
||||
@loader_option()
def joinedload(loadopt, attr, innerjoin=None):
    """Indicate that the given attribute should be loaded using joined
    eager loading.

    This function is part of the :class:`.Load` interface and supports
    both method-chained and standalone operation.

    examples::

        # joined-load the "orders" collection on "User"
        query(User).options(joinedload(User.orders))

        # joined-load Order.items and then Item.keywords
        query(Order).options(joinedload(Order.items).joinedload(Item.keywords))

        # lazily load Order.items, but when Items are loaded,
        # joined-load the keywords collection
        query(Order).options(lazyload(Order.items).joinedload(Item.keywords))

    :param innerjoin: if ``True``, use an inner join rather than the
     default LEFT OUTER JOIN::

        query(Order).options(joinedload(Order.user, innerjoin=True))

     When chained onto an existing LEFT OUTER JOIN, ``innerjoin=True``
     is bypassed so results don't change; use ``innerjoin="nested"``
     to instead nest the join on the right side, e.g.
     "a LEFT OUTER JOIN (b JOIN c)".

     .. versionadded:: 0.9.4 Added ``innerjoin="nested"`` option to
        support nesting of eager "inner" joins.

    .. note::

        The joins produced by :func:`.orm.joinedload` are **anonymously
        aliased**; the join criteria cannot be modified and the
        :class:`.Query` cannot refer to these joins, including for
        ordering.  For an explicit JOIN use :meth:`.Query.join`; to
        combine explicit JOINs with eager loading of collections, use
        :func:`.orm.contains_eager` — see :ref:`contains_eager`.

    .. seealso::

        :ref:`loading_toplevel`

        :ref:`contains_eager`

        :func:`.orm.subqueryload`

        :func:`.orm.lazyload`

        :paramref:`.relationship.lazy`

        :paramref:`.relationship.innerjoin` - :func:`.relationship`-level
        version of the :paramref:`.joinedload.innerjoin` option.

    """
    opt = loadopt.set_relationship_strategy(attr, {"lazy": "joined"})
    if innerjoin is not None:
        opt.local_opts['innerjoin'] = innerjoin
    return opt


@joinedload._add_unbound_fn
def joinedload(*keys, **kw):
    return _UnboundLoad._from_keys(
        _UnboundLoad.joinedload, keys, False, kw)


@joinedload._add_unbound_all_fn
def joinedload_all(*keys, **kw):
    return _UnboundLoad._from_keys(
        _UnboundLoad.joinedload, keys, True, kw)
|
||||
|
||||
|
||||
@loader_option()
def subqueryload(loadopt, attr):
    """Indicate that the given attribute should be loaded using
    subquery eager loading.

    This function is part of the :class:`.Load` interface and supports
    both method-chained and standalone operation.

    examples::

        # subquery-load the "orders" collection on "User"
        query(User).options(subqueryload(User.orders))

        # subquery-load Order.items and then Item.keywords
        query(Order).options(subqueryload(Order.items).subqueryload(Item.keywords))

        # lazily load Order.items, but when Items are loaded,
        # subquery-load the keywords collection
        query(Order).options(lazyload(Order.items).subqueryload(Item.keywords))

    .. seealso::

        :ref:`loading_toplevel`

        :func:`.orm.joinedload`

        :func:`.orm.lazyload`

        :paramref:`.relationship.lazy`

    """
    return loadopt.set_relationship_strategy(attr, {"lazy": "subquery"})


@subqueryload._add_unbound_fn
def subqueryload(*keys):
    return _UnboundLoad._from_keys(_UnboundLoad.subqueryload, keys, False, {})


@subqueryload._add_unbound_all_fn
def subqueryload_all(*keys):
    return _UnboundLoad._from_keys(_UnboundLoad.subqueryload, keys, True, {})
|
||||
|
||||
|
||||
@loader_option()
def lazyload(loadopt, attr):
    """Indicate that the given attribute should be loaded using "lazy"
    loading.

    This function is part of the :class:`.Load` interface and supports
    both method-chained and standalone operation.

    .. seealso::

        :paramref:`.relationship.lazy`

    """
    return loadopt.set_relationship_strategy(attr, {"lazy": "select"})


@lazyload._add_unbound_fn
def lazyload(*keys):
    return _UnboundLoad._from_keys(_UnboundLoad.lazyload, keys, False, {})


@lazyload._add_unbound_all_fn
def lazyload_all(*keys):
    return _UnboundLoad._from_keys(_UnboundLoad.lazyload, keys, True, {})
|
||||
|
||||
|
||||
@loader_option()
def immediateload(loadopt, attr):
    """Indicate that the given attribute should be loaded using
    an immediate load with a per-attribute SELECT statement.

    This function is part of the :class:`.Load` interface and supports
    both method-chained and standalone operation.

    .. seealso::

        :ref:`loading_toplevel`

        :func:`.orm.joinedload`

        :func:`.orm.lazyload`

        :paramref:`.relationship.lazy`

    """
    return loadopt.set_relationship_strategy(attr, {"lazy": "immediate"})


@immediateload._add_unbound_fn
def immediateload(*keys):
    return _UnboundLoad._from_keys(
        _UnboundLoad.immediateload, keys, False, {})
|
||||
|
||||
|
||||
@loader_option()
def noload(loadopt, attr):
    """Indicate that the given relationship attribute should remain
    unloaded.

    This function is part of the :class:`.Load` interface and supports
    both method-chained and standalone operation.

    :func:`.orm.noload` applies to :func:`.relationship` attributes; for
    column-based attributes, see :func:`.orm.defer`.

    """
    return loadopt.set_relationship_strategy(attr, {"lazy": "noload"})


@noload._add_unbound_fn
def noload(*keys):
    return _UnboundLoad._from_keys(_UnboundLoad.noload, keys, False, {})
|
||||
|
||||
|
||||
@loader_option()
def defaultload(loadopt, attr):
    """Indicate an attribute should load using its default loader style.

    Used to link to other loader options: e.g. to set
    :func:`.orm.defer` on a class reached via a relationship of the
    entity being loaded, :func:`.orm.defaultload` navigates the path
    without changing the relationship's loading style::

        session.query(MyClass).options(defaultload("someattr").defer("some_column"))

    .. seealso::

        :func:`.orm.defer`

        :func:`.orm.undefer`

    """
    # passing None leaves the relationship's strategy untouched
    return loadopt.set_relationship_strategy(attr, None)


@defaultload._add_unbound_fn
def defaultload(*keys):
    return _UnboundLoad._from_keys(_UnboundLoad.defaultload, keys, False, {})
|
||||
|
||||
|
||||
@loader_option()
def defer(loadopt, key):
    """Indicate that the given column-oriented attribute should be
    deferred, e.g. not loaded until accessed.

    This function is part of the :class:`.Load` interface and supports
    both method-chained and standalone operation.

    e.g.::

        from sqlalchemy.orm import defer

        session.query(MyClass).options(
                            defer("attribute_one"),
                            defer("attribute_two"))

        session.query(MyClass).options(
                            defer(MyClass.attribute_one),
                            defer(MyClass.attribute_two))

    To defer an attribute on a related class, spell the path out one
    token at a time, using :func:`.orm.defaultload` for links whose
    loading style should remain unchanged::

        session.query(MyClass).options(defaultload("someattr").defer("some_column"))

    A :class:`.Load` object present on a certain path can have
    :meth:`.Load.defer` called multiple times; each call operates on
    the same parent entity::

        session.query(MyClass).options(
                        defaultload("someattr").
                            defer("some_column").
                            defer("some_other_column").
                            defer("another_column")
            )

    :param key: Attribute to be deferred.

    :param addl_attrs: Deprecated; the old 0.8 style of specifying a
     path as a series of attributes, superseded by method chaining.

    .. seealso::

        :ref:`deferred`

        :func:`.orm.undefer`

    """
    return loadopt.set_column_strategy(
        (key,),
        {"instrument": True, "deferred": True}
    )


@defer._add_unbound_fn
def defer(key, *addl_attrs):
    return _UnboundLoad._from_keys(
        _UnboundLoad.defer, (key,) + addl_attrs, False, {})
|
||||
|
||||
|
||||
@loader_option()
def undefer(loadopt, key):
    """Indicate that the given column-oriented attribute should be
    undeferred, e.g. specified within the SELECT statement of the
    entity as a whole.

    The column being undeferred is typically set up on the mapping as a
    :func:`.deferred` attribute.

    This function is part of the :class:`.Load` interface and supports
    both method-chained and standalone operation.

    Examples::

        # undefer two columns
        session.query(MyClass).options(undefer("col1"), undefer("col2"))

        # undefer all columns specific to a single class using Load + *
        session.query(MyClass, MyOtherClass).options(
            Load(MyClass).undefer("*"))

    :param key: Attribute to be undeferred.

    :param addl_attrs: Deprecated; the old 0.8 style of specifying a
     path as a series of attributes, superseded by method chaining.

    .. seealso::

        :ref:`deferred`

        :func:`.orm.defer`

        :func:`.orm.undefer_group`

    """
    return loadopt.set_column_strategy(
        (key,),
        {"instrument": True, "deferred": False}
    )


@undefer._add_unbound_fn
def undefer(key, *addl_attrs):
    return _UnboundLoad._from_keys(
        _UnboundLoad.undefer, (key,) + addl_attrs, False, {})
|
||||
|
||||
|
||||
@loader_option()
def undefer_group(loadopt, name):
    """Indicate that columns within the given deferred group name should
    be undeferred.

    The columns being undeferred are set up on the mapping as
    :func:`.deferred` attributes and include a "group" name.

    E.g::

        session.query(MyClass).options(undefer_group("large_attrs"))

    To undefer a group on a related entity, spell the path out using
    relationship loader options such as :func:`.orm.defaultload`::

        session.query(MyClass).options(
            defaultload("someattr").undefer_group("large_attrs"))

    .. versionchanged:: 0.9.0 :func:`.orm.undefer_group` is now specific
       to a particular entity load path.

    .. seealso::

        :ref:`deferred`

        :func:`.orm.defer`

        :func:`.orm.undefer`

    """
    return loadopt.set_column_strategy(
        "*",
        None,
        {"undefer_group": name}
    )


@undefer_group._add_unbound_fn
def undefer_group(name):
    return _UnboundLoad().undefer_group(name)
|
||||
123
lib/python3.4/site-packages/sqlalchemy/orm/sync.py
Normal file
123
lib/python3.4/site-packages/sqlalchemy/orm/sync.py
Normal file
|
|
@ -0,0 +1,123 @@
|
|||
# orm/sync.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""private module containing functions used for copying data
|
||||
between instances based on join conditions.
|
||||
|
||||
"""
|
||||
|
||||
from . import exc, util as orm_util, attributes
|
||||
|
||||
|
||||
def populate(source, source_mapper, dest, dest_mapper,
             synchronize_pairs, uowcommit, flag_cascaded_pks):
    """Copy attribute values from ``source`` to ``dest`` for each
    (source column, dest column) pair in ``synchronize_pairs``,
    recording primary-key cascades in ``uowcommit`` when requested.
    """
    source_dict = source.dict
    dest_dict = dest.dict

    for left, right in synchronize_pairs:
        try:
            # inline of source_mapper._get_state_attr_by_column
            source_prop = source_mapper._columntoproperty[left]
            value = source.manager[source_prop.key].impl.get(
                source, source_dict, attributes.PASSIVE_OFF)
        except exc.UnmappedColumnError:
            _raise_col_to_prop(False, source_mapper, left, dest_mapper, right)

        try:
            # inline of dest_mapper._set_state_attr_by_column
            dest_prop = dest_mapper._columntoproperty[right]
            dest.manager[dest_prop.key].impl.set(dest, dest_dict, value, None)
        except exc.UnmappedColumnError:
            _raise_col_to_prop(True, source_mapper, left, dest_mapper, right)

        # technically the "right.primary_key" check isn't needed here,
        # but it limits how often this logic is invoked for
        # memory/performance reasons, since the info is only needed for
        # a primary key destination.
        if flag_cascaded_pks and \
                left.primary_key and \
                right.primary_key and \
                right.references(left):
            uowcommit.attributes[("pk_cascaded", dest, right)] = True
|
||||
|
||||
|
||||
def clear(dest, dest_mapper, synchronize_pairs):
    """Set the destination columns named in ``synchronize_pairs`` to
    None on ``dest``, refusing to blank out a populated primary key.
    """
    for left, right in synchronize_pairs:
        if right.primary_key and \
                dest_mapper._get_state_attr_by_column(
                    dest, dest.dict, right) is not None:
            raise AssertionError(
                "Dependency rule tried to blank-out primary key "
                "column '%s' on instance '%s'" %
                (right, orm_util.state_str(dest))
            )
        try:
            dest_mapper._set_state_attr_by_column(dest, dest.dict, right, None)
        except exc.UnmappedColumnError:
            _raise_col_to_prop(True, None, left, dest_mapper, right)
|
||||
|
||||
|
||||
def update(source, source_mapper, dest, old_prefix, synchronize_pairs):
    """Populate the dictionary ``dest`` with current and committed
    ("old") values from ``source``, keyed by destination column key,
    the committed value under ``old_prefix + key``.
    """
    for left, right in synchronize_pairs:
        try:
            oldvalue = source_mapper._get_committed_attr_by_column(
                source.obj(), left)
            value = source_mapper._get_state_attr_by_column(
                source, source.dict, left)
        except exc.UnmappedColumnError:
            _raise_col_to_prop(False, source_mapper, left, None, right)
        dest[right.key] = value
        dest[old_prefix + right.key] = oldvalue
|
||||
|
||||
|
||||
def populate_dict(source, source_mapper, dict_, synchronize_pairs):
    """Populate ``dict_`` with current attribute values from ``source``,
    keyed by the destination column key of each synchronize pair.
    """
    for left, right in synchronize_pairs:
        try:
            value = source_mapper._get_state_attr_by_column(
                source, source.dict, left)
        except exc.UnmappedColumnError:
            _raise_col_to_prop(False, source_mapper, left, None, right)

        dict_[right.key] = value
|
||||
|
||||
|
||||
def source_modified(uowcommit, source, source_mapper, synchronize_pairs):
    """Return True if the source object has changes from an old to a
    new value on any of the given synchronize pairs.
    """
    for left, right in synchronize_pairs:
        try:
            prop = source_mapper._columntoproperty[left]
        except exc.UnmappedColumnError:
            _raise_col_to_prop(False, source_mapper, left, None, right)
        history = uowcommit.get_attribute_history(
            source, prop.key, attributes.PASSIVE_NO_INITIALIZE)
        if bool(history.deleted):
            return True
    # loop ran to completion with no deletions found (the original used
    # an equivalent for/else here — there is no break in the loop)
    return False
|
||||
|
||||
|
||||
def _raise_col_to_prop(isdest, source_mapper, source_column,
                       dest_mapper, dest_column):
    """Raise UnmappedColumnError describing a sync-rule column that the
    relevant mapper (destination when ``isdest``, else source) does not
    map.
    """
    if isdest:
        raise exc.UnmappedColumnError(
            "Can't execute sync rule for "
            "destination column '%s'; mapper '%s' does not map "
            "this column.  Try using an explicit `foreign_keys` "
            "collection which does not include this column (or use "
            "a viewonly=True relation)." % (dest_column, dest_mapper))
    raise exc.UnmappedColumnError(
        "Can't execute sync rule for "
        "source column '%s'; mapper '%s' does not map this "
        "column.  Try using an explicit `foreign_keys` "
        "collection which does not include destination column "
        "'%s' (or use a viewonly=True relation)." %
        (source_column, source_mapper, dest_column))
|
||||
650
lib/python3.4/site-packages/sqlalchemy/orm/unitofwork.py
Normal file
650
lib/python3.4/site-packages/sqlalchemy/orm/unitofwork.py
Normal file
|
|
@ -0,0 +1,650 @@
|
|||
# orm/unitofwork.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""The internals for the unit of work system.
|
||||
|
||||
The session's flush() process passes objects to a contextual object
|
||||
here, which assembles flush tasks based on mappers and their properties,
|
||||
organizes them in order of dependency, and executes.
|
||||
|
||||
"""
|
||||
|
||||
from .. import util, event
|
||||
from ..util import topological
|
||||
from . import attributes, persistence, util as orm_util
|
||||
|
||||
|
||||
def track_cascade_events(descriptor, prop):
    """Establish event listeners on object attributes which handle
    cascade-on-set/append.
    """
    key = prop.key

    def append(state, item, initiator):
        # "save_update" cascade when an instance is appended to the
        # collection of another instance
        if item is None:
            return

        session = state.session
        if session:
            if session._warn_on_events:
                session._flush_warning("collection append")

            rel_prop = state.manager.mapper._props[key]
            item_state = attributes.instance_state(item)
            if rel_prop._cascade.save_update and \
                    (rel_prop.cascade_backrefs or key == initiator.key) and \
                    not session._contains_state(item_state):
                session._save_or_update_state(item_state)
        return item

    def remove(state, item, initiator):
        if item is None:
            return

        session = state.session
        if session:

            rel_prop = state.manager.mapper._props[key]

            if session._warn_on_events:
                session._flush_warning(
                    "collection remove"
                    if rel_prop.uselist
                    else "related attribute delete")

            # expunge pending orphans
            item_state = attributes.instance_state(item)
            if rel_prop._cascade.delete_orphan and \
                    item_state in session._new and \
                    rel_prop.mapper._is_orphan(item_state):
                session.expunge(item)

    def set_(state, newvalue, oldvalue, initiator):
        # "save_update" cascade when an instance is attached to another
        # instance
        if oldvalue is newvalue:
            return newvalue

        session = state.session
        if session:

            if session._warn_on_events:
                session._flush_warning("related attribute set")

            rel_prop = state.manager.mapper._props[key]
            if newvalue is not None:
                newvalue_state = attributes.instance_state(newvalue)
                if rel_prop._cascade.save_update and \
                        (rel_prop.cascade_backrefs or
                            key == initiator.key) and \
                        not session._contains_state(newvalue_state):
                    session._save_or_update_state(newvalue_state)

            if oldvalue is not None and \
                    oldvalue is not attributes.NEVER_SET and \
                    oldvalue is not attributes.PASSIVE_NO_RESULT and \
                    rel_prop._cascade.delete_orphan:
                # possible to reach here with attributes.NEVER_SET ?
                oldvalue_state = attributes.instance_state(oldvalue)

                if oldvalue_state in session._new and \
                        rel_prop.mapper._is_orphan(oldvalue_state):
                    session.expunge(oldvalue)
        return newvalue

    event.listen(descriptor, 'append', append, raw=True, retval=True)
    event.listen(descriptor, 'remove', remove, raw=True, retval=True)
    event.listen(descriptor, 'set', set_, raw=True, retval=True)
|
||||
|
||||
|
||||
class UOWTransaction(object):
|
||||
def __init__(self, session):
|
||||
self.session = session
|
||||
|
||||
# dictionary used by external actors to
|
||||
# store arbitrary state information.
|
||||
self.attributes = {}
|
||||
|
||||
# dictionary of mappers to sets of
|
||||
# DependencyProcessors, which are also
|
||||
# set to be part of the sorted flush actions,
|
||||
# which have that mapper as a parent.
|
||||
self.deps = util.defaultdict(set)
|
||||
|
||||
# dictionary of mappers to sets of InstanceState
|
||||
# items pending for flush which have that mapper
|
||||
# as a parent.
|
||||
self.mappers = util.defaultdict(set)
|
||||
|
||||
# a dictionary of Preprocess objects, which gather
|
||||
# additional states impacted by the flush
|
||||
# and determine if a flush action is needed
|
||||
self.presort_actions = {}
|
||||
|
||||
# dictionary of PostSortRec objects, each
|
||||
# one issues work during the flush within
|
||||
# a certain ordering.
|
||||
self.postsort_actions = {}
|
||||
|
||||
# a set of 2-tuples, each containing two
|
||||
# PostSortRec objects where the second
|
||||
# is dependent on the first being executed
|
||||
# first
|
||||
self.dependencies = set()
|
||||
|
||||
# dictionary of InstanceState-> (isdelete, listonly)
|
||||
# tuples, indicating if this state is to be deleted
|
||||
# or insert/updated, or just refreshed
|
||||
self.states = {}
|
||||
|
||||
# tracks InstanceStates which will be receiving
|
||||
# a "post update" call. Keys are mappers,
|
||||
# values are a set of states and a set of the
|
||||
# columns which should be included in the update.
|
||||
self.post_update_states = util.defaultdict(lambda: (set(), set()))
|
||||
|
||||
@property
|
||||
def has_work(self):
|
||||
return bool(self.states)
|
||||
|
||||
def is_deleted(self, state):
|
||||
"""return true if the given state is marked as deleted
|
||||
within this uowtransaction."""
|
||||
|
||||
return state in self.states and self.states[state][0]
|
||||
|
||||
def memo(self, key, callable_):
|
||||
if key in self.attributes:
|
||||
return self.attributes[key]
|
||||
else:
|
||||
self.attributes[key] = ret = callable_()
|
||||
return ret
|
||||
|
||||
def remove_state_actions(self, state):
|
||||
"""remove pending actions for a state from the uowtransaction."""
|
||||
|
||||
isdelete = self.states[state][0]
|
||||
|
||||
self.states[state] = (isdelete, True)
|
||||
|
||||
def get_attribute_history(self, state, key,
|
||||
passive=attributes.PASSIVE_NO_INITIALIZE):
|
||||
"""facade to attributes.get_state_history(), including
|
||||
caching of results."""
|
||||
|
||||
hashkey = ("history", state, key)
|
||||
|
||||
# cache the objects, not the states; the strong reference here
|
||||
# prevents newly loaded objects from being dereferenced during the
|
||||
# flush process
|
||||
|
||||
if hashkey in self.attributes:
|
||||
history, state_history, cached_passive = self.attributes[hashkey]
|
||||
# if the cached lookup was "passive" and now
|
||||
# we want non-passive, do a non-passive lookup and re-cache
|
||||
|
||||
if not cached_passive & attributes.SQL_OK \
|
||||
and passive & attributes.SQL_OK:
|
||||
impl = state.manager[key].impl
|
||||
history = impl.get_history(state, state.dict,
|
||||
attributes.PASSIVE_OFF |
|
||||
attributes.LOAD_AGAINST_COMMITTED)
|
||||
if history and impl.uses_objects:
|
||||
state_history = history.as_state()
|
||||
else:
|
||||
state_history = history
|
||||
self.attributes[hashkey] = (history, state_history, passive)
|
||||
else:
|
||||
impl = state.manager[key].impl
|
||||
# TODO: store the history as (state, object) tuples
|
||||
# so we don't have to keep converting here
|
||||
history = impl.get_history(state, state.dict, passive |
|
||||
attributes.LOAD_AGAINST_COMMITTED)
|
||||
if history and impl.uses_objects:
|
||||
state_history = history.as_state()
|
||||
else:
|
||||
state_history = history
|
||||
self.attributes[hashkey] = (history, state_history,
|
||||
passive)
|
||||
|
||||
return state_history
|
||||
|
||||
def has_dep(self, processor):
|
||||
return (processor, True) in self.presort_actions
|
||||
|
||||
def register_preprocessor(self, processor, fromparent):
|
||||
key = (processor, fromparent)
|
||||
if key not in self.presort_actions:
|
||||
self.presort_actions[key] = Preprocess(processor, fromparent)
|
||||
|
||||
def register_object(self, state, isdelete=False,
|
||||
listonly=False, cancel_delete=False,
|
||||
operation=None, prop=None):
|
||||
if not self.session._contains_state(state):
|
||||
if not state.deleted and operation is not None:
|
||||
util.warn("Object of type %s not in session, %s operation "
|
||||
"along '%s' will not proceed" %
|
||||
(orm_util.state_class_str(state), operation, prop))
|
||||
return False
|
||||
|
||||
if state not in self.states:
|
||||
mapper = state.manager.mapper
|
||||
|
||||
if mapper not in self.mappers:
|
||||
self._per_mapper_flush_actions(mapper)
|
||||
|
||||
self.mappers[mapper].add(state)
|
||||
self.states[state] = (isdelete, listonly)
|
||||
else:
|
||||
if not listonly and (isdelete or cancel_delete):
|
||||
self.states[state] = (isdelete, False)
|
||||
return True
|
||||
|
||||
def issue_post_update(self, state, post_update_cols):
|
||||
mapper = state.manager.mapper.base_mapper
|
||||
states, cols = self.post_update_states[mapper]
|
||||
states.add(state)
|
||||
cols.update(post_update_cols)
|
||||
|
||||
def _per_mapper_flush_actions(self, mapper):
|
||||
saves = SaveUpdateAll(self, mapper.base_mapper)
|
||||
deletes = DeleteAll(self, mapper.base_mapper)
|
||||
self.dependencies.add((saves, deletes))
|
||||
|
||||
for dep in mapper._dependency_processors:
|
||||
dep.per_property_preprocessors(self)
|
||||
|
||||
for prop in mapper.relationships:
|
||||
if prop.viewonly:
|
||||
continue
|
||||
dep = prop._dependency_processor
|
||||
dep.per_property_preprocessors(self)
|
||||
|
||||
@util.memoized_property
|
||||
def _mapper_for_dep(self):
|
||||
"""return a dynamic mapping of (Mapper, DependencyProcessor) to
|
||||
True or False, indicating if the DependencyProcessor operates
|
||||
on objects of that Mapper.
|
||||
|
||||
The result is stored in the dictionary persistently once
|
||||
calculated.
|
||||
|
||||
"""
|
||||
return util.PopulateDict(
|
||||
lambda tup: tup[0]._props.get(tup[1].key) is tup[1].prop
|
||||
)
|
||||
|
||||
def filter_states_for_dep(self, dep, states):
|
||||
"""Filter the given list of InstanceStates to those relevant to the
|
||||
given DependencyProcessor.
|
||||
|
||||
"""
|
||||
mapper_for_dep = self._mapper_for_dep
|
||||
return [s for s in states if mapper_for_dep[(s.manager.mapper, dep)]]
|
||||
|
||||
def states_for_mapper_hierarchy(self, mapper, isdelete, listonly):
|
||||
checktup = (isdelete, listonly)
|
||||
for mapper in mapper.base_mapper.self_and_descendants:
|
||||
for state in self.mappers[mapper]:
|
||||
if self.states[state] == checktup:
|
||||
yield state
|
||||
|
||||
def _generate_actions(self):
    """Generate the full, unsorted collection of PostSortRecs as
    well as dependency pairs for this UOWTransaction.

    """
    # execute presort_actions, until all states
    # have been processed.  a presort_action might
    # add new states to the uow, so loop until a full pass performs
    # no work.
    while True:
        ret = False
        for action in list(self.presort_actions.values()):
            if action.execute(self):
                ret = True
        if not ret:
            break

    # see if the graph of mapper dependencies has cycles.
    self.cycles = cycles = topological.find_cycles(
        self.dependencies,
        list(self.postsort_actions.values()))

    if cycles:
        # if yes, break the per-mapper actions into
        # per-state actions
        convert = dict(
            (rec, set(rec.per_state_flush_actions(self)))
            for rec in cycles
        )

        # rewrite the existing dependencies to point to
        # the per-state actions for those per-mapper actions
        # that were broken up.
        for edge in list(self.dependencies):
            if None in edge or \
                    edge[0].disabled or edge[1].disabled or \
                    cycles.issuperset(edge):
                # inert edges, or edges entirely inside the cycle
                # set, are dropped; cycle members are re-linked below
                self.dependencies.remove(edge)
            elif edge[0] in cycles:
                # fan the edge out across the per-state replacements
                self.dependencies.remove(edge)
                for dep in convert[edge[0]]:
                    self.dependencies.add((dep, edge[1]))
            elif edge[1] in cycles:
                self.dependencies.remove(edge)
                for dep in convert[edge[1]]:
                    self.dependencies.add((edge[0], dep))

    # all enabled actions, minus those broken into per-state actions
    return set([a for a in self.postsort_actions.values()
                if not a.disabled
                ]
               ).difference(cycles)
def execute(self):
    """Topologically sort and execute all flush actions generated for
    this UOWTransaction."""
    postsort_actions = self._generate_actions()

    # sort = topological.sort(self.dependencies, postsort_actions)
    # print "--------------"
    # print "\ndependencies:", self.dependencies
    # print "\ncycles:", self.cycles
    # print "\nsort:", list(sort)
    # print "\nCOUNT OF POSTSORT ACTIONS", len(postsort_actions)

    # execute
    if self.cycles:
        # per-state actions are present; sort into batches of
        # mutually independent actions so that execute_aggregate()
        # can combine compatible records from the same batch.
        for set_ in topological.sort_as_subsets(
                self.dependencies,
                postsort_actions):
            while set_:
                n = set_.pop()
                n.execute_aggregate(self, set_)
    else:
        # no cycles: a plain topological ordering suffices
        for rec in topological.sort(
                self.dependencies,
                postsort_actions):
            rec.execute(self)
def finalize_flush_changes(self):
    """Mark processed objects as clean / deleted after a successful
    flush().

    This method is called within the flush() method after the
    execute() method has succeeded and the transaction has been
    committed: deleted states are removed from the session and the
    remainder are registered as newly persistent.
    """
    deleted = {
        s for s, (isdelete, listonly) in self.states.items()
        if isdelete
    }
    persistent = set(self.states).difference(deleted)
    self.session._remove_newly_deleted(deleted)
    self.session._register_newly_persistent(persistent)
class IterateMappersMixin(object):
|
||||
def _mappers(self, uow):
|
||||
if self.fromparent:
|
||||
return iter(
|
||||
m for m in
|
||||
self.dependency_processor.parent.self_and_descendants
|
||||
if uow._mapper_for_dep[(m, self.dependency_processor)]
|
||||
)
|
||||
else:
|
||||
return self.dependency_processor.mapper.self_and_descendants
|
||||
|
||||
|
||||
class Preprocess(IterateMappersMixin):
    """Presort action that accumulates the flush states relevant to
    one DependencyProcessor and hands them to its presort hooks,
    establishing per-property flush actions the first time actual
    changes are reported."""

    def __init__(self, dependency_processor, fromparent):
        self.dependency_processor = dependency_processor
        # True: iterate mappers from the parent side of the
        # relationship; False: from the child side (see
        # IterateMappersMixin._mappers)
        self.fromparent = fromparent
        # states already handed to the presort hooks
        self.processed = set()
        # set True once per_property_flush_actions() has been called
        self.setup_flush_actions = False

    def execute(self, uow):
        # Run the presort hooks over any not-yet-processed states.
        # Returns True when any work was performed; the uow keeps
        # re-running presort actions until all of them return False.
        delete_states = set()
        save_states = set()

        for mapper in self._mappers(uow):
            for state in uow.mappers[mapper].difference(self.processed):
                (isdelete, listonly) = uow.states[state]
                if not listonly:
                    if isdelete:
                        delete_states.add(state)
                    else:
                        save_states.add(state)

        if delete_states:
            self.dependency_processor.presort_deletes(uow, delete_states)
            self.processed.update(delete_states)
        if save_states:
            self.dependency_processor.presort_saves(uow, save_states)
            self.processed.update(save_states)

        if (delete_states or save_states):
            # lazily establish the per-property flush actions the
            # first time the processor reports actual changes
            if not self.setup_flush_actions and (
                    self.dependency_processor.
                    prop_has_changes(uow, delete_states, True) or
                    self.dependency_processor.
                    prop_has_changes(uow, save_states, False)
            ):
                self.dependency_processor.per_property_flush_actions(uow)
                self.setup_flush_actions = True
            return True
        else:
            return False
class PostSortRec(object):
    """Base class for topologically-sortable flush actions.

    Instances are interned per UOWTransaction: constructing the same
    (class, args) combination twice returns the same object.
    """

    disabled = False

    def __new__(cls, uow, *args):
        key = (cls,) + args
        try:
            return uow.postsort_actions[key]
        except KeyError:
            rec = uow.postsort_actions[key] = object.__new__(cls)
            return rec

    def execute_aggregate(self, uow, recs):
        # default: no batching with sibling records, just execute
        self.execute(uow)

    def __repr__(self):
        return "%s(%s)" % (
            self.__class__.__name__,
            ",".join(str(v) for v in self.__dict__.values())
        )
class ProcessAll(IterateMappersMixin, PostSortRec):
    """Run a DependencyProcessor against every applicable state in the
    flush, in either the "save" or the "delete" direction."""

    def __init__(self, uow, dependency_processor, delete, fromparent):
        self.dependency_processor = dependency_processor
        self.delete = delete
        self.fromparent = fromparent
        parent_base = dependency_processor.parent.base_mapper
        uow.deps[parent_base].add(dependency_processor)

    def execute(self, uow):
        candidates = self._elements(uow)
        if self.delete:
            handler = self.dependency_processor.process_deletes
        else:
            handler = self.dependency_processor.process_saves
        handler(uow, candidates)

    def per_state_flush_actions(self, uow):
        # handled by SaveUpdateAll and DeleteAll: a ProcessAll is
        # unconditionally pulled into per-state mode whenever the
        # parent/child mappers are part of a cycle
        return iter([])

    def __repr__(self):
        return "%s(%s, delete=%s)" % (
            self.__class__.__name__,
            self.dependency_processor,
            self.delete
        )

    def _elements(self, uow):
        # yield each non-listonly state, in the hierarchy selected by
        # the mixin, whose delete flag matches this action's direction
        for mapper in self._mappers(uow):
            for state in uow.mappers[mapper]:
                isdelete, listonly = uow.states[state]
                if isdelete == self.delete and not listonly:
                    yield state
class IssuePostUpdate(PostSortRec):
    """Emit the deferred UPDATE statements collected via
    UOWTransaction.issue_post_update() for one mapper hierarchy."""

    def __init__(self, uow, mapper, isdelete):
        self.mapper = mapper
        self.isdelete = isdelete

    def execute(self, uow):
        all_states, cols = uow.post_update_states[self.mapper]
        # only the states whose delete flag matches this action's
        # direction are updated in this pass
        relevant = [
            state for state in all_states
            if uow.states[state][0] == self.isdelete
        ]
        persistence.post_update(self.mapper, relevant, uow, cols)
class SaveUpdateAll(PostSortRec):
    """Mapper-level action issuing all INSERT/UPDATE statements for
    one mapper hierarchy."""

    def __init__(self, uow, mapper):
        self.mapper = mapper
        assert mapper is mapper.base_mapper

    def execute(self, uow):
        # flush every non-delete, non-listonly state in the hierarchy
        persistence.save_obj(self.mapper,
                             uow.states_for_mapper_hierarchy(
                                 self.mapper, False, False),
                             uow
                             )

    def per_state_flush_actions(self, uow):
        # break this mapper-level action into one SaveUpdateState per
        # state; used when this action participates in a cycle
        states = list(uow.states_for_mapper_hierarchy(
            self.mapper, False, False))
        base_mapper = self.mapper.base_mapper
        delete_all = DeleteAll(uow, base_mapper)
        for state in states:
            # keep saves before deletes -
            # this ensures 'row switch' operations work
            action = SaveUpdateState(uow, state, base_mapper)
            uow.dependencies.add((action, delete_all))
            yield action

        # also break the per-property processors for these states
        # into per-state actions
        for dep in uow.deps[self.mapper]:
            states_for_prop = uow.filter_states_for_dep(dep, states)
            dep.per_state_flush_actions(uow, states_for_prop, False)
class DeleteAll(PostSortRec):
    """Mapper-level action issuing all DELETE statements for one
    mapper hierarchy."""

    def __init__(self, uow, mapper):
        self.mapper = mapper
        assert mapper is mapper.base_mapper

    def execute(self, uow):
        # flush every deleted, non-listonly state in the hierarchy
        persistence.delete_obj(self.mapper,
                               uow.states_for_mapper_hierarchy(
                                   self.mapper, True, False),
                               uow
                               )

    def per_state_flush_actions(self, uow):
        # break this mapper-level action into one DeleteState per
        # state; used when this action participates in a cycle
        states = list(uow.states_for_mapper_hierarchy(
            self.mapper, True, False))
        base_mapper = self.mapper.base_mapper
        save_all = SaveUpdateAll(uow, base_mapper)
        for state in states:
            # keep saves before deletes -
            # this ensures 'row switch' operations work
            action = DeleteState(uow, state, base_mapper)
            uow.dependencies.add((save_all, action))
            yield action

        # also break the per-property processors for these states
        # into per-state actions
        for dep in uow.deps[self.mapper]:
            states_for_prop = uow.filter_states_for_dep(dep, states)
            dep.per_state_flush_actions(uow, states_for_prop, True)
class ProcessState(PostSortRec):
    """Per-state application of a DependencyProcessor, used when the
    mapper-level actions had to be broken apart to resolve a
    dependency cycle."""

    def __init__(self, uow, dependency_processor, delete, state):
        self.dependency_processor = dependency_processor
        self.delete = delete
        self.state = state

    def execute_aggregate(self, uow, recs):
        # pull every compatible pending record out of ``recs`` and run
        # the dependency processor once over the combined state list
        cls_ = self.__class__
        dependency_processor = self.dependency_processor
        delete = self.delete
        batched = [
            r for r in recs
            if r.__class__ is cls_ and
            r.dependency_processor is dependency_processor and
            r.delete is delete
        ]
        recs.difference_update(batched)
        combined = [self.state] + [r.state for r in batched]
        if delete:
            dependency_processor.process_deletes(uow, combined)
        else:
            dependency_processor.process_saves(uow, combined)

    def __repr__(self):
        return "%s(%s, %s, delete=%s)" % (
            self.__class__.__name__,
            self.dependency_processor,
            orm_util.state_str(self.state),
            self.delete
        )
class SaveUpdateState(PostSortRec):
    """Per-state INSERT/UPDATE action, used when the mapper-level
    actions had to be broken apart to resolve a dependency cycle."""

    def __init__(self, uow, state, mapper):
        self.state = state
        self.mapper = mapper

    def execute_aggregate(self, uow, recs):
        # batch together every pending record of the same class and
        # mapper so persistence.save_obj() sees one state list
        cls_ = self.__class__
        mapper = self.mapper
        batched = [
            r for r in recs
            if r.__class__ is cls_ and r.mapper is mapper
        ]
        recs.difference_update(batched)
        persistence.save_obj(mapper,
                             [self.state] +
                             [r.state for r in batched],
                             uow)

    def __repr__(self):
        return "%s(%s)" % (
            self.__class__.__name__,
            orm_util.state_str(self.state)
        )
class DeleteState(PostSortRec):
    """Per-state DELETE action, used when the mapper-level actions had
    to be broken apart to resolve a dependency cycle."""

    def __init__(self, uow, state, mapper):
        self.state = state
        self.mapper = mapper

    def execute_aggregate(self, uow, recs):
        # batch together every pending record of the same class and
        # mapper so persistence.delete_obj() sees one state list
        cls_ = self.__class__
        mapper = self.mapper
        batched = [
            r for r in recs
            if r.__class__ is cls_ and r.mapper is mapper
        ]
        recs.difference_update(batched)
        pending = [self.state] + [r.state for r in batched]
        # only states still flagged as deleted receive the DELETE
        persistence.delete_obj(mapper,
                               [s for s in pending if uow.states[s][0]],
                               uow)

    def __repr__(self):
        return "%s(%s)" % (
            self.__class__.__name__,
            orm_util.state_str(self.state)
        )
964
lib/python3.4/site-packages/sqlalchemy/orm/util.py
Normal file
964
lib/python3.4/site-packages/sqlalchemy/orm/util.py
Normal file
|
|
@ -0,0 +1,964 @@
|
|||
# orm/util.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
|
||||
from .. import sql, util, event, exc as sa_exc, inspection
|
||||
from ..sql import expression, util as sql_util, operators
|
||||
from .interfaces import PropComparator, MapperProperty
|
||||
from . import attributes
|
||||
import re
|
||||
|
||||
from .base import instance_str, state_str, state_class_str, attribute_str, \
|
||||
state_attribute_str, object_mapper, object_state, _none_set
|
||||
from .base import class_mapper, _class_to_mapper
|
||||
from .base import _InspectionAttr
|
||||
from .path_registry import PathRegistry
|
||||
|
||||
all_cascades = frozenset(("delete", "delete-orphan", "all", "merge",
                          "expunge", "save-update", "refresh-expire",
                          "none"))


class CascadeOptions(frozenset):
    """Keeps track of the options sent to relationship().cascade"""

    # "all" expands to every cascade except these pseudo/explicit ones
    _add_w_all_cascades = all_cascades.difference([
        'all', 'none', 'delete-orphan'])
    _allowed_cascades = all_cascades

    def __new__(cls, arg):
        # split the comma-separated cascade string into individual
        # options; raw string fixes the invalid '\s' escape sequence
        # (a DeprecationWarning, later an error, on modern CPython)
        values = set([
            c for c
            in re.split(r'\s*,\s*', arg or "")
            if c
        ])

        if values.difference(cls._allowed_cascades):
            raise sa_exc.ArgumentError(
                "Invalid cascade option(s): %s" %
                ", ".join([repr(x) for x in
                           sorted(
                               values.difference(cls._allowed_cascades)
                           )])
            )

        if "all" in values:
            values.update(cls._add_w_all_cascades)
        if "none" in values:
            values.clear()
        values.discard('all')

        self = frozenset.__new__(CascadeOptions, values)
        # convenience boolean flags, one per individual cascade option
        self.save_update = 'save-update' in values
        self.delete = 'delete' in values
        self.refresh_expire = 'refresh-expire' in values
        self.merge = 'merge' in values
        self.expunge = 'expunge' in values
        self.delete_orphan = "delete-orphan" in values

        if self.delete_orphan and not self.delete:
            util.warn("The 'delete-orphan' cascade "
                      "option requires 'delete'.")
        return self

    def __repr__(self):
        return "CascadeOptions(%r)" % (
            ",".join([x for x in sorted(self)])
        )
def _validator_events(
        desc, key, validator, include_removes, include_backrefs):
    """Runs a validation method on an attribute value to be set or
    appended.

    Registers 'append'/'set' (and optionally 'remove') listeners on
    *desc* that route each incoming value through *validator* and use
    its return value.  When ``include_removes`` is True the validator
    is called with an extra boolean "is remove" flag; when
    ``include_backrefs`` is False, events initiated from the other
    side of a backref pass through unvalidated.
    """

    if not include_backrefs:
        def detect_is_backref(state, initiator):
            # an event whose initiator impl differs from this
            # attribute's own impl originated on the backref side
            impl = state.manager[key].impl
            return initiator.impl is not impl

    if include_removes:
        # validator signature: (obj, key, value, is_remove)
        def append(state, value, initiator):
            if include_backrefs or not detect_is_backref(state, initiator):
                return validator(state.obj(), key, value, False)
            else:
                return value

        def set_(state, value, oldvalue, initiator):
            if include_backrefs or not detect_is_backref(state, initiator):
                return validator(state.obj(), key, value, False)
            else:
                return value

        def remove(state, value, initiator):
            if include_backrefs or not detect_is_backref(state, initiator):
                validator(state.obj(), key, value, True)

    else:
        # validator signature: (obj, key, value)
        def append(state, value, initiator):
            if include_backrefs or not detect_is_backref(state, initiator):
                return validator(state.obj(), key, value)
            else:
                return value

        def set_(state, value, oldvalue, initiator):
            if include_backrefs or not detect_is_backref(state, initiator):
                return validator(state.obj(), key, value)
            else:
                return value

    # retval=True: the listener's return value replaces the incoming
    # value, which is how the validator can coerce it
    event.listen(desc, 'append', append, raw=True, retval=True)
    event.listen(desc, 'set', set_, raw=True, retval=True)
    if include_removes:
        event.listen(desc, "remove", remove, raw=True, retval=True)
def polymorphic_union(table_map, typecolname,
                      aliasname='p_union', cast_nulls=True):
    """Create a ``UNION`` statement used by a polymorphic mapper.

    See :ref:`concrete_inheritance` for an example of how
    this is used.

    :param table_map: mapping of polymorphic identities to
     :class:`.Table` objects.
    :param typecolname: string name of a "discriminator" column, which will be
     derived from the query, producing the polymorphic identity for
     each row.  If ``None``, no polymorphic discriminator is generated.
    :param aliasname: name of the :func:`~sqlalchemy.sql.expression.alias()`
     construct generated.
    :param cast_nulls: if True, non-existent columns, which are represented
     as labeled NULLs, will be passed into CAST.   This is a legacy behavior
     that is problematic on some backends such as Oracle - in which case it
     can be set to False.

    """

    colnames = util.OrderedSet()
    colnamemaps = {}
    types = {}
    for key in table_map:
        table = table_map[key]

        # mysql doesn't like selecting from a select;
        # make it an alias of the select
        if isinstance(table, sql.Select):
            table = table.alias()
            table_map[key] = table

        # record each table's columns by key, remembering one type per
        # column name so NULL placeholders can be typed below
        m = {}
        for c in table.c:
            colnames.add(c.key)
            m[c.key] = c
            types[c.key] = c.type
        colnamemaps[table] = m

    def col(name, table):
        # return the named column from the table, or a typed NULL
        # label when this particular table has no such column
        try:
            return colnamemaps[table][name]
        except KeyError:
            if cast_nulls:
                return sql.cast(sql.null(), types[name]).label(name)
            else:
                return sql.type_coerce(sql.null(), types[name]).label(name)

    # one SELECT per polymorphic identity, with the literal
    # discriminator value appended as a column when requested
    result = []
    for type, table in table_map.items():
        if typecolname is not None:
            result.append(
                sql.select([col(name, table) for name in colnames] +
                           [sql.literal_column(
                               sql_util._quote_ddl_expr(type)).
                            label(typecolname)],
                           from_obj=[table]))
        else:
            result.append(sql.select([col(name, table) for name in colnames],
                                     from_obj=[table]))
    return sql.union_all(*result).alias(aliasname)
def identity_key(*args, **kwargs):
    """Generate "identity key" tuples, as are used as keys in the
    :attr:`.Session.identity_map` dictionary.

    This function has several call styles:

    * ``identity_key(class, ident)``

      This form receives a mapped class and a primary key scalar or
      tuple as an argument.

      E.g.::

        >>> identity_key(MyClass, (1, 2))
        (<class '__main__.MyClass'>, (1, 2))

      :param class: mapped class (must be a positional argument)
      :param ident: primary key, may be a scalar or tuple argument.


    * ``identity_key(instance=instance)``

      This form will produce the identity key for a given instance.  The
      instance need not be persistent, only that its primary key attributes
      are populated (else the key will contain ``None`` for those missing
      values).

      E.g.::

        >>> instance = MyClass(1, 2)
        >>> identity_key(instance=instance)
        (<class '__main__.MyClass'>, (1, 2))

      In this form, the given instance is ultimately run though
      :meth:`.Mapper.identity_key_from_instance`, which will have the
      effect of performing a database check for the corresponding row
      if the object is expired.

      :param instance: object instance (must be given as a keyword arg)

    * ``identity_key(class, row=row)``

      This form is similar to the class/tuple form, except is passed a
      database result row as a :class:`.RowProxy` object.

      E.g.::

        >>> row = engine.execute("select * from table where a=1 and b=2").\
first()
        >>> identity_key(MyClass, row=row)
        (<class '__main__.MyClass'>, (1, 2))

      :param class: mapped class (must be a positional argument)
      :param row: :class:`.RowProxy` row returned by a :class:`.ResultProxy`
       (must be given as a keyword arg)

    """
    if args:
        if len(args) == 1:
            class_ = args[0]
            try:
                row = kwargs.pop("row")
            except KeyError:
                ident = kwargs.pop("ident")
        elif len(args) == 2:
            class_, ident = args
        elif len(args) == 3:
            # NOTE(review): three positional arguments cannot unpack
            # into two names, so this branch raises ValueError rather
            # than the ArgumentError below — confirm the intended
            # legacy signature before changing it.
            class_, ident = args
        else:
            raise sa_exc.ArgumentError(
                "expected up to three positional arguments, "
                "got %s" % len(args))
        if kwargs:
            raise sa_exc.ArgumentError("unknown keyword arguments: %s"
                                       % ", ".join(kwargs))
        mapper = class_mapper(class_)
        if "ident" in locals():
            return mapper.identity_key_from_primary_key(util.to_list(ident))
        return mapper.identity_key_from_row(row)
    instance = kwargs.pop("instance")
    if kwargs:
        # fixed: ``kwargs.keys`` (the unbound method object) was being
        # passed to str.join, which raised TypeError instead of the
        # intended ArgumentError; iterate the mapping itself, matching
        # the positional branch above.
        raise sa_exc.ArgumentError("unknown keyword arguments: %s"
                                   % ", ".join(kwargs))
    mapper = object_mapper(instance)
    return mapper.identity_key_from_instance(instance)
class ORMAdapter(sql_util.ColumnAdapter):
    """Extends ColumnAdapter to accept ORM entities.

    The selectable is extracted from the given entity,
    and the AliasedClass if any is referenced.

    """

    def __init__(self, entity, equivalents=None, adapt_required=False,
                 chain_to=None):
        # *entity* may be any inspectable ORM construct; inspect()
        # normalizes it to an object exposing mapper / selectable /
        # is_aliased_class
        info = inspection.inspect(entity)

        self.mapper = info.mapper
        selectable = info.selectable
        is_aliased_class = info.is_aliased_class
        if is_aliased_class:
            # keep a reference to the AliasedClass itself
            self.aliased_class = entity
        else:
            self.aliased_class = None
        sql_util.ColumnAdapter.__init__(self, selectable,
                                        equivalents, chain_to,
                                        adapt_required=adapt_required)

    def replace(self, elem):
        # only adapt elements whose 'parentmapper' annotation is
        # absent or isa() this adapter's mapper; others are left to
        # a chained adapter (return None)
        entity = elem._annotations.get('parentmapper', None)
        if not entity or entity.isa(self.mapper):
            return sql_util.ColumnAdapter.replace(self, elem)
        else:
            return None
class AliasedClass(object):
    """Represents an "aliased" form of a mapped class for usage with Query.

    The ORM equivalent of a :func:`sqlalchemy.sql.expression.alias`
    construct, this object mimics the mapped class using a
    __getattr__ scheme and maintains a reference to a
    real :class:`~sqlalchemy.sql.expression.Alias` object.

    Usage is via the :func:`.orm.aliased` function, or alternatively
    via the :func:`.orm.with_polymorphic` function.

    Usage example::

        # find all pairs of users with the same name
        user_alias = aliased(User)
        session.query(User, user_alias).\\
                        join((user_alias, User.id > user_alias.id)).\\
                        filter(User.name==user_alias.name)

    The resulting object is an instance of :class:`.AliasedClass`.
    This object implements an attribute scheme which produces the
    same attribute and method interface as the original mapped
    class, allowing :class:`.AliasedClass` to be compatible
    with any attribute technique which works on the original class,
    including hybrid attributes (see :ref:`hybrids_toplevel`).

    The :class:`.AliasedClass` can be inspected for its underlying
    :class:`.Mapper`, aliased selectable, and other information
    using :func:`.inspect`::

        from sqlalchemy import inspect
        my_alias = aliased(MyClass)
        insp = inspect(my_alias)

    The resulting inspection object is an instance of :class:`.AliasedInsp`.

    See :func:`.aliased` and :func:`.with_polymorphic` for construction
    argument descriptions.

    """

    def __init__(self, cls, alias=None,
                 name=None,
                 flat=False,
                 adapt_on_names=False,
                 # TODO: None for default here?
                 with_polymorphic_mappers=(),
                 with_polymorphic_discriminator=None,
                 base_alias=None,
                 use_mapper_path=False):
        mapper = _class_to_mapper(cls)
        if alias is None:
            # default: an anonymous alias of the mapped selectable
            alias = mapper._with_polymorphic_selectable.alias(
                name=name, flat=flat)
        # all inspection state lives on the companion AliasedInsp
        self._aliased_insp = AliasedInsp(
            self,
            mapper,
            alias,
            name,
            with_polymorphic_mappers
            if with_polymorphic_mappers
            else mapper.with_polymorphic_mappers,
            with_polymorphic_discriminator
            if with_polymorphic_discriminator is not None
            else mapper.polymorphic_on,
            base_alias,
            use_mapper_path,
            adapt_on_names
        )

        self.__name__ = 'AliasedClass_%s' % mapper.class_.__name__

    def __getattr__(self, key):
        # mimic the mapped class: locate *key* along the target
        # class' MRO, then adapt the attribute to this alias where
        # appropriate
        try:
            _aliased_insp = self.__dict__['_aliased_insp']
        except KeyError:
            # instance not fully constructed yet
            raise AttributeError()
        else:
            for base in _aliased_insp._target.__mro__:
                try:
                    attr = object.__getattribute__(base, key)
                except AttributeError:
                    continue
                else:
                    break
            else:
                raise AttributeError(key)

        if isinstance(attr, PropComparator):
            # mapped attribute: adapt it, then cache on the instance
            # so __getattr__ isn't invoked again for this key
            ret = attr.adapt_to_entity(_aliased_insp)
            setattr(self, key, ret)
            return ret
        elif hasattr(attr, 'func_code'):
            # NOTE(review): 'func_code' exists only on Python 2
            # function objects; Python 3 functions expose '__code__',
            # so this branch looks unreachable in this py3 copy —
            # confirm before relying on it.
            is_method = getattr(_aliased_insp._target, key, None)
            if is_method and is_method.__self__ is not None:
                return util.types.MethodType(attr.__func__, self, self)
            else:
                return None
        elif hasattr(attr, '__get__'):
            # descriptor: invoke it against the alias
            ret = attr.__get__(None, self)
            if isinstance(ret, PropComparator):
                return ret.adapt_to_entity(_aliased_insp)
            else:
                return ret
        else:
            return attr

    def __repr__(self):
        return '<AliasedClass at 0x%x; %s>' % (
            id(self), self._aliased_insp._target.__name__)
class AliasedInsp(_InspectionAttr):
    """Provide an inspection interface for an
    :class:`.AliasedClass` object.

    The :class:`.AliasedInsp` object is returned
    given an :class:`.AliasedClass` using the
    :func:`.inspect` function::

        from sqlalchemy import inspect
        from sqlalchemy.orm import aliased

        my_alias = aliased(MyMappedClass)
        insp = inspect(my_alias)

    Attributes on :class:`.AliasedInsp`
    include:

    * ``entity`` - the :class:`.AliasedClass` represented.
    * ``mapper`` - the :class:`.Mapper` mapping the underlying class.
    * ``selectable`` - the :class:`.Alias` construct which ultimately
      represents an aliased :class:`.Table` or :class:`.Select`
      construct.
    * ``name`` - the name of the alias.  Also is used as the attribute
      name when returned in a result tuple from :class:`.Query`.
    * ``with_polymorphic_mappers`` - collection of :class:`.Mapper` objects
      indicating all those mappers expressed in the select construct
      for the :class:`.AliasedClass`.
    * ``polymorphic_on`` - an alternate column or SQL expression which
      will be used as the "discriminator" for a polymorphic load.

    .. seealso::

        :ref:`inspection_toplevel`

    """

    def __init__(self, entity, mapper, selectable, name,
                 with_polymorphic_mappers, polymorphic_on,
                 _base_alias, _use_mapper_path, adapt_on_names):
        self.entity = entity
        self.mapper = mapper
        self.selectable = selectable
        self.name = name
        self.with_polymorphic_mappers = with_polymorphic_mappers
        self.polymorphic_on = polymorphic_on
        # the "root" AliasedInsp when this alias is a member of a
        # with_polymorphic() group; defaults to self
        self._base_alias = _base_alias or self
        self._use_mapper_path = _use_mapper_path

        # adapter translating mapped-table columns into the aliased
        # selectable's columns
        self._adapter = sql_util.ClauseAdapter(
            selectable, equivalents=mapper._equivalent_columns,
            adapt_on_names=adapt_on_names)

        self._adapt_on_names = adapt_on_names
        self._target = mapper.class_

        # expose each additional sub-mapper as an attribute of the
        # AliasedClass, itself aliased against the same selectable
        for poly in self.with_polymorphic_mappers:
            if poly is not mapper:
                setattr(self.entity, poly.class_.__name__,
                        AliasedClass(poly.class_, selectable, base_alias=self,
                                     adapt_on_names=adapt_on_names,
                                     use_mapper_path=_use_mapper_path))

    # inspection marker: this construct is an aliased class
    is_aliased_class = True
    "always returns True"

    @property
    def class_(self):
        """Return the mapped class ultimately represented by this
        :class:`.AliasedInsp`."""
        return self.mapper.class_

    @util.memoized_property
    def _path_registry(self):
        # loader paths are keyed to the plain mapper when this alias
        # was created with use_mapper_path, else to this alias itself
        if self._use_mapper_path:
            return self.mapper._path_registry
        else:
            return PathRegistry.per_mapper(self)

    def __getstate__(self):
        # pickle support: capture the constructor arguments
        return {
            'entity': self.entity,
            'mapper': self.mapper,
            'alias': self.selectable,
            'name': self.name,
            'adapt_on_names': self._adapt_on_names,
            'with_polymorphic_mappers':
                self.with_polymorphic_mappers,
            'with_polymorphic_discriminator':
                self.polymorphic_on,
            'base_alias': self._base_alias,
            'use_mapper_path': self._use_mapper_path
        }

    def __setstate__(self, state):
        # rebuild by re-running __init__ with the captured arguments
        self.__init__(
            state['entity'],
            state['mapper'],
            state['alias'],
            state['name'],
            state['with_polymorphic_mappers'],
            state['with_polymorphic_discriminator'],
            state['base_alias'],
            state['use_mapper_path'],
            state['adapt_on_names']
        )

    def _adapt_element(self, elem):
        # adapt a column element into the aliased selectable and tag
        # it with this alias as its parent entity/mapper
        return self._adapter.traverse(elem).\
            _annotate({
                'parententity': self.entity,
                'parentmapper': self.mapper}
            )

    def _entity_for_mapper(self, mapper):
        # return the AliasedInsp within this with_polymorphic group
        # which corresponds to the given mapper
        self_poly = self.with_polymorphic_mappers
        if mapper in self_poly:
            return getattr(self.entity, mapper.class_.__name__)._aliased_insp
        elif mapper.isa(self.mapper):
            return self
        else:
            assert False, "mapper %s doesn't correspond to %s" % (
                mapper, self)

    def __repr__(self):
        return '<AliasedInsp at 0x%x; %s>' % (
            id(self), self.class_.__name__)
# Register with the inspection system: inspect() on an AliasedClass
# yields its AliasedInsp; inspect() on an AliasedInsp yields itself.
inspection._inspects(AliasedClass)(lambda target: target._aliased_insp)
inspection._inspects(AliasedInsp)(lambda target: target)
def aliased(element, alias=None, name=None, flat=False, adapt_on_names=False):
    """Produce an alias of the given element, usually an :class:`.AliasedClass`
    instance.

    E.g.::

        my_alias = aliased(MyClass)

        session.query(MyClass, my_alias).filter(MyClass.id > my_alias.id)

    The :func:`.aliased` function is used to create an ad-hoc mapping
    of a mapped class to a new selectable.  By default, a selectable
    is generated from the normally mapped selectable (typically a
    :class:`.Table`) using the :meth:`.FromClause.alias` method.
    However, :func:`.aliased` can also be used to link the class to
    a new :func:`.select` statement.   Also, the :func:`.with_polymorphic`
    function is a variant of :func:`.aliased` that is intended to specify
    a so-called "polymorphic selectable", that corresponds to the union
    of several joined-inheritance subclasses at once.

    For convenience, the :func:`.aliased` function also accepts plain
    :class:`.FromClause` constructs, such as a :class:`.Table` or
    :func:`.select` construct.   In those cases, the :meth:`.FromClause.alias`
    method is called on the object and the new :class:`.Alias` object
    returned.  The returned :class:`.Alias` is not ORM-mapped in this case.

    :param element: element to be aliased.  Is normally a mapped class,
     but for convenience can also be a :class:`.FromClause` element.

    :param alias: Optional selectable unit to map the element to.  This should
     normally be a :class:`.Alias` object corresponding to the :class:`.Table`
     to which the class is mapped, or to a :func:`.select` construct that
     is compatible with the mapping.   By default, a simple anonymous
     alias of the mapped table is generated.

    :param name: optional string name to use for the alias, if not specified
     by the ``alias`` parameter.  The name, among other things, forms the
     attribute name that will be accessible via tuples returned by a
     :class:`.Query` object.

    :param flat: Boolean, will be passed through to the
     :meth:`.FromClause.alias` call so that aliases of :class:`.Join` objects
     don't include an enclosing SELECT.  This can lead to more efficient
     queries in many circumstances.  A JOIN against a nested JOIN will be
     rewritten as a JOIN against an aliased SELECT subquery on backends that
     don't support this syntax.

     .. versionadded:: 0.9.0

     .. seealso:: :meth:`.Join.alias`

    :param adapt_on_names: if True, more liberal "matching" will be used when
     mapping the mapped columns of the ORM entity to those of the
     given selectable - a name-based match will be performed if the
     given selectable doesn't otherwise have a column that corresponds
     to one on the entity.  The use case for this is when associating
     an entity with some derived selectable such as one that uses
     aggregate functions::

        class UnitPrice(Base):
            __tablename__ = 'unit_price'
            ...
            unit_id = Column(Integer)
            price = Column(Numeric)

        aggregated_unit_price = Session.query(
                                func.sum(UnitPrice.price).label('price')
                                ).group_by(UnitPrice.unit_id).subquery()

        aggregated_unit_price = aliased(UnitPrice,
                    alias=aggregated_unit_price, adapt_on_names=True)

     Above, functions on ``aggregated_unit_price`` which refer to
     ``.price`` will return the
     ``func.sum(UnitPrice.price).label('price')`` column, as it is
     matched on the name "price".  Ordinarily, the "price" function
     wouldn't have any "column correspondence" to the actual
     ``UnitPrice.price`` column as it is not a proxy of the original.

     .. versionadded:: 0.7.3

    """
    # Plain Core selectables are aliased directly; adapt_on_names has no
    # meaning without an ORM mapping, so reject the combination.
    if isinstance(element, expression.FromClause):
        if adapt_on_names:
            raise sa_exc.ArgumentError(
                "adapt_on_names only applies to ORM elements"
            )
        return element.alias(name, flat=flat)
    else:
        return AliasedClass(element, alias=alias, flat=flat,
                            name=name, adapt_on_names=adapt_on_names)
|
||||
|
||||
|
||||
def with_polymorphic(base, classes, selectable=False,
                     flat=False,
                     polymorphic_on=None, aliased=False,
                     innerjoin=False, _use_mapper_path=False):
    """Produce an :class:`.AliasedClass` construct which specifies
    columns for descendant mappers of the given base.

    .. versionadded:: 0.8
        :func:`.orm.with_polymorphic` is in addition to the existing
        :class:`.Query` method :meth:`.Query.with_polymorphic`,
        which has the same purpose but is not as flexible in its usage.

    Using this construct ensures that each descendant mapper's tables
    are part of the FROM clause, so that filter() criterion may target
    those tables and the resulting instances have their subclass
    columns loaded up front, with no "post fetch" required.

    See the examples at :ref:`with_polymorphic`.

    :param base: Base class to be aliased.

    :param classes: a single class or mapper, or list of class/mappers,
     which inherit from the base class.  Alternatively, it may also be
     the string ``'*'``, in which case all descending mapped classes
     will be added to the FROM clause.

    :param aliased: when True, the selectable will be wrapped in an
     alias, that is ``(SELECT * FROM <fromclauses>) AS anon_1``.
     Important when using with_polymorphic() as the target of a JOIN
     on a backend that does not support parenthesized joins, such as
     SQLite and older versions of MySQL.

    :param flat: Boolean, passed through to the
     :meth:`.FromClause.alias` call so that aliases of :class:`.Join`
     objects don't include an enclosing SELECT, which can produce more
     efficient queries.  A JOIN against a nested JOIN will be rewritten
     as a JOIN against an aliased SELECT subquery on backends that
     don't support that syntax.  Setting ``flat`` to ``True`` implies
     the ``aliased`` flag is also ``True``.

     .. versionadded:: 0.9.0

     .. seealso:: :meth:`.Join.alias`

    :param selectable: a table or select() statement that will
     be used in place of the generated FROM clause.  Required if any of
     the desired classes use concrete table inheritance, since
     SQLAlchemy currently cannot generate UNIONs among tables
     automatically.  If used, it must represent the full set of tables
     and columns mapped by every mapped class; otherwise the
     unaccounted mapped columns will result in their table being
     appended directly to the FROM clause, usually leading to
     incorrect results.

    :param polymorphic_on: a column to be used as the "discriminator"
     column for the given selectable.  If not given, the polymorphic_on
     attribute of the base classes' mapper will be used, if any.  Useful
     for mappings that don't have polymorphic loading behavior by
     default.

    :param innerjoin: if True, an INNER JOIN will be used.  This should
     only be specified if querying for one specific subtype only
    """
    primary_mapper = _class_to_mapper(base)
    mappers, selectable = primary_mapper._with_polymorphic_args(
        classes, selectable, innerjoin=innerjoin)
    # flat implies aliased; both routes wrap the selectable in an alias.
    if aliased or flat:
        selectable = selectable.alias(flat=flat)
    return AliasedClass(
        base,
        selectable,
        with_polymorphic_mappers=mappers,
        with_polymorphic_discriminator=polymorphic_on,
        use_mapper_path=_use_mapper_path)
|
||||
|
||||
|
||||
def _orm_annotate(element, exclude=None):
    """Deep copy the given ClauseElement, tagging each element with the
    "_orm_adapt" flag.

    Elements within the ``exclude`` collection are cloned but left
    un-annotated.
    """
    annotations = {'_orm_adapt': True}
    return sql_util._deep_annotate(element, annotations, exclude)
|
||||
|
||||
|
||||
def _orm_deannotate(element):
    """Remove the annotations that tie a column to a particular mapping.

    Only the "_orm_adapt" and "parententity" keys are stripped; the
    "remote" and "foreign" annotations applied by :func:`.orm.foreign`
    and :func:`.orm.remote` are left intact.
    """
    stripped_keys = ("_orm_adapt", "parententity")
    return sql_util._deep_deannotate(element, values=stripped_keys)
|
||||
|
||||
|
||||
def _orm_full_deannotate(element):
    """Deep-copy *element* with every annotation removed (no value
    filter, unlike :func:`._orm_deannotate`)."""
    cleaned = sql_util._deep_deannotate(element)
    return cleaned
|
||||
|
||||
|
||||
class _ORMJoin(expression.Join):
    """Extend Join to support ORM constructs as input."""

    __visit_name__ = expression.Join.__visit_name__

    def __init__(self, left, right, onclause=None, isouter=False):
        # ``left``/``right`` may be mapped classes, AliasedClass objects,
        # or plain Core selectables; inspection normalizes each side.

        left_info = inspection.inspect(left)
        # A previous _ORMJoin stores its right-hand info as
        # ``_joined_from_info``; chained joins resolve string onclauses
        # against that entity rather than the raw join construct.
        left_orm_info = getattr(left, '_joined_from_info', left_info)

        right_info = inspection.inspect(right)
        adapt_to = right_info.selectable

        self._joined_from_info = right_info

        # A string onclause names a relationship attribute on the
        # left-hand entity.
        if isinstance(onclause, util.string_types):
            onclause = getattr(left_orm_info.entity, onclause)

        # Resolve the onclause down to a MapperProperty (``prop``) plus
        # the selectable it originates from, when it is relationship-based;
        # otherwise ``prop`` stays None and onclause is a raw SQL expression.
        if isinstance(onclause, attributes.QueryableAttribute):
            on_selectable = onclause.comparator._source_selectable()
            prop = onclause.property
        elif isinstance(onclause, MapperProperty):
            prop = onclause
            on_selectable = prop.parent.selectable
        else:
            prop = None

        if prop:
            # Adapt from the attribute's own selectable only if it is
            # actually present within the left side (e.g. an aliased
            # entity); otherwise adapt from the left selectable itself.
            if sql_util.clause_is_present(
                    on_selectable, left_info.selectable):
                adapt_from = on_selectable
            else:
                adapt_from = left_info.selectable

            # pj/sj = primary/secondary join criteria; ``secondary`` is
            # the association table for many-to-many relationships.
            pj, sj, source, dest, \
                secondary, target_adapter = prop._create_joins(
                    source_selectable=adapt_from,
                    dest_selectable=adapt_to,
                    source_polymorphic=True,
                    dest_polymorphic=True,
                    of_type=right_info.mapper)

            if sj is not None:
                # Many-to-many: fold the secondary table into one side so
                # the final construct is still a two-element join.
                if isouter:
                    # note this is an inner join from secondary->right
                    right = sql.join(secondary, right, sj)
                    onclause = pj
                else:
                    left = sql.join(left, secondary, pj, isouter)
                    onclause = sj
            else:
                onclause = pj
            self._target_adapter = target_adapter

        expression.Join.__init__(self, left, right, onclause, isouter)

    def join(self, right, onclause=None, isouter=False, join_to_left=None):
        # ``join_to_left`` is accepted for backwards compatibility but
        # ignored (deprecated as of 0.8.1).
        return _ORMJoin(self, right, onclause, isouter)

    def outerjoin(self, right, onclause=None, join_to_left=None):
        # Outer-join variant of join(); ``join_to_left`` likewise ignored.
        return _ORMJoin(self, right, onclause, True)
|
||||
|
||||
|
||||
def join(left, right, onclause=None, isouter=False, join_to_left=None):
    """Produce an inner join between left and right clauses.

    :func:`.orm.join` extends the core join interface provided by
    :func:`.sql.expression.join()`: the left and right selectables may
    be not only core selectable objects such as :class:`.Table`, but
    also mapped classes or :class:`.AliasedClass` instances.  The "on"
    clause can be a SQL expression, or an attribute or string name
    referencing a configured :func:`.relationship`.

    :func:`.orm.join` is not commonly needed in modern usage, as its
    functionality is encapsulated within that of the
    :meth:`.Query.join` method, which features a significant amount of
    automation beyond :func:`.orm.join` by itself.  Explicit usage of
    :func:`.orm.join` with :class:`.Query` involves usage of the
    :meth:`.Query.select_from` method, as in::

        from sqlalchemy.orm import join
        session.query(User).\\
            select_from(join(User, Address, User.addresses)).\\
            filter(Address.email_address=='foo@bar.com')

    In modern SQLAlchemy the above join can be written more succinctly
    as::

        session.query(User).\\
            join(User.addresses).\\
            filter(Address.email_address=='foo@bar.com')

    See :meth:`.Query.join` for information on modern usage of ORM
    level joins.

    .. versionchanged:: 0.8.1 - the ``join_to_left`` parameter
        is no longer used, and is deprecated.

    """
    return _ORMJoin(left, right, onclause, isouter=isouter)
|
||||
|
||||
|
||||
def outerjoin(left, right, onclause=None, join_to_left=None):
    """Produce a left outer join between left and right clauses.

    The "outer join" counterpart of the :func:`.orm.join` function;
    behavior is identical except that an OUTER JOIN is generated.  See
    that function's documentation for other usage details.
    """
    return _ORMJoin(left, right, onclause, isouter=True)
|
||||
|
||||
|
||||
def with_parent(instance, prop):
    """Create filtering criterion that relates this query's primary entity
    to the given related instance, using established
    :func:`.relationship()` configuration.

    The SQL rendered is the same as that rendered when a lazy loader
    would fire off from the given parent on that attribute, meaning
    that the appropriate state is taken from the parent object in
    Python without the need to render joins to the parent table in the
    rendered statement.

    .. versionchanged:: 0.6.4
        This method accepts parent instances in all persistence states,
        including transient, persistent, and detached.  Only the
        requisite primary key/foreign key attributes need to be
        populated.  Previous versions didn't work with transient
        instances.

    :param instance:
      An instance which has some :func:`.relationship`.

    :param property:
      String property name, or class-bound attribute, which indicates
      what relationship from the instance should be used to reconcile
      the parent/child relationship.

    """
    # Normalize ``prop`` down to a MapperProperty: a string names an
    # attribute on the instance's mapped class; a class-bound attribute
    # carries its property directly.
    if isinstance(prop, util.string_types):
        prop = getattr(object_mapper(instance).class_, prop).property
    elif isinstance(prop, attributes.QueryableAttribute):
        prop = prop.property

    return prop.compare(
        operators.eq, instance, value_is_parent=True)
|
||||
|
||||
|
||||
def has_identity(object):
    """Return True if the given object has a database identity.

    This typically corresponds to the object being in either the
    persistent or detached state.

    .. seealso::

        :func:`.was_deleted`

    """
    return attributes.instance_state(object).has_identity
|
||||
|
||||
|
||||
def was_deleted(object):
    """Return True if the given object was deleted within a session
    flush.

    .. versionadded:: 0.8.0

    """
    return attributes.instance_state(object).deleted
|
||||
|
||||
|
||||
def randomize_unitofwork():
    """Use random-ordering sets within the unit of work in order
    to detect unit of work sorting issues.

    This utility helps reproduce inconsistent unit-of-work ordering
    bugs.  For example, if objects of kind A and B are being inserted
    and B has a foreign key reference to A, the A must be inserted
    first; but with no relationship configured between them, the unit
    of work has no sorting cue, and the operation may or may not fail
    depending on how the ordering works out.  Because Python sets and
    dictionaries iterate in non-deterministic order, such an issue can
    appear on some runs and not others and depends heavily on
    interpreter state — a classic "heisenbug" where irrelevant changes
    to the test program alter the failure behavior.

    Calling ``randomize_unitofwork()`` at script startup randomizes
    the ordering of the key sets used inside the unit of work, so a
    failing script can be minimized down to the fundamental mapping
    and operation at fault while still reproducing the issue on at
    least some runs.

    Also available when running the test suite via the
    ``--reversetop`` flag.

    .. versionadded:: 0.8.1 created a standalone version of the
        ``--reversetop`` feature.

    """
    from sqlalchemy.orm import unitofwork, session, mapper, dependency
    from sqlalchemy.util import topological
    from sqlalchemy.testing.util import RandomSet
    # Swap the ``set`` name in each participating module for the
    # randomized variant.
    for module in (topological, unitofwork, session, mapper, dependency):
        module.set = RandomSet
|
||||
Loading…
Add table
Add a link
Reference in a new issue