run update
This commit is contained in:
parent
e85b9478a7
commit
c6f20c5f92
515 changed files with 22459 additions and 12734 deletions
275
lib/python3.7/site-packages/sqlalchemy/orm/__init__.py
Normal file
275
lib/python3.7/site-packages/sqlalchemy/orm/__init__.py
Normal file
|
|
@ -0,0 +1,275 @@
|
|||
# orm/__init__.py
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""
|
||||
Functional constructs for ORM configuration.
|
||||
|
||||
See the SQLAlchemy object relational tutorial and mapper configuration
|
||||
documentation for an overview of how this module is used.
|
||||
|
||||
"""
|
||||
|
||||
from . import exc
|
||||
from .mapper import (
|
||||
Mapper,
|
||||
_mapper_registry,
|
||||
class_mapper,
|
||||
configure_mappers,
|
||||
reconstructor,
|
||||
validates
|
||||
)
|
||||
from .interfaces import (
|
||||
EXT_CONTINUE,
|
||||
EXT_STOP,
|
||||
PropComparator,
|
||||
)
|
||||
from .deprecated_interfaces import (
|
||||
MapperExtension,
|
||||
SessionExtension,
|
||||
AttributeExtension,
|
||||
)
|
||||
from .util import (
|
||||
aliased,
|
||||
join,
|
||||
object_mapper,
|
||||
outerjoin,
|
||||
polymorphic_union,
|
||||
was_deleted,
|
||||
with_parent,
|
||||
with_polymorphic,
|
||||
)
|
||||
from .properties import ColumnProperty
|
||||
from .relationships import RelationshipProperty
|
||||
from .descriptor_props import (
|
||||
ComparableProperty,
|
||||
CompositeProperty,
|
||||
SynonymProperty,
|
||||
)
|
||||
from .relationships import (
|
||||
foreign,
|
||||
remote,
|
||||
)
|
||||
from .session import (
|
||||
Session,
|
||||
object_session,
|
||||
sessionmaker,
|
||||
make_transient,
|
||||
make_transient_to_detached
|
||||
)
|
||||
from .scoping import (
|
||||
scoped_session
|
||||
)
|
||||
from . import mapper as mapperlib
|
||||
from .query import AliasOption, Query, Bundle
|
||||
from ..util.langhelpers import public_factory
|
||||
from .. import util as _sa_util
|
||||
from . import strategies as _strategies
|
||||
|
||||
|
||||
def create_session(bind=None, **kwargs):
    # NOTE: docstring is a raw string because it contains ``\*\*`` rST
    # escapes; in a regular string these are invalid escape sequences and
    # emit SyntaxWarning/DeprecationWarning on modern Python.
    r"""Create a new :class:`.Session`
    with no automation enabled by default.

    This function is used primarily for testing.   The usual
    route to :class:`.Session` creation is via its constructor
    or the :func:`.sessionmaker` function.

    :param bind: optional, a single Connectable to use for all
      database access in the created
      :class:`~sqlalchemy.orm.session.Session`.

    :param \*\*kwargs: optional, passed through to the
      :class:`.Session` constructor.

    :returns: an :class:`~sqlalchemy.orm.session.Session` instance

    The defaults of create_session() are the opposite of that of
    :func:`sessionmaker`; ``autoflush`` and ``expire_on_commit`` are
    False, ``autocommit`` is True.  In this sense the session acts
    more like the "classic" SQLAlchemy 0.3 session with these.

    Usage::

        >>> from sqlalchemy.orm import create_session
        >>> session = create_session()

    It is recommended to use :func:`sessionmaker` instead of
    create_session().

    """
    # setdefault so explicit caller choices always win over these
    # testing-oriented defaults.
    kwargs.setdefault('autoflush', False)
    kwargs.setdefault('autocommit', True)
    kwargs.setdefault('expire_on_commit', False)
    return Session(bind=bind, **kwargs)
|
||||
|
||||
# Public ``relationship()`` API: a documented factory wrapping the
# RelationshipProperty constructor under the name ".orm.relationship".
relationship = public_factory(RelationshipProperty, ".orm.relationship")
||||
|
||||
def relation(*arg, **kw):
    """A synonym for :func:`relationship`."""

    # Legacy name retained for backwards compatibility; forwards all
    # arguments unchanged.
    return relationship(*arg, **kw)
|
||||
|
||||
|
||||
def dynamic_loader(argument, **kw):
    """Construct a dynamically-loading mapper property.

    This is essentially the same as
    using the ``lazy='dynamic'`` argument with :func:`relationship`::

        dynamic_loader(SomeClass)

        # is the same as

        relationship(SomeClass, lazy="dynamic")

    See the section :ref:`dynamic_relationship` for more details
    on dynamic loading.

    """
    # Force the dynamic loading strategy, overriding any caller-supplied
    # "lazy" entry, and hand everything off to relationship().
    return relationship(argument, **dict(kw, lazy='dynamic'))
|
||||
|
||||
|
||||
# Documented public factories for column-based and composite mapped
# attributes, wrapping the corresponding property classes.
column_property = public_factory(ColumnProperty, ".orm.column_property")
composite = public_factory(CompositeProperty, ".orm.composite")
|
||||
|
||||
|
||||
def backref(name, **kwargs):
    """Create a back reference with explicit keyword arguments, which are the
    same arguments one can send to :func:`relationship`.

    Used with the ``backref`` keyword argument to :func:`relationship` in
    place of a string argument, e.g.::

        'items':relationship(
            SomeItem, backref=backref('parent', lazy='subquery'))

    .. seealso::

        :ref:`relationships_backref`

    """
    # A backref is represented simply as a (name, options-dict) pair;
    # the consuming relationship() unpacks it later.
    return name, kwargs
|
||||
|
||||
|
||||
def deferred(*columns, **kw):
    # NOTE: raw docstring — it contains ``\*`` rST escapes which are
    # invalid escape sequences in a regular string literal.
    r"""Indicate a column-based mapped attribute that by default will
    not load unless accessed.

    :param \*columns: columns to be mapped.  This is typically a single
      :class:`.Column` object, however a collection is supported in order
      to support multiple columns mapped under the same attribute.

    :param \**kw: additional keyword arguments passed to
      :class:`.ColumnProperty`.

    .. seealso::

        :ref:`deferred`

    """
    return ColumnProperty(deferred=True, *columns, **kw)
|
||||
|
||||
|
||||
# Documented public factories wrapping Mapper and the descriptor-based
# property classes.
mapper = public_factory(Mapper, ".orm.mapper")

synonym = public_factory(SynonymProperty, ".orm.synonym")

comparable_property = public_factory(ComparableProperty,
                                     ".orm.comparable_property")
|
||||
|
||||
|
||||
@_sa_util.deprecated("0.7", message=":func:`.compile_mappers` "
                     "is renamed to :func:`.configure_mappers`")
def compile_mappers():
    """Initialize the inter-mapper relationships of all mappers that have
    been defined.

    Deprecated since 0.7; this simply delegates to
    :func:`.configure_mappers`.

    """
    configure_mappers()
|
||||
|
||||
|
||||
def clear_mappers():
    """Remove all mappers from all classes.

    This function removes all instrumentation from classes and disposes
    of their associated mappers.  Once called, the classes are unmapped
    and can be later re-mapped with new mappers.

    :func:`.clear_mappers` is *not* for normal use, as there is literally no
    valid usage for it outside of very specific testing scenarios. Normally,
    mappers are permanent structural components of user-defined classes, and
    are never discarded independently of their class.  If a mapped class
    itself is garbage collected, its mapper is automatically disposed of as
    well. As such, :func:`.clear_mappers` is only for usage in test suites
    that re-use the same classes with different mappings, which is itself an
    extremely rare use case - the only such use case is in fact SQLAlchemy's
    own test suite, and possibly the test suites of other ORM extension
    libraries which intend to test various combinations of mapper construction
    upon a fixed set of classes.

    """
    # Use the mutex as a context manager rather than manual
    # acquire/try/finally — equivalent behavior, no leaked lock on error.
    with mapperlib._CONFIGURE_MUTEX:
        while _mapper_registry:
            try:
                # can't even reliably call list(weakdict) in jython
                mapper, b = _mapper_registry.popitem()
                mapper.dispose()
            except KeyError:
                # entry vanished concurrently (weak registry); keep going
                pass
|
||||
|
||||
from . import strategy_options

# Public loader-option functions.  Each is the "unbound" form of the
# corresponding generative method on strategy_options; the *_all names use
# the unbound-all variant.
joinedload = strategy_options.joinedload._unbound_fn
joinedload_all = strategy_options.joinedload._unbound_all_fn
contains_eager = strategy_options.contains_eager._unbound_fn
defer = strategy_options.defer._unbound_fn
undefer = strategy_options.undefer._unbound_fn
undefer_group = strategy_options.undefer_group._unbound_fn
load_only = strategy_options.load_only._unbound_fn
lazyload = strategy_options.lazyload._unbound_fn
lazyload_all = strategy_options.lazyload_all._unbound_all_fn
subqueryload = strategy_options.subqueryload._unbound_fn
subqueryload_all = strategy_options.subqueryload_all._unbound_all_fn
immediateload = strategy_options.immediateload._unbound_fn
noload = strategy_options.noload._unbound_fn
defaultload = strategy_options.defaultload._unbound_fn

from .strategy_options import Load
|
||||
|
||||
|
||||
def eagerload(*args, **kwargs):
    """A synonym for :func:`joinedload()`."""
    # Legacy name; forwards all arguments unchanged.
    return joinedload(*args, **kwargs)
|
||||
|
||||
|
||||
def eagerload_all(*args, **kwargs):
    """A synonym for :func:`joinedload_all()`"""
    # Legacy name; forwards all arguments unchanged.
    return joinedload_all(*args, **kwargs)
|
||||
|
||||
|
||||
# Documented public factory wrapping the AliasOption query option.
contains_alias = public_factory(AliasOption, ".orm.contains_alias")
||||
|
||||
def __go(lcls):
    # Module finalization: compute __all__ from the module namespace
    # passed in as *lcls* and resolve deferred cross-module dependencies.
    global __all__
    # NOTE(review): ``sa_util`` is not referenced below; presumably this
    # import (like ``dynamic`` and ``events``) is for side effects — confirm.
    from .. import util as sa_util
    from . import dynamic
    from . import events
    import inspect as _inspect

    # Public API = every module-level name that is neither private nor a
    # module object.
    __all__ = sorted(name for name, obj in lcls.items()
                     if not (name.startswith('_') or _inspect.ismodule(obj)))

    _sa_util.dependencies.resolve_all("sqlalchemy.orm")

__go(locals())
|
||||
1598
lib/python3.7/site-packages/sqlalchemy/orm/attributes.py
Normal file
1598
lib/python3.7/site-packages/sqlalchemy/orm/attributes.py
Normal file
File diff suppressed because it is too large
Load diff
540
lib/python3.7/site-packages/sqlalchemy/orm/base.py
Normal file
540
lib/python3.7/site-packages/sqlalchemy/orm/base.py
Normal file
|
|
@ -0,0 +1,540 @@
|
|||
# orm/base.py
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Constants and rudimental functions used throughout the ORM.
|
||||
|
||||
"""
|
||||
|
||||
from .. import util, inspection, exc as sa_exc
|
||||
from ..sql import expression
|
||||
from . import exc
|
||||
import operator
|
||||
|
||||
# ----------------------------------------------------------------------
# Result symbols returned by loader callables / history retrieval.
# ----------------------------------------------------------------------

PASSIVE_NO_RESULT = util.symbol(
    'PASSIVE_NO_RESULT',
    """Symbol returned by a loader callable or other attribute/history
    retrieval operation when a value could not be determined, based
    on loader callable flags.
    """
)

ATTR_WAS_SET = util.symbol(
    'ATTR_WAS_SET',
    """Symbol returned by a loader callable to indicate the
    retrieved value, or values, were assigned to their attributes
    on the target object.
    """
)

ATTR_EMPTY = util.symbol(
    'ATTR_EMPTY',
    """Symbol used internally to indicate an attribute had no callable."""
)

NO_VALUE = util.symbol(
    'NO_VALUE',
    """Symbol which may be placed as the 'previous' value of an attribute,
    indicating no value was loaded for an attribute when it was modified,
    and flags indicated we were not to load it.
    """
)

NEVER_SET = util.symbol(
    'NEVER_SET',
    """Symbol which may be placed as the 'previous' value of an attribute
    indicating that the attribute had not been assigned to previously.
    """
)

# ----------------------------------------------------------------------
# Individual loader-callable flag bits; the "canonical" values are powers
# of two so they can be OR-ed / XOR-ed into the composite flags below.
# ----------------------------------------------------------------------

NO_CHANGE = util.symbol(
    "NO_CHANGE",
    """No callables or SQL should be emitted on attribute access
    and no state should change
    """, canonical=0
)

CALLABLES_OK = util.symbol(
    "CALLABLES_OK",
    """Loader callables can be fired off if a value
    is not present.
    """, canonical=1
)

SQL_OK = util.symbol(
    "SQL_OK",
    """Loader callables can emit SQL at least on scalar value attributes.""",
    canonical=2
)

RELATED_OBJECT_OK = util.symbol(
    "RELATED_OBJECT_OK",
    """Callables can use SQL to load related objects as well
    as scalar value attributes.
    """, canonical=4
)

INIT_OK = util.symbol(
    "INIT_OK",
    """Attributes should be initialized with a blank
    value (None or an empty collection) upon get, if no other
    value can be obtained.
    """, canonical=8
)

NON_PERSISTENT_OK = util.symbol(
    "NON_PERSISTENT_OK",
    """Callables can be emitted if the parent is not persistent.""",
    canonical=16
)

LOAD_AGAINST_COMMITTED = util.symbol(
    "LOAD_AGAINST_COMMITTED",
    """Callables should use committed values as primary/foreign keys during a
    load.
    """, canonical=32
)

NO_AUTOFLUSH = util.symbol(
    "NO_AUTOFLUSH",
    """Loader callables should disable autoflush.""",
    canonical=64
)

# pre-packaged sets of flags used as inputs
PASSIVE_OFF = util.symbol(
    "PASSIVE_OFF",
    "Callables can be emitted in all cases.",
    canonical=(RELATED_OBJECT_OK | NON_PERSISTENT_OK |
               INIT_OK | CALLABLES_OK | SQL_OK)
)
PASSIVE_RETURN_NEVER_SET = util.symbol(
    "PASSIVE_RETURN_NEVER_SET",
    """PASSIVE_OFF ^ INIT_OK""",
    canonical=PASSIVE_OFF ^ INIT_OK
)
PASSIVE_NO_INITIALIZE = util.symbol(
    "PASSIVE_NO_INITIALIZE",
    "PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK",
    canonical=PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK
)
PASSIVE_NO_FETCH = util.symbol(
    "PASSIVE_NO_FETCH",
    "PASSIVE_OFF ^ SQL_OK",
    canonical=PASSIVE_OFF ^ SQL_OK
)
PASSIVE_NO_FETCH_RELATED = util.symbol(
    "PASSIVE_NO_FETCH_RELATED",
    "PASSIVE_OFF ^ RELATED_OBJECT_OK",
    canonical=PASSIVE_OFF ^ RELATED_OBJECT_OK
)
PASSIVE_ONLY_PERSISTENT = util.symbol(
    "PASSIVE_ONLY_PERSISTENT",
    "PASSIVE_OFF ^ NON_PERSISTENT_OK",
    canonical=PASSIVE_OFF ^ NON_PERSISTENT_OK
)

# Attribute names under which per-class and per-instance ORM bookkeeping
# objects are stored on user classes/instances.
DEFAULT_MANAGER_ATTR = '_sa_class_manager'
DEFAULT_STATE_ATTR = '_sa_instance_state'
_INSTRUMENTOR = ('mapper', 'instrumentor')

# Extension-hook return symbols (see MapperExtension et al).
EXT_CONTINUE = util.symbol('EXT_CONTINUE')
EXT_STOP = util.symbol('EXT_STOP')

# Relationship direction symbols.
ONETOMANY = util.symbol(
    'ONETOMANY',
    """Indicates the one-to-many direction for a :func:`.relationship`.

    This symbol is typically used by the internals but may be exposed within
    certain API features.

    """)

MANYTOONE = util.symbol(
    'MANYTOONE',
    """Indicates the many-to-one direction for a :func:`.relationship`.

    This symbol is typically used by the internals but may be exposed within
    certain API features.

    """)

MANYTOMANY = util.symbol(
    'MANYTOMANY',
    """Indicates the many-to-many direction for a :func:`.relationship`.

    This symbol is typically used by the internals but may be exposed within
    certain API features.

    """)

NOT_EXTENSION = util.symbol(
    'NOT_EXTENSION',
    """Symbol indicating an :class:`InspectionAttr` that's
    not part of sqlalchemy.ext.

    Is assigned to the :attr:`.InspectionAttr.extension_type`
    attibute.

    """)

# Convenience frozensets for membership tests against "never set" /
# "effectively None" sentinel values.
_never_set = frozenset([NEVER_SET])

_none_set = frozenset([None, NEVER_SET, PASSIVE_NO_RESULT])

_SET_DEFERRED_EXPIRED = util.symbol("SET_DEFERRED_EXPIRED")

_DEFER_FOR_STATE = util.symbol("DEFER_FOR_STATE")
|
||||
|
||||
|
||||
def _generative(*assertions):
    """Mark a method as generative, e.g. method-chained.

    The wrapped method runs against a clone of its object and the clone is
    returned; each *assertion* callable is invoked on the clone first.
    """

    @util.decorator
    def generate(fn, *args, **kw):
        # args[0] is the original "self"; operate on a clone so the
        # original object is left untouched.
        cloned = args[0]._clone()
        for assertion in assertions:
            assertion(cloned, fn.__name__)
        fn(cloned, *args[1:], **kw)
        return cloned
    return generate
|
||||
|
||||
|
||||
# these can be replaced by sqlalchemy.ext.instrumentation
# if augmented class instrumentation is enabled.
def manager_of_class(cls):
    """Return the class manager stored directly on *cls*, or None.

    Only the class's own ``__dict__`` is consulted, so a manager set on a
    superclass is not returned.
    """
    return cls.__dict__.get(DEFAULT_MANAGER_ATTR)
|
||||
|
||||
# Accessor returning the InstanceState stored on a mapped instance
# (under DEFAULT_STATE_ATTR, i.e. '_sa_instance_state').
instance_state = operator.attrgetter(DEFAULT_STATE_ATTR)

# Accessor returning an instance's attribute dictionary.
instance_dict = operator.attrgetter('__dict__')
|
||||
|
||||
|
||||
def instance_str(instance):
    """Return a string describing an instance."""

    # Formats via the instance's InstanceState; see state_str().
    return state_str(instance_state(instance))
|
||||
|
||||
|
||||
def state_str(state):
    """Return a string describing an instance via its InstanceState."""

    # Guard-clause form: None stringifies to "None", otherwise render the
    # mapped class name and the id() of the tracked object.
    if state is None:
        return "None"
    return '<%s at 0x%x>' % (state.class_.__name__, id(state.obj()))
|
||||
|
||||
|
||||
def state_class_str(state):
    """Return a string describing an instance's class via its
    InstanceState.
    """

    # None renders as "None"; otherwise just the mapped class name.
    if state is None:
        return "None"
    return '<%s>' % (state.class_.__name__, )
|
||||
|
||||
|
||||
def attribute_str(instance, attribute):
    """Return '<instance description>.<attribute>' for error messages."""
    return "%s.%s" % (instance_str(instance), attribute)
|
||||
|
||||
|
||||
def state_attribute_str(state, attribute):
    """Return '<state description>.<attribute>' for error messages."""
    return "%s.%s" % (state_str(state), attribute)
|
||||
|
||||
|
||||
def object_mapper(instance):
    """Given an object, return the primary Mapper associated with the object
    instance.

    Raises :class:`sqlalchemy.orm.exc.UnmappedInstanceError`
    if no mapping is configured.

    This function is available via the inspection system as::

        inspect(instance).mapper

    Using the inspection system will raise
    :class:`sqlalchemy.exc.NoInspectionAvailable` if the instance is
    not part of a mapping.

    """
    # object_state() performs the unmapped-instance check and raises.
    return object_state(instance).mapper
|
||||
|
||||
|
||||
def object_state(instance):
    """Given an object, return the :class:`.InstanceState`
    associated with the object.

    Raises :class:`sqlalchemy.orm.exc.UnmappedInstanceError`
    if no mapping is configured.

    Equivalent functionality is available via the :func:`.inspect`
    function as::

        inspect(instance)

    Using the inspection system will raise
    :class:`sqlalchemy.exc.NoInspectionAvailable` if the instance is
    not part of a mapping.

    """
    # Guard clause instead of if/else: unmapped objects raise immediately.
    state = _inspect_mapped_object(instance)
    if state is None:
        raise exc.UnmappedInstanceError(instance)
    return state
|
||||
|
||||
|
||||
@inspection._inspects(object)
def _inspect_mapped_object(instance):
    """Return the InstanceState for *instance*, or None if it is not a
    mapped object."""
    try:
        return instance_state(instance)
    # exc.NO_STATE is itself a tuple of exception classes, so it can be
    # concatenated with UnmappedClassError into a single except clause —
    # this resolves the old "py-2/3 syntax to catch two kinds at once" TODO.
    except (exc.UnmappedClassError, ) + exc.NO_STATE:
        return None
|
||||
|
||||
|
||||
def _class_to_mapper(class_or_mapper):
    """Coerce a class or mapper argument to its :class:`.Mapper`,
    raising ``UnmappedClassError`` when inspection yields nothing."""
    insp = inspection.inspect(class_or_mapper, False)
    if insp is None:
        raise exc.UnmappedClassError(class_or_mapper)
    return insp.mapper
|
||||
|
||||
|
||||
def _mapper_or_none(entity):
    """Return the :class:`.Mapper` for the given class or None if the
    class is not mapped.
    """
    insp = inspection.inspect(entity, False)
    return None if insp is None else insp.mapper
|
||||
|
||||
|
||||
def _is_mapped_class(entity):
    """Return True if the given object is a mapped class,
    :class:`.Mapper`, or :class:`.AliasedClass`.
    """
    insp = inspection.inspect(entity, False)
    # Early-out on non-inspectable targets and clause elements.
    if insp is None or insp.is_clause_element:
        return False
    return insp.is_mapper or insp.is_aliased_class
|
||||
|
||||
|
||||
def _attr_as_key(attr):
|
||||
if hasattr(attr, 'key'):
|
||||
return attr.key
|
||||
else:
|
||||
return expression._column_as_key(attr)
|
||||
|
||||
|
||||
def _orm_columns(entity):
    """Return the columns of *entity*'s selectable, or ``[entity]`` when
    the inspected object exposes no selectable."""
    insp = inspection.inspect(entity, False)
    if hasattr(insp, 'selectable'):
        return list(insp.selectable.c)
    return [entity]
|
||||
|
||||
|
||||
def _is_aliased_class(entity):
    """Return True when inspection reports *entity* as an aliased class."""
    insp = inspection.inspect(entity, False)
    if insp is None:
        return False
    return getattr(insp, "is_aliased_class", False)
|
||||
|
||||
|
||||
def _entity_descriptor(entity, key):
    """Return a class attribute given an entity and string name.

    May return :class:`.InstrumentedAttribute` or user-defined
    attribute.

    """
    insp = inspection.inspect(entity)
    # Normalize the lookup target and keep a human-readable description
    # for the error message; branch order matters (selectable first,
    # then aliased class, then anything carrying a mapper).
    if insp.is_selectable:
        description, entity = entity, insp.c
    elif insp.is_aliased_class:
        description = entity = insp.entity
    elif hasattr(insp, "mapper"):
        description = entity = insp.mapper.class_
    else:
        description = entity

    try:
        return getattr(entity, key)
    except AttributeError:
        raise sa_exc.InvalidRequestError(
            "Entity '%s' has no property '%s'" %
            (description, key)
        )
|
||||
|
||||
# Fetch the Mapper for an InstanceState via the dotted attribute path
# 'manager.mapper'.
_state_mapper = util.dottedgetter('manager.mapper')
|
||||
|
||||
|
||||
@inspection._inspects(type)
def _inspect_mapped_class(class_, configure=False):
    """Return the Mapper for *class_*, or None when the class carries no
    mapped class manager; optionally trigger mapper configuration."""
    try:
        manager = manager_of_class(class_)
        if not manager.is_mapped:
            return None
        mapper = manager.mapper
    except exc.NO_STATE:
        return None
    # Reached only on success (the except path returns above).
    if configure and mapper._new_mappers:
        mapper._configure_all()
    return mapper
|
||||
|
||||
|
||||
def class_mapper(class_, configure=True):
    """Given a class, return the primary :class:`.Mapper` associated
    with the key.

    Raises :exc:`.UnmappedClassError` if no mapping is configured
    on the given class, or :exc:`.ArgumentError` if a non-class
    object is passed.

    Equivalent functionality is available via the :func:`.inspect`
    function as::

        inspect(some_mapped_class)

    Using the inspection system will raise
    :class:`sqlalchemy.exc.NoInspectionAvailable` if the class is not mapped.

    """
    # Success path first; error analysis only happens when inspection
    # yields nothing.
    mapper = _inspect_mapped_class(class_, configure=configure)
    if mapper is not None:
        return mapper
    if not isinstance(class_, type):
        raise sa_exc.ArgumentError(
            "Class object expected, got '%r'." % (class_, ))
    raise exc.UnmappedClassError(class_)
|
||||
|
||||
|
||||
class InspectionAttr(object):
    """A base class applied to all ORM objects that can be returned
    by the :func:`.inspect` function.

    The attributes defined here allow the usage of simple boolean
    checks to test basic facts about the object returned.

    While the boolean checks here are basically the same as using
    the Python isinstance() function, the flags here can be used without
    the need to import all of these classes, and also such that
    the SQLAlchemy class system can change while leaving the flags
    here intact for forwards-compatibility.

    """
    # No per-instance __dict__; keeps the class usable as a slots-style
    # mixin.
    __slots__ = ()

    is_selectable = False
    """Return True if this object is an instance of :class:`.Selectable`."""

    is_aliased_class = False
    """True if this object is an instance of :class:`.AliasedClass`."""

    is_instance = False
    """True if this object is an instance of :class:`.InstanceState`."""

    is_mapper = False
    """True if this object is an instance of :class:`.Mapper`."""

    is_property = False
    """True if this object is an instance of :class:`.MapperProperty`."""

    is_attribute = False
    """True if this object is a Python :term:`descriptor`.

    This can refer to one of many types.  Usually a
    :class:`.QueryableAttribute` which handles attributes events on behalf
    of a :class:`.MapperProperty`.  But can also be an extension type
    such as :class:`.AssociationProxy` or :class:`.hybrid_property`.
    The :attr:`.InspectionAttr.extension_type` will refer to a constant
    identifying the specific subtype.

    .. seealso::

        :attr:`.Mapper.all_orm_descriptors`

    """

    is_clause_element = False
    """True if this object is an instance of :class:`.ClauseElement`."""

    extension_type = NOT_EXTENSION
    """The extension type, if any.
    Defaults to :data:`.interfaces.NOT_EXTENSION`

    .. versionadded:: 0.8.0

    .. seealso::

        :data:`.HYBRID_METHOD`

        :data:`.HYBRID_PROPERTY`

        :data:`.ASSOCIATION_PROXY`

    """
|
||||
|
||||
|
||||
class InspectionAttrInfo(InspectionAttr):
    """Adds the ``.info`` attribute to :class:`.InspectionAttr`.

    The rationale for :class:`.InspectionAttr` vs. :class:`.InspectionAttrInfo`
    is that the former is compatible as a mixin for classes that specify
    ``__slots__``; this is essentially an implementation artifact.

    """

    @util.memoized_property
    def info(self):
        """Info dictionary associated with the object, allowing user-defined
        data to be associated with this :class:`.InspectionAttr`.

        The dictionary is generated when first accessed.  Alternatively,
        it can be specified as a constructor argument to the
        :func:`.column_property`, :func:`.relationship`, or :func:`.composite`
        functions.

        .. versionadded:: 0.8  Added support for .info to all
           :class:`.MapperProperty` subclasses.

        .. versionchanged:: 1.0.0 :attr:`.MapperProperty.info` is also
           available on extension types via the
           :attr:`.InspectionAttrInfo.info` attribute, so that it can apply
           to a wider variety of ORM and extension constructs.

        .. seealso::

            :attr:`.QueryableAttribute.info`

            :attr:`.SchemaItem.info`

        """
        # Starts empty; presumably util.memoized_property caches the dict
        # per instance on first access — confirm against util module.
        return {}
|
||||
|
||||
|
||||
class _MappedAttribute(object):
    """Mixin for attributes which should be replaced by mapper-assigned
    attributes.

    """
    # No instance state of its own; keeps subclasses slots-compatible.
    __slots__ = ()
|
||||
1580
lib/python3.7/site-packages/sqlalchemy/orm/collections.py
Normal file
1580
lib/python3.7/site-packages/sqlalchemy/orm/collections.py
Normal file
File diff suppressed because it is too large
Load diff
1175
lib/python3.7/site-packages/sqlalchemy/orm/dependency.py
Normal file
1175
lib/python3.7/site-packages/sqlalchemy/orm/dependency.py
Normal file
File diff suppressed because it is too large
Load diff
|
|
@ -0,0 +1,487 @@
|
|||
# orm/deprecated_interfaces.py
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
from .. import event, util
|
||||
from .interfaces import EXT_CONTINUE
|
||||
|
||||
|
||||
@util.langhelpers.dependency_for("sqlalchemy.orm.interfaces")
|
||||
class MapperExtension(object):
|
||||
"""Base implementation for :class:`.Mapper` event hooks.
|
||||
|
||||
.. note::
|
||||
|
||||
:class:`.MapperExtension` is deprecated. Please
|
||||
refer to :func:`.event.listen` as well as
|
||||
:class:`.MapperEvents`.
|
||||
|
||||
New extension classes subclass :class:`.MapperExtension` and are specified
|
||||
using the ``extension`` mapper() argument, which is a single
|
||||
:class:`.MapperExtension` or a list of such::
|
||||
|
||||
from sqlalchemy.orm.interfaces import MapperExtension
|
||||
|
||||
class MyExtension(MapperExtension):
|
||||
def before_insert(self, mapper, connection, instance):
|
||||
print "instance %s before insert !" % instance
|
||||
|
||||
m = mapper(User, users_table, extension=MyExtension())
|
||||
|
||||
A single mapper can maintain a chain of ``MapperExtension``
|
||||
objects. When a particular mapping event occurs, the
|
||||
corresponding method on each ``MapperExtension`` is invoked
|
||||
serially, and each method has the ability to halt the chain
|
||||
from proceeding further::
|
||||
|
||||
m = mapper(User, users_table, extension=[ext1, ext2, ext3])
|
||||
|
||||
Each ``MapperExtension`` method returns the symbol
|
||||
EXT_CONTINUE by default. This symbol generally means "move
|
||||
to the next ``MapperExtension`` for processing". For methods
|
||||
that return objects like translated rows or new object
|
||||
instances, EXT_CONTINUE means the result of the method
|
||||
should be ignored. In some cases it's required for a
|
||||
default mapper activity to be performed, such as adding a
|
||||
new instance to a result list.
|
||||
|
||||
The symbol EXT_STOP has significance within a chain
|
||||
of ``MapperExtension`` objects that the chain will be stopped
|
||||
when this symbol is returned. Like EXT_CONTINUE, it also
|
||||
has additional significance in some cases that a default
|
||||
mapper activity will not be performed.
|
||||
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def _adapt_instrument_class(cls, self, listener):
|
||||
cls._adapt_listener_methods(self, listener, ('instrument_class',))
|
||||
|
||||
@classmethod
|
||||
def _adapt_listener(cls, self, listener):
|
||||
cls._adapt_listener_methods(
|
||||
self, listener,
|
||||
(
|
||||
'init_instance',
|
||||
'init_failed',
|
||||
'reconstruct_instance',
|
||||
'before_insert',
|
||||
'after_insert',
|
||||
'before_update',
|
||||
'after_update',
|
||||
'before_delete',
|
||||
'after_delete'
|
||||
))
|
||||
|
||||
@classmethod
|
||||
def _adapt_listener_methods(cls, self, listener, methods):
|
||||
|
||||
for meth in methods:
|
||||
me_meth = getattr(MapperExtension, meth)
|
||||
ls_meth = getattr(listener, meth)
|
||||
|
||||
if not util.methods_equivalent(me_meth, ls_meth):
|
||||
if meth == 'reconstruct_instance':
|
||||
def go(ls_meth):
|
||||
def reconstruct(instance, ctx):
|
||||
ls_meth(self, instance)
|
||||
return reconstruct
|
||||
event.listen(self.class_manager, 'load',
|
||||
go(ls_meth), raw=False, propagate=True)
|
||||
elif meth == 'init_instance':
|
||||
def go(ls_meth):
|
||||
def init_instance(instance, args, kwargs):
|
||||
ls_meth(self, self.class_,
|
||||
self.class_manager.original_init,
|
||||
instance, args, kwargs)
|
||||
return init_instance
|
||||
event.listen(self.class_manager, 'init',
|
||||
go(ls_meth), raw=False, propagate=True)
|
||||
elif meth == 'init_failed':
|
||||
def go(ls_meth):
|
||||
def init_failed(instance, args, kwargs):
|
||||
util.warn_exception(
|
||||
ls_meth, self, self.class_,
|
||||
self.class_manager.original_init,
|
||||
instance, args, kwargs)
|
||||
|
||||
return init_failed
|
||||
event.listen(self.class_manager, 'init_failure',
|
||||
go(ls_meth), raw=False, propagate=True)
|
||||
else:
|
||||
event.listen(self, "%s" % meth, ls_meth,
|
||||
raw=False, retval=True, propagate=True)
|
||||
|
||||
def instrument_class(self, mapper, class_):
|
||||
"""Receive a class when the mapper is first constructed, and has
|
||||
applied instrumentation to the mapped class.
|
||||
|
||||
The return value is only significant within the ``MapperExtension``
|
||||
chain; the parent mapper's behavior isn't modified by this method.
|
||||
|
||||
"""
|
||||
return EXT_CONTINUE
|
||||
|
||||
def init_instance(self, mapper, class_, oldinit, instance, args, kwargs):
|
||||
"""Receive an instance when its constructor is called.
|
||||
|
||||
This method is only called during a userland construction of
|
||||
an object. It is not called when an object is loaded from the
|
||||
database.
|
||||
|
||||
The return value is only significant within the ``MapperExtension``
|
||||
chain; the parent mapper's behavior isn't modified by this method.
|
||||
|
||||
"""
|
||||
return EXT_CONTINUE
|
||||
|
||||
    def init_failed(self, mapper, class_, oldinit, instance, args, kwargs):
        """Receive an instance when its constructor has been called,
        and raised an exception.

        This method is only called during a userland construction of
        an object.  It is not called when an object is loaded from the
        database.

        The return value is only significant within the ``MapperExtension``
        chain; the parent mapper's behavior isn't modified by this method.

        """
        # Default no-op: EXT_CONTINUE tells the extension chain to proceed.
        return EXT_CONTINUE
|
||||
|
||||
    def reconstruct_instance(self, mapper, instance):
        """Receive an object instance after it has been created via
        ``__new__``, and after initial attribute population has
        occurred.

        This typically occurs when the instance is created based on
        incoming result rows, and is only called once for that
        instance's lifetime.

        Note that during a result-row load, this method is called upon
        the first row received for this instance.  Note that some
        attributes and collections may or may not be loaded or even
        initialized, depending on what's present in the result rows.

        The return value is only significant within the ``MapperExtension``
        chain; the parent mapper's behavior isn't modified by this method.

        """
        # Default no-op: EXT_CONTINUE tells the extension chain to proceed.
        return EXT_CONTINUE
|
||||
|
||||
    def before_insert(self, mapper, connection, instance):
        """Receive an object instance before that instance is inserted
        into its table.

        This is a good place to set up primary key values and such
        that aren't handled otherwise.

        Column-based attributes can be modified within this method
        which will result in the new value being inserted.  However
        *no* changes to the overall flush plan can be made, and
        manipulation of the ``Session`` will not have the desired effect.
        To manipulate the ``Session`` within an extension, use
        ``SessionExtension``.

        The return value is only significant within the ``MapperExtension``
        chain; the parent mapper's behavior isn't modified by this method.

        """
        # Default no-op: EXT_CONTINUE tells the extension chain to proceed.
        return EXT_CONTINUE
|
||||
|
||||
    def after_insert(self, mapper, connection, instance):
        """Receive an object instance after that instance is inserted.

        The return value is only significant within the ``MapperExtension``
        chain; the parent mapper's behavior isn't modified by this method.

        """
        # Default no-op: EXT_CONTINUE tells the extension chain to proceed.
        return EXT_CONTINUE
|
||||
|
||||
    def before_update(self, mapper, connection, instance):
        """Receive an object instance before that instance is updated.

        Note that this method is called for all instances that are marked as
        "dirty", even those which have no net changes to their column-based
        attributes.  An object is marked as dirty when any of its column-based
        attributes have a "set attribute" operation called or when any of its
        collections are modified.  If, at update time, no column-based
        attributes have any net changes, no UPDATE statement will be issued.
        This means that an instance being sent to before_update is *not* a
        guarantee that an UPDATE statement will be issued (although you can
        affect the outcome here).

        To detect if the column-based attributes on the object have net
        changes, and will therefore generate an UPDATE statement, use
        ``object_session(instance).is_modified(instance,
        include_collections=False)``.

        Column-based attributes can be modified within this method
        which will result in the new value being updated.  However
        *no* changes to the overall flush plan can be made, and
        manipulation of the ``Session`` will not have the desired effect.
        To manipulate the ``Session`` within an extension, use
        ``SessionExtension``.

        The return value is only significant within the ``MapperExtension``
        chain; the parent mapper's behavior isn't modified by this method.

        """
        # Default no-op: EXT_CONTINUE tells the extension chain to proceed.
        return EXT_CONTINUE
|
||||
|
||||
    def after_update(self, mapper, connection, instance):
        """Receive an object instance after that instance is updated.

        The return value is only significant within the ``MapperExtension``
        chain; the parent mapper's behavior isn't modified by this method.

        """
        # Default no-op: EXT_CONTINUE tells the extension chain to proceed.
        return EXT_CONTINUE
|
||||
|
||||
    def before_delete(self, mapper, connection, instance):
        """Receive an object instance before that instance is deleted.

        Note that *no* changes to the overall flush plan can be made
        here; and manipulation of the ``Session`` will not have the
        desired effect.  To manipulate the ``Session`` within an
        extension, use ``SessionExtension``.

        The return value is only significant within the ``MapperExtension``
        chain; the parent mapper's behavior isn't modified by this method.

        """
        # Default no-op: EXT_CONTINUE tells the extension chain to proceed.
        return EXT_CONTINUE
|
||||
|
||||
    def after_delete(self, mapper, connection, instance):
        """Receive an object instance after that instance is deleted.

        The return value is only significant within the ``MapperExtension``
        chain; the parent mapper's behavior isn't modified by this method.

        """
        # Default no-op: EXT_CONTINUE tells the extension chain to proceed.
        return EXT_CONTINUE
|
||||
|
||||
|
||||
@util.langhelpers.dependency_for("sqlalchemy.orm.interfaces")
class SessionExtension(object):

    """Base implementation for :class:`.Session` event hooks.

    .. note::

       :class:`.SessionExtension` is deprecated.  Please
       refer to :func:`.event.listen` as well as
       :class:`.SessionEvents`.

    Subclasses may be installed into a :class:`.Session` (or
    :class:`.sessionmaker`) using the ``extension`` keyword
    argument::

        from sqlalchemy.orm.interfaces import SessionExtension

        class MySessionExtension(SessionExtension):
            def before_commit(self, session):
                print "before commit!"

        Session = sessionmaker(extension=MySessionExtension())

    The same :class:`.SessionExtension` instance can be used
    with any number of sessions.

    """

    @classmethod
    def _adapt_listener(cls, self, listener):
        # Bridge each hook the listener actually overrides onto the modern
        # event system; hooks inherited unchanged from SessionExtension are
        # no-ops and are skipped.
        hook_names = (
            'before_commit',
            'after_commit',
            'after_rollback',
            'before_flush',
            'after_flush',
            'after_flush_postexec',
            'after_begin',
            'after_attach',
            'after_bulk_update',
            'after_bulk_delete',
        )
        for hook in hook_names:
            if not util.methods_equivalent(
                    getattr(SessionExtension, hook),
                    getattr(listener, hook)):
                event.listen(self, hook, getattr(listener, hook))

    def before_commit(self, session):
        """Execute right before commit is called.

        Note that this may not be per-flush if a longer running
        transaction is ongoing."""

    def after_commit(self, session):
        """Execute after a commit has occurred.

        Note that this may not be per-flush if a longer running
        transaction is ongoing."""

    def after_rollback(self, session):
        """Execute after a rollback has occurred.

        Note that this may not be per-flush if a longer running
        transaction is ongoing."""

    def before_flush(self, session, flush_context, instances):
        """Execute before flush process has started.

        `instances` is an optional list of objects which were passed to
        the ``flush()`` method. """

    def after_flush(self, session, flush_context):
        """Execute after flush has completed, but before commit has been
        called.

        Note that the session's state is still in pre-flush, i.e. 'new',
        'dirty', and 'deleted' lists still show pre-flush state as well
        as the history settings on instance attributes."""

    def after_flush_postexec(self, session, flush_context):
        """Execute after flush has completed, and after the post-exec
        state occurs.

        This will be when the 'new', 'dirty', and 'deleted' lists are in
        their final state.  An actual commit() may or may not have
        occurred, depending on whether or not the flush started its own
        transaction or participated in a larger transaction. """

    def after_begin(self, session, transaction, connection):
        """Execute after a transaction is begun on a connection

        `transaction` is the SessionTransaction. This method is called
        after an engine level transaction is begun on a connection. """

    def after_attach(self, session, instance):
        """Execute after an instance is attached to a session.

        This is called after an add, delete or merge. """

    def after_bulk_update(self, session, query, query_context, result):
        """Execute after a bulk update operation to the session.

        This is called after a session.query(...).update()

        `query` is the query object that this update operation was
        called on. `query_context` was the query context object.
        `result` is the result object returned from the bulk operation.
        """

    def after_bulk_delete(self, session, query, query_context, result):
        """Execute after a bulk delete operation to the session.

        This is called after a session.query(...).delete()

        `query` is the query object that this delete operation was
        called on. `query_context` was the query context object.
        `result` is the result object returned from the bulk operation.
        """
|
||||
|
||||
|
||||
@util.langhelpers.dependency_for("sqlalchemy.orm.interfaces")
class AttributeExtension(object):
    """Base implementation for :class:`.AttributeImpl` event hooks, events
    that fire upon attribute mutations in user code.

    .. note::

       :class:`.AttributeExtension` is deprecated.  Please
       refer to :func:`.event.listen` as well as
       :class:`.AttributeEvents`.

    :class:`.AttributeExtension` is used to listen for set,
    remove, and append events on individual mapped attributes.
    It is established on an individual mapped attribute using
    the `extension` argument, available on
    :func:`.column_property`, :func:`.relationship`, and
    others::

        from sqlalchemy.orm.interfaces import AttributeExtension
        from sqlalchemy.orm import mapper, relationship, column_property

        class MyAttrExt(AttributeExtension):
            def append(self, state, value, initiator):
                print "append event !"
                return value

            def set(self, state, value, oldvalue, initiator):
                print "set event !"
                return value

        mapper(SomeClass, sometable, properties={
            'foo':column_property(sometable.c.foo, extension=MyAttrExt()),
            'bar':relationship(Bar, extension=MyAttrExt())
        })

    Note that the :class:`.AttributeExtension` methods
    :meth:`~.AttributeExtension.append` and
    :meth:`~.AttributeExtension.set` need to return the
    ``value`` parameter. The returned value is used as the
    effective value, and allows the extension to change what is
    ultimately persisted.

    AttributeExtension is assembled within the descriptors associated
    with a mapped class.

    """

    active_history = True
    """indicates that the set() method would like to receive the 'old' value,
    even if it means firing lazy callables.

    Note that ``active_history`` can also be set directly via
    :func:`.column_property` and :func:`.relationship`.

    """

    @classmethod
    def _adapt_listener(cls, self, listener):
        # Route the legacy extension's three hooks through the event API,
        # carrying over its active_history preference to each listener.
        for event_name in ('append', 'remove', 'set'):
            event.listen(self, event_name, getattr(listener, event_name),
                         active_history=listener.active_history,
                         raw=True, retval=True)

    def append(self, state, value, initiator):
        """Receive a collection append event.

        The returned value will be used as the actual value to be
        appended.

        """
        return value

    def remove(self, state, value, initiator):
        """Receive a remove event.

        No return value is defined.

        """
        pass

    def set(self, state, value, oldvalue, initiator):
        """Receive a set event.

        The returned value will be used as the actual value to be
        set.

        """
        return value
|
||||
699
lib/python3.7/site-packages/sqlalchemy/orm/descriptor_props.py
Normal file
699
lib/python3.7/site-packages/sqlalchemy/orm/descriptor_props.py
Normal file
|
|
@ -0,0 +1,699 @@
|
|||
# orm/descriptor_props.py
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Descriptor properties are more "auxiliary" properties
|
||||
that exist as configurational elements, but don't participate
|
||||
as actively in the load/persist ORM loop.
|
||||
|
||||
"""
|
||||
|
||||
from .interfaces import MapperProperty, PropComparator
|
||||
from .util import _none_set
|
||||
from . import attributes
|
||||
from .. import util, sql, exc as sa_exc, event, schema
|
||||
from ..sql import expression
|
||||
from . import properties
|
||||
from . import query
|
||||
|
||||
|
||||
class DescriptorProperty(MapperProperty):
    """:class:`.MapperProperty` which proxies access to a
    user-defined descriptor."""

    # Default docstring applied to the class-bound descriptor; subclasses
    # may override.
    doc = None

    def instrument_class(self, mapper):
        """Install a proxying descriptor for this property on the mapped
        class.

        If no descriptor was supplied and the class does not already carry
        a userland descriptor under this key, a plain pass-through
        ``property`` targeting the attribute named ``self.name`` is
        synthesized.
        """
        prop = self

        # Stand-in attribute "impl" so instrumentation machinery can treat
        # the proxied descriptor like a regular instrumented attribute.
        class _ProxyImpl(object):
            accepts_scalar_loader = False
            expire_missing = True
            collection = False

            def __init__(self, key):
                self.key = key

            # Only expose get_history on the impl when the property itself
            # implements it; decided once at class-construction time.
            if hasattr(prop, 'get_history'):
                def get_history(self, state, dict_,
                                passive=attributes.PASSIVE_OFF):
                    return prop.get_history(state, dict_, passive)

        if self.descriptor is None:
            # Adopt a descriptor the user placed on the class directly.
            desc = getattr(mapper.class_, self.key, None)
            if mapper._is_userland_descriptor(desc):
                self.descriptor = desc

        if self.descriptor is None:
            # Synthesize a simple forwarding property onto ``self.name``.
            def fset(obj, value):
                setattr(obj, self.name, value)

            def fdel(obj):
                delattr(obj, self.name)

            def fget(obj):
                return getattr(obj, self.name)

            self.descriptor = property(
                fget=fget,
                fset=fset,
                fdel=fdel,
            )

        # Wrap the descriptor so it also answers class-level (SQL
        # expression) access via the comparator factory.
        proxy_attr = attributes.create_proxied_attribute(
            self.descriptor)(
            self.parent.class_,
            self.key,
            self.descriptor,
            lambda: self._comparator_factory(mapper),
            doc=self.doc,
            original_property=self
        )
        proxy_attr.impl = _ProxyImpl(self.key)
        mapper.class_manager.instrument_attribute(self.key, proxy_attr)
|
||||
|
||||
|
||||
@util.langhelpers.dependency_for("sqlalchemy.orm.properties")
class CompositeProperty(DescriptorProperty):
    """Defines a "composite" mapped attribute, representing a collection
    of columns as one attribute.

    :class:`.CompositeProperty` is constructed using the :func:`.composite`
    function.

    .. seealso::

        :ref:`mapper_composite`

    """

    def __init__(self, class_, *attrs, **kwargs):
        """Return a composite column-based property for use with a Mapper.

        See the mapping documentation section :ref:`mapper_composite` for a
        full usage example.

        The :class:`.MapperProperty` returned by :func:`.composite`
        is the :class:`.CompositeProperty`.

        :param class\_:
          The "composite type" class.

        :param \*cols:
          List of Column objects to be mapped.

        :param active_history=False:
          When ``True``, indicates that the "previous" value for a
          scalar attribute should be loaded when replaced, if not
          already loaded. See the same flag on :func:`.column_property`.

          .. versionchanged:: 0.7
              This flag specifically becomes meaningful
              - previously it was a placeholder.

        :param group:
          A group name for this property when marked as deferred.

        :param deferred:
          When True, the column property is "deferred", meaning that it does
          not load immediately, and is instead loaded when the attribute is
          first accessed on an instance.  See also
          :func:`~sqlalchemy.orm.deferred`.

        :param comparator_factory:  a class which extends
          :class:`.CompositeProperty.Comparator` which provides custom SQL
          clause generation for comparison operations.

        :param doc:
          optional string that will be applied as the doc on the
          class-bound descriptor.

        :param info: Optional data dictionary which will be populated into the
            :attr:`.MapperProperty.info` attribute of this object.

            .. versionadded:: 0.8

        :param extension:
          an :class:`.AttributeExtension` instance,
          or list of extensions, which will be prepended to the list of
          attribute listeners for the resulting descriptor placed on the
          class.  **Deprecated.**  Please see :class:`.AttributeEvents`.

        """
        super(CompositeProperty, self).__init__()

        self.attrs = attrs
        self.composite_class = class_
        self.active_history = kwargs.get('active_history', False)
        self.deferred = kwargs.get('deferred', False)
        self.group = kwargs.get('group', None)
        self.comparator_factory = kwargs.pop('comparator_factory',
                                             self.__class__.Comparator)
        if 'info' in kwargs:
            self.info = kwargs.pop('info')

        util.set_creation_order(self)
        self._create_descriptor()

    def instrument_class(self, mapper):
        # Besides the standard descriptor proxying, composites need
        # mapper-level events to keep the composite value in sync with
        # its underlying column attributes.
        super(CompositeProperty, self).instrument_class(mapper)
        self._setup_event_handlers()

    def do_init(self):
        """Initialization which occurs after the :class:`.CompositeProperty`
        has been associated with its parent mapper.

        """
        self._setup_arguments_on_columns()

    def _create_descriptor(self):
        """Create the Python descriptor that will serve as
        the access point on instances of the mapped class.

        """

        def fget(instance):
            dict_ = attributes.instance_dict(instance)
            state = attributes.instance_state(instance)

            if self.key not in dict_:
                # key not present.  Iterate through related
                # attributes, retrieve their values.  This
                # ensures they all load.
                values = [
                    getattr(instance, key)
                    for key in self._attribute_keys
                ]

                # current expected behavior here is that the composite is
                # created on access if the object is persistent or if
                # col attributes have non-None.  This would be better
                # if the composite were created unconditionally,
                # but that would be a behavioral change.
                if self.key not in dict_ and (
                    state.key is not None or
                    not _none_set.issuperset(values)
                ):
                    dict_[self.key] = self.composite_class(*values)
                    state.manager.dispatch.refresh(state, None, [self.key])

            return dict_.get(self.key, None)

        def fset(instance, value):
            # Fire 'set' listeners, store the composite, then write each
            # component value back onto the individual mapped attributes.
            dict_ = attributes.instance_dict(instance)
            state = attributes.instance_state(instance)
            attr = state.manager[self.key]
            previous = dict_.get(self.key, attributes.NO_VALUE)
            for fn in attr.dispatch.set:
                value = fn(state, value, previous, attr.impl)
            dict_[self.key] = value
            if value is None:
                for key in self._attribute_keys:
                    setattr(instance, key, None)
            else:
                for key, value in zip(
                        self._attribute_keys,
                        value.__composite_values__()):
                    setattr(instance, key, value)

        def fdel(instance):
            # Fire 'remove' listeners and None out each underlying
            # column attribute.
            state = attributes.instance_state(instance)
            dict_ = attributes.instance_dict(instance)
            previous = dict_.pop(self.key, attributes.NO_VALUE)
            attr = state.manager[self.key]
            attr.dispatch.remove(state, previous, attr.impl)
            for key in self._attribute_keys:
                setattr(instance, key, None)

        self.descriptor = property(fget, fset, fdel)

    @util.memoized_property
    def _comparable_elements(self):
        # Class-bound attributes corresponding to each component property.
        return [
            getattr(self.parent.class_, prop.key)
            for prop in self.props
        ]

    @util.memoized_property
    def props(self):
        # Resolve the constructor arguments (attribute names, Columns, or
        # instrumented attributes) into MapperProperty objects.
        props = []
        for attr in self.attrs:
            if isinstance(attr, str):
                prop = self.parent.get_property(
                    attr, _configure_mappers=False)
            elif isinstance(attr, schema.Column):
                prop = self.parent._columntoproperty[attr]
            elif isinstance(attr, attributes.InstrumentedAttribute):
                prop = attr.property
            else:
                raise sa_exc.ArgumentError(
                    "Composite expects Column objects or mapped "
                    "attributes/attribute names as arguments, got: %r"
                    % (attr,))
            props.append(prop)
        return props

    @property
    def columns(self):
        # Only the Column objects that were passed directly to the
        # constructor.
        return [a for a in self.attrs if isinstance(a, schema.Column)]

    def _setup_arguments_on_columns(self):
        """Propagate configuration arguments made on this composite
        to the target columns, for those that apply.

        """
        for prop in self.props:
            prop.active_history = self.active_history
            if self.deferred:
                prop.deferred = self.deferred
                prop.strategy_class = prop._strategy_lookup(
                    ("deferred", True),
                    ("instrument", True))
            prop.group = self.group

    def _setup_event_handlers(self):
        """Establish events that populate/expire the composite attribute."""

        def load_handler(state, *args):
            dict_ = state.dict

            if self.key in dict_:
                return

            # if column elements aren't loaded, skip.
            # __get__() will initiate a load for those
            # columns
            for k in self._attribute_keys:
                if k not in dict_:
                    return

            # assert self.key not in dict_
            dict_[self.key] = self.composite_class(
                *[state.dict[key] for key in
                  self._attribute_keys]
            )

        def expire_handler(state, keys):
            # Drop the composite whenever any of its columns expire.
            if keys is None or set(self._attribute_keys).intersection(keys):
                state.dict.pop(self.key, None)

        def insert_update_handler(mapper, connection, state):
            """After an insert or update, some columns may be expired due
            to server side defaults, or re-populated due to client side
            defaults.  Pop out the composite value here so that it
            recreates.

            """

            state.dict.pop(self.key, None)

        event.listen(self.parent, 'after_insert',
                     insert_update_handler, raw=True)
        event.listen(self.parent, 'after_update',
                     insert_update_handler, raw=True)
        event.listen(self.parent, 'load',
                     load_handler, raw=True, propagate=True)
        event.listen(self.parent, 'refresh',
                     load_handler, raw=True, propagate=True)
        event.listen(self.parent, 'expire',
                     expire_handler, raw=True, propagate=True)

        # TODO: need a deserialize hook here

    @util.memoized_property
    def _attribute_keys(self):
        # Attribute names of the component properties, in order.
        return [
            prop.key for prop in self.props
        ]

    def get_history(self, state, dict_, passive=attributes.PASSIVE_OFF):
        """Provided for userland code that uses attributes.get_history()."""

        added = []
        deleted = []

        has_history = False
        for prop in self.props:
            key = prop.key
            hist = state.manager[key].impl.get_history(state, dict_)
            if hist.has_changes():
                has_history = True

            non_deleted = hist.non_deleted()
            if non_deleted:
                added.extend(non_deleted)
            else:
                added.append(None)
            if hist.deleted:
                deleted.extend(hist.deleted)
            else:
                deleted.append(None)

        if has_history:
            return attributes.History(
                [self.composite_class(*added)],
                (),
                [self.composite_class(*deleted)]
            )
        else:
            return attributes.History(
                (), [self.composite_class(*added)], ()
            )

    def _comparator_factory(self, mapper):
        return self.comparator_factory(self, mapper)

    class CompositeBundle(query.Bundle):
        # Bundle subclass so that querying the composite attribute
        # produces instances of the composite class per row.
        def __init__(self, property, expr):
            self.property = property
            super(CompositeProperty.CompositeBundle, self).__init__(
                property.key, *expr)

        def create_row_processor(self, query, procs, labels):
            # Assemble the composite object from the per-column
            # processors for each result row.
            def proc(row):
                return self.property.composite_class(
                    *[proc(row) for proc in procs])
            return proc

    class Comparator(PropComparator):
        """Produce boolean, comparison, and other operators for
        :class:`.CompositeProperty` attributes.

        See the example in :ref:`composite_operations` for an overview
        of usage , as well as the documentation for :class:`.PropComparator`.

        See also:

        :class:`.PropComparator`

        :class:`.ColumnOperators`

        :ref:`types_operators`

        :attr:`.TypeEngine.comparator_factory`

        """

        # Equality produces SQL expressions rather than booleans, so
        # instances are not hashable.
        __hash__ = None

        @property
        def clauses(self):
            return self.__clause_element__()

        def __clause_element__(self):
            return expression.ClauseList(
                group=False, *self._comparable_elements)

        def _query_clause_element(self):
            return CompositeProperty.CompositeBundle(
                self.prop, self.__clause_element__())

        @util.memoized_property
        def _comparable_elements(self):
            # Adapt component attributes to an aliased entity if one is
            # in effect; otherwise use the property's own elements.
            if self._adapt_to_entity:
                return [
                    getattr(
                        self._adapt_to_entity.entity,
                        prop.key
                    ) for prop in self.prop._comparable_elements
                ]
            else:
                return self.prop._comparable_elements

        def __eq__(self, other):
            # Compare element-wise; comparing to None compares every
            # column against NULL.
            if other is None:
                values = [None] * len(self.prop._comparable_elements)
            else:
                values = other.__composite_values__()
            comparisons = [
                a == b
                for a, b in zip(self.prop._comparable_elements, values)
            ]
            if self._adapt_to_entity:
                comparisons = [self.adapter(x) for x in comparisons]
            return sql.and_(*comparisons)

        def __ne__(self, other):
            return sql.not_(self.__eq__(other))

    def __str__(self):
        return str(self.parent.class_.__name__) + "." + self.key
|
||||
|
||||
|
||||
@util.langhelpers.dependency_for("sqlalchemy.orm.properties")
class ConcreteInheritedProperty(DescriptorProperty):
    """A 'do nothing' :class:`.MapperProperty` that disables
    an attribute on a concrete subclass that is only present
    on the inherited mapper, not the concrete classes' mapper.

    Cases where this occurs include:

    * When the superclass mapper is mapped against a
      "polymorphic union", which includes all attributes from
      all subclasses.
    * When a relationship() is configured on an inherited mapper,
      but not on the subclass mapper.  Concrete mappers require
      that relationship() is configured explicitly on each
      subclass.

    """

    def _comparator_factory(self, mapper):
        # Walk up the inheritance chain and borrow the comparator of the
        # nearest property with this key that is not itself disabled.
        for ancestor in self.parent.iterate_to_root():
            candidate = ancestor._props[self.key]
            if not isinstance(candidate, ConcreteInheritedProperty):
                return candidate.comparator_factory
        return None

    def __init__(self):
        super(ConcreteInheritedProperty, self).__init__()

        def _blocked():
            # Raised on any instance-level access of the disabled attribute.
            raise AttributeError("Concrete %s does not implement "
                                 "attribute %r at the instance level. Add "
                                 "this property explicitly to %s." %
                                 (self.parent, self.key, self.parent))

        class NoninheritedConcreteProp(object):
            # Descriptor that refuses every instance-level operation;
            # class-level access still yields the descriptor itself.
            def __set__(s, obj, value):
                _blocked()

            def __delete__(s, obj):
                _blocked()

            def __get__(s, obj, owner):
                if obj is None:
                    return self.descriptor
                _blocked()

        self.descriptor = NoninheritedConcreteProp()
|
||||
|
||||
|
||||
@util.langhelpers.dependency_for("sqlalchemy.orm.properties")
class SynonymProperty(DescriptorProperty):
    """Mirror the value and expression behavior of another mapped
    attribute under a different name."""

    def __init__(self, name, map_column=None,
                 descriptor=None, comparator_factory=None,
                 doc=None, info=None):
        """Denote an attribute name as a synonym to a mapped property,
        in that the attribute will mirror the value and expression behavior
        of another attribute.

        :param name: the name of the existing mapped property.  This
          can refer to the string name of any :class:`.MapperProperty`
          configured on the class, including column-bound attributes
          and relationships.

        :param descriptor: a Python :term:`descriptor` that will be used
          as a getter (and potentially a setter) when this attribute is
          accessed at the instance level.

        :param map_column: if ``True``, the :func:`.synonym` construct will
          locate the existing named :class:`.MapperProperty` based on the
          attribute name of this :func:`.synonym`, and assign it to a new
          attribute linked to the name of this :func:`.synonym`.
          That is, given a mapping like::

                class MyClass(Base):
                    __tablename__ = 'my_table'

                    id = Column(Integer, primary_key=True)
                    job_status = Column(String(50))

                    job_status = synonym("_job_status", map_column=True)

          The above class ``MyClass`` will now have the ``job_status``
          :class:`.Column` object mapped to the attribute named
          ``_job_status``, and the attribute named ``job_status`` will refer
          to the synonym itself.  This feature is typically used in
          conjunction with the ``descriptor`` argument in order to link a
          user-defined descriptor as a "wrapper" for an existing column.

        :param info: Optional data dictionary which will be populated into the
            :attr:`.InspectionAttr.info` attribute of this object.

            .. versionadded:: 1.0.0

        :param comparator_factory: A subclass of :class:`.PropComparator`
          that will provide custom comparison behavior at the SQL expression
          level.

          .. note::

            For the use case of providing an attribute which redefines both
            Python-level and SQL-expression level behavior of an attribute,
            please refer to the Hybrid attribute introduced at
            :ref:`mapper_hybrids` for a more effective technique.

        .. seealso::

            :ref:`synonyms` - examples of functionality.

            :ref:`mapper_hybrids` - Hybrids provide a better approach for
            more complicated attribute-wrapping schemes than synonyms.

        """
        super(SynonymProperty, self).__init__()

        self.name = name
        self.map_column = map_column
        self.descriptor = descriptor
        self.comparator_factory = comparator_factory
        # Fall back to the wrapped descriptor's own docstring when no
        # explicit doc is given.
        self.doc = doc or (descriptor and descriptor.__doc__) or None
        if info:
            self.info = info

        util.set_creation_order(self)

        # TODO: when initialized, check _proxied_property,
        # emit a warning if its not a column-based property

    @util.memoized_property
    def _proxied_property(self):
        # The MapperProperty this synonym mirrors, resolved lazily from the
        # parent class once mappers are configured.
        return getattr(self.parent.class_, self.name).property

    def _comparator_factory(self, mapper):
        prop = self._proxied_property

        if self.comparator_factory:
            comp = self.comparator_factory(prop, mapper)
        else:
            # Default to the comparator of the proxied property itself.
            comp = prop.comparator_factory(prop, mapper)
        return comp

    def set_parent(self, parent, init):
        """Attach to the parent mapper, optionally remapping the column
        named like this synonym onto ``self.name`` (the ``map_column``
        option)."""
        if self.map_column:
            # implement the 'map_column' option.
            if self.key not in parent.mapped_table.c:
                raise sa_exc.ArgumentError(
                    "Can't compile synonym '%s': no column on table "
                    "'%s' named '%s'"
                    % (self.name, parent.mapped_table.description, self.key))
            elif parent.mapped_table.c[self.key] in \
                    parent._columntoproperty and \
                    parent._columntoproperty[
                        parent.mapped_table.c[self.key]
                    ].key == self.name:
                raise sa_exc.ArgumentError(
                    "Can't call map_column=True for synonym %r=%r, "
                    "a ColumnProperty already exists keyed to the name "
                    "%r for column %r" %
                    (self.key, self.name, self.name, self.key)
                )
            # Re-map the column under the target name so this synonym's key
            # remains free for the synonym descriptor itself.
            p = properties.ColumnProperty(parent.mapped_table.c[self.key])
            parent._configure_property(
                self.name, p,
                init=init,
                setparent=True)
            p._mapped_by_synonym = self.key

        self.parent = parent
|
||||
|
||||
|
||||
@util.langhelpers.dependency_for("sqlalchemy.orm.properties")
|
||||
class ComparableProperty(DescriptorProperty):
|
||||
"""Instruments a Python property for use in query expressions."""
|
||||
|
||||
def __init__(
|
||||
self, comparator_factory, descriptor=None, doc=None, info=None):
|
||||
"""Provides a method of applying a :class:`.PropComparator`
|
||||
to any Python descriptor attribute.
|
||||
|
||||
.. versionchanged:: 0.7
|
||||
:func:`.comparable_property` is superseded by
|
||||
the :mod:`~sqlalchemy.ext.hybrid` extension. See the example
|
||||
at :ref:`hybrid_custom_comparators`.
|
||||
|
||||
Allows any Python descriptor to behave like a SQL-enabled
|
||||
attribute when used at the class level in queries, allowing
|
||||
redefinition of expression operator behavior.
|
||||
|
||||
In the example below we redefine :meth:`.PropComparator.operate`
|
||||
to wrap both sides of an expression in ``func.lower()`` to produce
|
||||
case-insensitive comparison::
|
||||
|
||||
from sqlalchemy.orm import comparable_property
|
||||
from sqlalchemy.orm.interfaces import PropComparator
|
||||
from sqlalchemy.sql import func
|
||||
from sqlalchemy import Integer, String, Column
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
|
||||
class CaseInsensitiveComparator(PropComparator):
|
||||
def __clause_element__(self):
|
||||
return self.prop
|
||||
|
||||
def operate(self, op, other):
|
||||
return op(
|
||||
func.lower(self.__clause_element__()),
|
||||
func.lower(other)
|
||||
)
|
||||
|
||||
Base = declarative_base()
|
||||
|
||||
class SearchWord(Base):
|
||||
__tablename__ = 'search_word'
|
||||
id = Column(Integer, primary_key=True)
|
||||
word = Column(String)
|
||||
word_insensitive = comparable_property(lambda prop, mapper:
|
||||
CaseInsensitiveComparator(
|
||||
mapper.c.word, mapper)
|
||||
)
|
||||
|
||||
|
||||
A mapping like the above allows the ``word_insensitive`` attribute
|
||||
to render an expression like::
|
||||
|
||||
>>> print SearchWord.word_insensitive == "Trucks"
|
||||
lower(search_word.word) = lower(:lower_1)
|
||||
|
||||
:param comparator_factory:
|
||||
A PropComparator subclass or factory that defines operator behavior
|
||||
for this property.
|
||||
|
||||
:param descriptor:
|
||||
Optional when used in a ``properties={}`` declaration. The Python
|
||||
descriptor or property to layer comparison behavior on top of.
|
||||
|
||||
The like-named descriptor will be automatically retrieved from the
|
||||
mapped class if left blank in a ``properties`` declaration.
|
||||
|
||||
:param info: Optional data dictionary which will be populated into the
|
||||
:attr:`.InspectionAttr.info` attribute of this object.
|
||||
|
||||
.. versionadded:: 1.0.0
|
||||
|
||||
"""
|
||||
super(ComparableProperty, self).__init__()
|
||||
self.descriptor = descriptor
|
||||
self.comparator_factory = comparator_factory
|
||||
self.doc = doc or (descriptor and descriptor.__doc__) or None
|
||||
if info:
|
||||
self.info = info
|
||||
util.set_creation_order(self)
|
||||
|
||||
def _comparator_factory(self, mapper):
|
||||
return self.comparator_factory(self, mapper)
|
||||
370
lib/python3.7/site-packages/sqlalchemy/orm/dynamic.py
Normal file
370
lib/python3.7/site-packages/sqlalchemy/orm/dynamic.py
Normal file
|
|
@ -0,0 +1,370 @@
|
|||
# orm/dynamic.py
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Dynamic collection API.
|
||||
|
||||
Dynamic collections act like Query() objects for read operations and support
|
||||
basic add/delete mutation.
|
||||
|
||||
"""
|
||||
|
||||
from .. import log, util, exc
|
||||
from ..sql import operators
|
||||
from . import (
|
||||
attributes, object_session, util as orm_util, strategies,
|
||||
object_mapper, exc as orm_exc, properties
|
||||
)
|
||||
from .query import Query
|
||||
|
||||
|
||||
@log.class_logger
|
||||
@properties.RelationshipProperty.strategy_for(lazy="dynamic")
|
||||
class DynaLoader(strategies.AbstractRelationshipLoader):
|
||||
def init_class_attribute(self, mapper):
|
||||
self.is_class_level = True
|
||||
if not self.uselist:
|
||||
raise exc.InvalidRequestError(
|
||||
"On relationship %s, 'dynamic' loaders cannot be used with "
|
||||
"many-to-one/one-to-one relationships and/or "
|
||||
"uselist=False." % self.parent_property)
|
||||
strategies._register_attribute(
|
||||
self,
|
||||
mapper,
|
||||
useobject=True,
|
||||
uselist=True,
|
||||
impl_class=DynamicAttributeImpl,
|
||||
target_mapper=self.parent_property.mapper,
|
||||
order_by=self.parent_property.order_by,
|
||||
query_class=self.parent_property.query_class,
|
||||
backref=self.parent_property.back_populates,
|
||||
)
|
||||
|
||||
|
||||
class DynamicAttributeImpl(attributes.AttributeImpl):
|
||||
uses_objects = True
|
||||
accepts_scalar_loader = False
|
||||
supports_population = False
|
||||
collection = False
|
||||
|
||||
def __init__(self, class_, key, typecallable,
|
||||
dispatch,
|
||||
target_mapper, order_by, query_class=None, **kw):
|
||||
super(DynamicAttributeImpl, self).\
|
||||
__init__(class_, key, typecallable, dispatch, **kw)
|
||||
self.target_mapper = target_mapper
|
||||
self.order_by = order_by
|
||||
if not query_class:
|
||||
self.query_class = AppenderQuery
|
||||
elif AppenderMixin in query_class.mro():
|
||||
self.query_class = query_class
|
||||
else:
|
||||
self.query_class = mixin_user_query(query_class)
|
||||
|
||||
def get(self, state, dict_, passive=attributes.PASSIVE_OFF):
|
||||
if not passive & attributes.SQL_OK:
|
||||
return self._get_collection_history(
|
||||
state, attributes.PASSIVE_NO_INITIALIZE).added_items
|
||||
else:
|
||||
return self.query_class(self, state)
|
||||
|
||||
def get_collection(self, state, dict_, user_data=None,
|
||||
passive=attributes.PASSIVE_NO_INITIALIZE):
|
||||
if not passive & attributes.SQL_OK:
|
||||
return self._get_collection_history(state,
|
||||
passive).added_items
|
||||
else:
|
||||
history = self._get_collection_history(state, passive)
|
||||
return history.added_plus_unchanged
|
||||
|
||||
@util.memoized_property
|
||||
def _append_token(self):
|
||||
return attributes.Event(self, attributes.OP_APPEND)
|
||||
|
||||
@util.memoized_property
|
||||
def _remove_token(self):
|
||||
return attributes.Event(self, attributes.OP_REMOVE)
|
||||
|
||||
def fire_append_event(self, state, dict_, value, initiator,
|
||||
collection_history=None):
|
||||
if collection_history is None:
|
||||
collection_history = self._modified_event(state, dict_)
|
||||
|
||||
collection_history.add_added(value)
|
||||
|
||||
for fn in self.dispatch.append:
|
||||
value = fn(state, value, initiator or self._append_token)
|
||||
|
||||
if self.trackparent and value is not None:
|
||||
self.sethasparent(attributes.instance_state(value), state, True)
|
||||
|
||||
def fire_remove_event(self, state, dict_, value, initiator,
|
||||
collection_history=None):
|
||||
if collection_history is None:
|
||||
collection_history = self._modified_event(state, dict_)
|
||||
|
||||
collection_history.add_removed(value)
|
||||
|
||||
if self.trackparent and value is not None:
|
||||
self.sethasparent(attributes.instance_state(value), state, False)
|
||||
|
||||
for fn in self.dispatch.remove:
|
||||
fn(state, value, initiator or self._remove_token)
|
||||
|
||||
def _modified_event(self, state, dict_):
|
||||
|
||||
if self.key not in state.committed_state:
|
||||
state.committed_state[self.key] = CollectionHistory(self, state)
|
||||
|
||||
state._modified_event(dict_,
|
||||
self,
|
||||
attributes.NEVER_SET)
|
||||
|
||||
# this is a hack to allow the fixtures.ComparableEntity fixture
|
||||
# to work
|
||||
dict_[self.key] = True
|
||||
return state.committed_state[self.key]
|
||||
|
||||
def set(self, state, dict_, value, initiator,
|
||||
passive=attributes.PASSIVE_OFF,
|
||||
check_old=None, pop=False):
|
||||
if initiator and initiator.parent_token is self.parent_token:
|
||||
return
|
||||
|
||||
if pop and value is None:
|
||||
return
|
||||
self._set_iterable(state, dict_, value)
|
||||
|
||||
def _set_iterable(self, state, dict_, iterable, adapter=None):
|
||||
new_values = list(iterable)
|
||||
if state.has_identity:
|
||||
old_collection = util.IdentitySet(self.get(state, dict_))
|
||||
|
||||
collection_history = self._modified_event(state, dict_)
|
||||
if not state.has_identity:
|
||||
old_collection = collection_history.added_items
|
||||
else:
|
||||
old_collection = old_collection.union(
|
||||
collection_history.added_items)
|
||||
|
||||
idset = util.IdentitySet
|
||||
constants = old_collection.intersection(new_values)
|
||||
additions = idset(new_values).difference(constants)
|
||||
removals = old_collection.difference(constants)
|
||||
|
||||
for member in new_values:
|
||||
if member in additions:
|
||||
self.fire_append_event(state, dict_, member, None,
|
||||
collection_history=collection_history)
|
||||
|
||||
for member in removals:
|
||||
self.fire_remove_event(state, dict_, member, None,
|
||||
collection_history=collection_history)
|
||||
|
||||
def delete(self, *args, **kwargs):
|
||||
raise NotImplementedError()
|
||||
|
||||
def set_committed_value(self, state, dict_, value):
|
||||
raise NotImplementedError("Dynamic attributes don't support "
|
||||
"collection population.")
|
||||
|
||||
def get_history(self, state, dict_, passive=attributes.PASSIVE_OFF):
|
||||
c = self._get_collection_history(state, passive)
|
||||
return c.as_history()
|
||||
|
||||
def get_all_pending(self, state, dict_,
|
||||
passive=attributes.PASSIVE_NO_INITIALIZE):
|
||||
c = self._get_collection_history(
|
||||
state, passive)
|
||||
return [
|
||||
(attributes.instance_state(x), x)
|
||||
for x in
|
||||
c.all_items
|
||||
]
|
||||
|
||||
def _get_collection_history(self, state, passive=attributes.PASSIVE_OFF):
|
||||
if self.key in state.committed_state:
|
||||
c = state.committed_state[self.key]
|
||||
else:
|
||||
c = CollectionHistory(self, state)
|
||||
|
||||
if state.has_identity and (passive & attributes.INIT_OK):
|
||||
return CollectionHistory(self, state, apply_to=c)
|
||||
else:
|
||||
return c
|
||||
|
||||
def append(self, state, dict_, value, initiator,
|
||||
passive=attributes.PASSIVE_OFF):
|
||||
if initiator is not self:
|
||||
self.fire_append_event(state, dict_, value, initiator)
|
||||
|
||||
def remove(self, state, dict_, value, initiator,
|
||||
passive=attributes.PASSIVE_OFF):
|
||||
if initiator is not self:
|
||||
self.fire_remove_event(state, dict_, value, initiator)
|
||||
|
||||
def pop(self, state, dict_, value, initiator,
|
||||
passive=attributes.PASSIVE_OFF):
|
||||
self.remove(state, dict_, value, initiator, passive=passive)
|
||||
|
||||
|
||||
class AppenderMixin(object):
|
||||
query_class = None
|
||||
|
||||
def __init__(self, attr, state):
|
||||
super(AppenderMixin, self).__init__(attr.target_mapper, None)
|
||||
self.instance = instance = state.obj()
|
||||
self.attr = attr
|
||||
|
||||
mapper = object_mapper(instance)
|
||||
prop = mapper._props[self.attr.key]
|
||||
self._criterion = prop._with_parent(
|
||||
instance,
|
||||
alias_secondary=False)
|
||||
|
||||
if self.attr.order_by:
|
||||
self._order_by = self.attr.order_by
|
||||
|
||||
def session(self):
|
||||
sess = object_session(self.instance)
|
||||
if sess is not None and self.autoflush and sess.autoflush \
|
||||
and self.instance in sess:
|
||||
sess.flush()
|
||||
if not orm_util.has_identity(self.instance):
|
||||
return None
|
||||
else:
|
||||
return sess
|
||||
session = property(session, lambda s, x: None)
|
||||
|
||||
def __iter__(self):
|
||||
sess = self.session
|
||||
if sess is None:
|
||||
return iter(self.attr._get_collection_history(
|
||||
attributes.instance_state(self.instance),
|
||||
attributes.PASSIVE_NO_INITIALIZE).added_items)
|
||||
else:
|
||||
return iter(self._clone(sess))
|
||||
|
||||
def __getitem__(self, index):
|
||||
sess = self.session
|
||||
if sess is None:
|
||||
return self.attr._get_collection_history(
|
||||
attributes.instance_state(self.instance),
|
||||
attributes.PASSIVE_NO_INITIALIZE).indexed(index)
|
||||
else:
|
||||
return self._clone(sess).__getitem__(index)
|
||||
|
||||
def count(self):
|
||||
sess = self.session
|
||||
if sess is None:
|
||||
return len(self.attr._get_collection_history(
|
||||
attributes.instance_state(self.instance),
|
||||
attributes.PASSIVE_NO_INITIALIZE).added_items)
|
||||
else:
|
||||
return self._clone(sess).count()
|
||||
|
||||
def _clone(self, sess=None):
|
||||
# note we're returning an entirely new Query class instance
|
||||
# here without any assignment capabilities; the class of this
|
||||
# query is determined by the session.
|
||||
instance = self.instance
|
||||
if sess is None:
|
||||
sess = object_session(instance)
|
||||
if sess is None:
|
||||
raise orm_exc.DetachedInstanceError(
|
||||
"Parent instance %s is not bound to a Session, and no "
|
||||
"contextual session is established; lazy load operation "
|
||||
"of attribute '%s' cannot proceed" % (
|
||||
orm_util.instance_str(instance), self.attr.key))
|
||||
|
||||
if self.query_class:
|
||||
query = self.query_class(self.attr.target_mapper, session=sess)
|
||||
else:
|
||||
query = sess.query(self.attr.target_mapper)
|
||||
|
||||
query._criterion = self._criterion
|
||||
query._order_by = self._order_by
|
||||
|
||||
return query
|
||||
|
||||
def extend(self, iterator):
|
||||
for item in iterator:
|
||||
self.attr.append(
|
||||
attributes.instance_state(self.instance),
|
||||
attributes.instance_dict(self.instance), item, None)
|
||||
|
||||
def append(self, item):
|
||||
self.attr.append(
|
||||
attributes.instance_state(self.instance),
|
||||
attributes.instance_dict(self.instance), item, None)
|
||||
|
||||
def remove(self, item):
|
||||
self.attr.remove(
|
||||
attributes.instance_state(self.instance),
|
||||
attributes.instance_dict(self.instance), item, None)
|
||||
|
||||
|
||||
class AppenderQuery(AppenderMixin, Query):
|
||||
"""A dynamic query that supports basic collection storage operations."""
|
||||
|
||||
|
||||
def mixin_user_query(cls):
|
||||
"""Return a new class with AppenderQuery functionality layered over."""
|
||||
name = 'Appender' + cls.__name__
|
||||
return type(name, (AppenderMixin, cls), {'query_class': cls})
|
||||
|
||||
|
||||
class CollectionHistory(object):
|
||||
"""Overrides AttributeHistory to receive append/remove events directly."""
|
||||
|
||||
def __init__(self, attr, state, apply_to=None):
|
||||
if apply_to:
|
||||
coll = AppenderQuery(attr, state).autoflush(False)
|
||||
self.unchanged_items = util.OrderedIdentitySet(coll)
|
||||
self.added_items = apply_to.added_items
|
||||
self.deleted_items = apply_to.deleted_items
|
||||
self._reconcile_collection = True
|
||||
else:
|
||||
self.deleted_items = util.OrderedIdentitySet()
|
||||
self.added_items = util.OrderedIdentitySet()
|
||||
self.unchanged_items = util.OrderedIdentitySet()
|
||||
self._reconcile_collection = False
|
||||
|
||||
@property
|
||||
def added_plus_unchanged(self):
|
||||
return list(self.added_items.union(self.unchanged_items))
|
||||
|
||||
@property
|
||||
def all_items(self):
|
||||
return list(self.added_items.union(
|
||||
self.unchanged_items).union(self.deleted_items))
|
||||
|
||||
def as_history(self):
|
||||
if self._reconcile_collection:
|
||||
added = self.added_items.difference(self.unchanged_items)
|
||||
deleted = self.deleted_items.intersection(self.unchanged_items)
|
||||
unchanged = self.unchanged_items.difference(deleted)
|
||||
else:
|
||||
added, unchanged, deleted = self.added_items,\
|
||||
self.unchanged_items,\
|
||||
self.deleted_items
|
||||
return attributes.History(
|
||||
list(added),
|
||||
list(unchanged),
|
||||
list(deleted),
|
||||
)
|
||||
|
||||
def indexed(self, index):
|
||||
return list(self.added_items)[index]
|
||||
|
||||
def add_added(self, value):
|
||||
self.added_items.add(value)
|
||||
|
||||
def add_removed(self, value):
|
||||
if value in self.added_items:
|
||||
self.added_items.remove(value)
|
||||
else:
|
||||
self.deleted_items.add(value)
|
||||
134
lib/python3.7/site-packages/sqlalchemy/orm/evaluator.py
Normal file
134
lib/python3.7/site-packages/sqlalchemy/orm/evaluator.py
Normal file
|
|
@ -0,0 +1,134 @@
|
|||
# orm/evaluator.py
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
import operator
|
||||
from ..sql import operators
|
||||
|
||||
|
||||
class UnevaluatableError(Exception):
|
||||
pass
|
||||
|
||||
_straight_ops = set(getattr(operators, op)
|
||||
for op in ('add', 'mul', 'sub',
|
||||
'div',
|
||||
'mod', 'truediv',
|
||||
'lt', 'le', 'ne', 'gt', 'ge', 'eq'))
|
||||
|
||||
|
||||
_notimplemented_ops = set(getattr(operators, op)
|
||||
for op in ('like_op', 'notlike_op', 'ilike_op',
|
||||
'notilike_op', 'between_op', 'in_op',
|
||||
'notin_op', 'endswith_op', 'concat_op'))
|
||||
|
||||
|
||||
class EvaluatorCompiler(object):
|
||||
def __init__(self, target_cls=None):
|
||||
self.target_cls = target_cls
|
||||
|
||||
def process(self, clause):
|
||||
meth = getattr(self, "visit_%s" % clause.__visit_name__, None)
|
||||
if not meth:
|
||||
raise UnevaluatableError(
|
||||
"Cannot evaluate %s" % type(clause).__name__)
|
||||
return meth(clause)
|
||||
|
||||
def visit_grouping(self, clause):
|
||||
return self.process(clause.element)
|
||||
|
||||
def visit_null(self, clause):
|
||||
return lambda obj: None
|
||||
|
||||
def visit_false(self, clause):
|
||||
return lambda obj: False
|
||||
|
||||
def visit_true(self, clause):
|
||||
return lambda obj: True
|
||||
|
||||
def visit_column(self, clause):
|
||||
if 'parentmapper' in clause._annotations:
|
||||
parentmapper = clause._annotations['parentmapper']
|
||||
if self.target_cls and not issubclass(
|
||||
self.target_cls, parentmapper.class_):
|
||||
raise UnevaluatableError(
|
||||
"Can't evaluate criteria against alternate class %s" %
|
||||
parentmapper.class_
|
||||
)
|
||||
key = parentmapper._columntoproperty[clause].key
|
||||
else:
|
||||
key = clause.key
|
||||
|
||||
get_corresponding_attr = operator.attrgetter(key)
|
||||
return lambda obj: get_corresponding_attr(obj)
|
||||
|
||||
def visit_clauselist(self, clause):
|
||||
evaluators = list(map(self.process, clause.clauses))
|
||||
if clause.operator is operators.or_:
|
||||
def evaluate(obj):
|
||||
has_null = False
|
||||
for sub_evaluate in evaluators:
|
||||
value = sub_evaluate(obj)
|
||||
if value:
|
||||
return True
|
||||
has_null = has_null or value is None
|
||||
if has_null:
|
||||
return None
|
||||
return False
|
||||
elif clause.operator is operators.and_:
|
||||
def evaluate(obj):
|
||||
for sub_evaluate in evaluators:
|
||||
value = sub_evaluate(obj)
|
||||
if not value:
|
||||
if value is None:
|
||||
return None
|
||||
return False
|
||||
return True
|
||||
else:
|
||||
raise UnevaluatableError(
|
||||
"Cannot evaluate clauselist with operator %s" %
|
||||
clause.operator)
|
||||
|
||||
return evaluate
|
||||
|
||||
def visit_binary(self, clause):
|
||||
eval_left, eval_right = list(map(self.process,
|
||||
[clause.left, clause.right]))
|
||||
operator = clause.operator
|
||||
if operator is operators.is_:
|
||||
def evaluate(obj):
|
||||
return eval_left(obj) == eval_right(obj)
|
||||
elif operator is operators.isnot:
|
||||
def evaluate(obj):
|
||||
return eval_left(obj) != eval_right(obj)
|
||||
elif operator in _straight_ops:
|
||||
def evaluate(obj):
|
||||
left_val = eval_left(obj)
|
||||
right_val = eval_right(obj)
|
||||
if left_val is None or right_val is None:
|
||||
return None
|
||||
return operator(eval_left(obj), eval_right(obj))
|
||||
else:
|
||||
raise UnevaluatableError(
|
||||
"Cannot evaluate %s with operator %s" %
|
||||
(type(clause).__name__, clause.operator))
|
||||
return evaluate
|
||||
|
||||
def visit_unary(self, clause):
|
||||
eval_inner = self.process(clause.element)
|
||||
if clause.operator is operators.inv:
|
||||
def evaluate(obj):
|
||||
value = eval_inner(obj)
|
||||
if value is None:
|
||||
return None
|
||||
return not value
|
||||
return evaluate
|
||||
raise UnevaluatableError(
|
||||
"Cannot evaluate %s with operator %s" %
|
||||
(type(clause).__name__, clause.operator))
|
||||
|
||||
def visit_bindparam(self, clause):
|
||||
val = clause.value
|
||||
return lambda obj: val
|
||||
1801
lib/python3.7/site-packages/sqlalchemy/orm/events.py
Normal file
1801
lib/python3.7/site-packages/sqlalchemy/orm/events.py
Normal file
File diff suppressed because it is too large
Load diff
165
lib/python3.7/site-packages/sqlalchemy/orm/exc.py
Normal file
165
lib/python3.7/site-packages/sqlalchemy/orm/exc.py
Normal file
|
|
@ -0,0 +1,165 @@
|
|||
# orm/exc.py
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""SQLAlchemy ORM exceptions."""
|
||||
from .. import exc as sa_exc, util
|
||||
|
||||
NO_STATE = (AttributeError, KeyError)
|
||||
"""Exception types that may be raised by instrumentation implementations."""
|
||||
|
||||
|
||||
class StaleDataError(sa_exc.SQLAlchemyError):
|
||||
"""An operation encountered database state that is unaccounted for.
|
||||
|
||||
Conditions which cause this to happen include:
|
||||
|
||||
* A flush may have attempted to update or delete rows
|
||||
and an unexpected number of rows were matched during
|
||||
the UPDATE or DELETE statement. Note that when
|
||||
version_id_col is used, rows in UPDATE or DELETE statements
|
||||
are also matched against the current known version
|
||||
identifier.
|
||||
|
||||
* A mapped object with version_id_col was refreshed,
|
||||
and the version number coming back from the database does
|
||||
not match that of the object itself.
|
||||
|
||||
* A object is detached from its parent object, however
|
||||
the object was previously attached to a different parent
|
||||
identity which was garbage collected, and a decision
|
||||
cannot be made if the new parent was really the most
|
||||
recent "parent".
|
||||
|
||||
.. versionadded:: 0.7.4
|
||||
|
||||
"""
|
||||
|
||||
ConcurrentModificationError = StaleDataError
|
||||
|
||||
|
||||
class FlushError(sa_exc.SQLAlchemyError):
|
||||
"""A invalid condition was detected during flush()."""
|
||||
|
||||
|
||||
class UnmappedError(sa_exc.InvalidRequestError):
|
||||
"""Base for exceptions that involve expected mappings not present."""
|
||||
|
||||
|
||||
class ObjectDereferencedError(sa_exc.SQLAlchemyError):
|
||||
"""An operation cannot complete due to an object being garbage
|
||||
collected.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
class DetachedInstanceError(sa_exc.SQLAlchemyError):
|
||||
"""An attempt to access unloaded attributes on a
|
||||
mapped instance that is detached."""
|
||||
|
||||
|
||||
class UnmappedInstanceError(UnmappedError):
|
||||
"""An mapping operation was requested for an unknown instance."""
|
||||
|
||||
@util.dependencies("sqlalchemy.orm.base")
|
||||
def __init__(self, base, obj, msg=None):
|
||||
if not msg:
|
||||
try:
|
||||
base.class_mapper(type(obj))
|
||||
name = _safe_cls_name(type(obj))
|
||||
msg = ("Class %r is mapped, but this instance lacks "
|
||||
"instrumentation. This occurs when the instance"
|
||||
"is created before sqlalchemy.orm.mapper(%s) "
|
||||
"was called." % (name, name))
|
||||
except UnmappedClassError:
|
||||
msg = _default_unmapped(type(obj))
|
||||
if isinstance(obj, type):
|
||||
msg += (
|
||||
'; was a class (%s) supplied where an instance was '
|
||||
'required?' % _safe_cls_name(obj))
|
||||
UnmappedError.__init__(self, msg)
|
||||
|
||||
def __reduce__(self):
|
||||
return self.__class__, (None, self.args[0])
|
||||
|
||||
|
||||
class UnmappedClassError(UnmappedError):
|
||||
"""An mapping operation was requested for an unknown class."""
|
||||
|
||||
def __init__(self, cls, msg=None):
|
||||
if not msg:
|
||||
msg = _default_unmapped(cls)
|
||||
UnmappedError.__init__(self, msg)
|
||||
|
||||
def __reduce__(self):
|
||||
return self.__class__, (None, self.args[0])
|
||||
|
||||
|
||||
class ObjectDeletedError(sa_exc.InvalidRequestError):
|
||||
"""A refresh operation failed to retrieve the database
|
||||
row corresponding to an object's known primary key identity.
|
||||
|
||||
A refresh operation proceeds when an expired attribute is
|
||||
accessed on an object, or when :meth:`.Query.get` is
|
||||
used to retrieve an object which is, upon retrieval, detected
|
||||
as expired. A SELECT is emitted for the target row
|
||||
based on primary key; if no row is returned, this
|
||||
exception is raised.
|
||||
|
||||
The true meaning of this exception is simply that
|
||||
no row exists for the primary key identifier associated
|
||||
with a persistent object. The row may have been
|
||||
deleted, or in some cases the primary key updated
|
||||
to a new value, outside of the ORM's management of the target
|
||||
object.
|
||||
|
||||
"""
|
||||
@util.dependencies("sqlalchemy.orm.base")
|
||||
def __init__(self, base, state, msg=None):
|
||||
if not msg:
|
||||
msg = "Instance '%s' has been deleted, or its "\
|
||||
"row is otherwise not present." % base.state_str(state)
|
||||
|
||||
sa_exc.InvalidRequestError.__init__(self, msg)
|
||||
|
||||
def __reduce__(self):
|
||||
return self.__class__, (None, self.args[0])
|
||||
|
||||
|
||||
class UnmappedColumnError(sa_exc.InvalidRequestError):
|
||||
"""Mapping operation was requested on an unknown column."""
|
||||
|
||||
|
||||
class NoResultFound(sa_exc.InvalidRequestError):
|
||||
"""A database result was required but none was found."""
|
||||
|
||||
|
||||
class MultipleResultsFound(sa_exc.InvalidRequestError):
|
||||
"""A single database result was required but more than one were found."""
|
||||
|
||||
|
||||
def _safe_cls_name(cls):
|
||||
try:
|
||||
cls_name = '.'.join((cls.__module__, cls.__name__))
|
||||
except AttributeError:
|
||||
cls_name = getattr(cls, '__name__', None)
|
||||
if cls_name is None:
|
||||
cls_name = repr(cls)
|
||||
return cls_name
|
||||
|
||||
|
||||
@util.dependencies("sqlalchemy.orm.base")
|
||||
def _default_unmapped(base, cls):
|
||||
try:
|
||||
mappers = base.manager_of_class(cls).mappers
|
||||
except NO_STATE:
|
||||
mappers = {}
|
||||
except TypeError:
|
||||
mappers = {}
|
||||
name = _safe_cls_name(cls)
|
||||
|
||||
if not mappers:
|
||||
return "Class '%s' is not mapped" % name
|
||||
314
lib/python3.7/site-packages/sqlalchemy/orm/identity.py
Normal file
314
lib/python3.7/site-packages/sqlalchemy/orm/identity.py
Normal file
|
|
@ -0,0 +1,314 @@
|
|||
# orm/identity.py
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
import weakref
|
||||
from . import attributes
|
||||
from .. import util
|
||||
|
||||
|
||||
class IdentityMap(object):
|
||||
def __init__(self):
|
||||
self._dict = {}
|
||||
self._modified = set()
|
||||
self._wr = weakref.ref(self)
|
||||
|
||||
def keys(self):
|
||||
return self._dict.keys()
|
||||
|
||||
def replace(self, state):
|
||||
raise NotImplementedError()
|
||||
|
||||
def add(self, state):
|
||||
raise NotImplementedError()
|
||||
|
||||
def _add_unpresent(self, state, key):
|
||||
"""optional inlined form of add() which can assume item isn't present
|
||||
in the map"""
|
||||
self.add(state)
|
||||
|
||||
def update(self, dict):
|
||||
raise NotImplementedError("IdentityMap uses add() to insert data")
|
||||
|
||||
def clear(self):
|
||||
raise NotImplementedError("IdentityMap uses remove() to remove data")
|
||||
|
||||
def _manage_incoming_state(self, state):
|
||||
state._instance_dict = self._wr
|
||||
|
||||
if state.modified:
|
||||
self._modified.add(state)
|
||||
|
||||
def _manage_removed_state(self, state):
|
||||
del state._instance_dict
|
||||
if state.modified:
|
||||
self._modified.discard(state)
|
||||
|
||||
def _dirty_states(self):
|
||||
return self._modified
|
||||
|
||||
def check_modified(self):
|
||||
"""return True if any InstanceStates present have been marked
|
||||
as 'modified'.
|
||||
|
||||
"""
|
||||
return bool(self._modified)
|
||||
|
||||
def has_key(self, key):
|
||||
return key in self
|
||||
|
||||
def popitem(self):
|
||||
raise NotImplementedError("IdentityMap uses remove() to remove data")
|
||||
|
||||
def pop(self, key, *args):
|
||||
raise NotImplementedError("IdentityMap uses remove() to remove data")
|
||||
|
||||
def setdefault(self, key, default=None):
|
||||
raise NotImplementedError("IdentityMap uses add() to insert data")
|
||||
|
||||
def __len__(self):
|
||||
return len(self._dict)
|
||||
|
||||
def copy(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
raise NotImplementedError("IdentityMap uses add() to insert data")
|
||||
|
||||
def __delitem__(self, key):
|
||||
raise NotImplementedError("IdentityMap uses remove() to remove data")
|
||||
|
||||
|
||||
class WeakInstanceDict(IdentityMap):
    """Identity map holding weak references to mapped instances.

    ``self._dict`` maps identity keys to InstanceState objects;
    ``state.obj()`` dereferences the state's weakref to the actual
    instance and returns None once the instance has been garbage
    collected.  Most methods therefore have to treat a present key
    whose object has been collected as "absent".
    """

    def __getitem__(self, key):
        # Raises KeyError both when the key is unknown and when the
        # referenced object has already been garbage collected.
        state = self._dict[key]
        o = state.obj()
        if o is None:
            raise KeyError(key)
        return o

    def __contains__(self, key):
        # The try/except guards against the state being removed from
        # the dict between the membership test and the lookup
        # (e.g. by a weakref callback firing during GC); in that race
        # the key is reported as not present.
        try:
            if key in self._dict:
                state = self._dict[key]
                o = state.obj()
            else:
                return False
        except KeyError:
            return False
        else:
            # A dead weakref counts as "not contained".
            return o is not None

    def contains_state(self, state):
        # True only if this exact InstanceState object occupies the slot.
        return state.key in self._dict and self._dict[state.key] is state

    def replace(self, state):
        """Install ``state`` under its key, displacing any different
        state already present.
        """
        if state.key in self._dict:
            existing = self._dict[state.key]
            if existing is not state:
                self._manage_removed_state(existing)
            else:
                # Already installed; nothing to do.
                return

        self._dict[state.key] = state
        self._manage_incoming_state(state)

    def add(self, state):
        """Add ``state``, asserting that no *live* conflicting state
        already occupies the same identity key.
        """
        key = state.key
        # inline of self.__contains__
        if key in self._dict:
            try:
                existing_state = self._dict[key]
                if existing_state is not state:
                    o = existing_state.obj()
                    if o is not None:
                        # A different state whose object is still alive
                        # holds this key: true identity conflict.
                        raise AssertionError(
                            "A conflicting state is already "
                            "present in the identity map for key %r"
                            % (key, ))
                else:
                    return
            except KeyError:
                # Entry vanished mid-check (weakref callback race);
                # proceed with the insert.
                pass
        self._dict[key] = state
        self._manage_incoming_state(state)

    def _add_unpresent(self, state, key):
        # inlined form of add() called by loading.py; skips the
        # conflict check and event bookkeeping for speed, wiring only
        # the back-reference to this identity dict.
        self._dict[key] = state
        state._instance_dict = self._wr

    def get(self, key, default=None):
        """Return the live object for ``key``, or ``default`` if the
        key is absent or its object has been collected.
        """
        if key not in self._dict:
            return default
        state = self._dict[key]
        o = state.obj()
        if o is None:
            return default
        return o

    def items(self):
        # Only (key, object) pairs whose object is still alive.
        values = self.all_states()
        result = []
        for state in values:
            value = state.obj()
            if value is not None:
                result.append((state.key, value))
        return result

    def values(self):
        # Only objects that are still alive.
        values = self.all_states()
        result = []
        for state in values:
            value = state.obj()
            if value is not None:
                result.append(value)

        return result

    def __iter__(self):
        return iter(self.keys())

    if util.py2k:

        # Python 2 lazy-iterator variants of items()/values().
        def iteritems(self):
            return iter(self.items())

        def itervalues(self):
            return iter(self.values())

    def all_states(self):
        # On py3k, .values() is a view; materialize a list so callers
        # can mutate the map while iterating the snapshot.
        if util.py2k:
            return self._dict.values()
        else:
            return list(self._dict.values())

    def _fast_discard(self, state):
        # Removal without the removed-state bookkeeping; used on hot paths.
        self._dict.pop(state.key, None)

    def discard(self, state):
        st = self._dict.pop(state.key, None)
        if st:
            assert st is state
            self._manage_removed_state(state)

    def safe_discard(self, state):
        # Like discard() but only removes the entry when it is this
        # exact state, leaving a different occupant untouched.
        if state.key in self._dict:
            st = self._dict[state.key]
            if st is state:
                self._dict.pop(state.key, None)
                self._manage_removed_state(state)

    def prune(self):
        # Weak references prune themselves; nothing to do here.
        return 0
class StrongInstanceDict(IdentityMap):
    """A 'strong-referencing' version of the identity map.

    Unlike :class:`.WeakInstanceDict`, ``self._dict`` maps identity
    keys directly to the mapped *objects* (not InstanceStates), keeping
    them alive for the lifetime of the map.

    .. deprecated:: this object is present in order to fulfill
       the ``weak_identity_map=False`` option of the Session.
       This option is present to allow compatibility with older
       applications, but it is recommended that strong references to
       objects be maintained by the calling application
       externally to the :class:`.Session` itself, to the degree
       that is needed by the application.

    """

    if util.py2k:
        # Python 2 lazy-iterator variants delegating to the dict.
        def itervalues(self):
            return self._dict.itervalues()

        def iteritems(self):
            return self._dict.iteritems()

    def __iter__(self):
        # BUGFIX: this previously read ``self.dict_``, an attribute that
        # does not exist anywhere in this hierarchy (every other method
        # uses ``self._dict``), so any iteration raised AttributeError.
        return iter(self._dict)

    def __getitem__(self, key):
        return self._dict[key]

    def __contains__(self, key):
        return key in self._dict

    def get(self, key, default=None):
        """Return the object for ``key``, or ``default`` if absent."""
        return self._dict.get(key, default)

    def values(self):
        return self._dict.values()

    def items(self):
        return self._dict.items()

    def all_states(self):
        # Objects are stored directly; recover each InstanceState.
        return [attributes.instance_state(o) for o in self.values()]

    def contains_state(self, state):
        # True only if the stored object maps back to this exact state.
        return (
            state.key in self and
            attributes.instance_state(self[state.key]) is state)

    def replace(self, state):
        """Install ``state``'s object under its key, displacing any
        different state already present.
        """
        if state.key in self._dict:
            existing = self._dict[state.key]
            existing = attributes.instance_state(existing)
            if existing is not state:
                self._manage_removed_state(existing)
            else:
                return

        self._dict[state.key] = state.obj()
        self._manage_incoming_state(state)

    def add(self, state):
        """Add ``state``'s object, asserting no conflicting state
        already occupies the same identity key.
        """
        if state.key in self:
            if attributes.instance_state(self._dict[state.key]) is not state:
                raise AssertionError('A conflicting state is already '
                                     'present in the identity map for key %r'
                                     % (state.key, ))
        else:
            self._dict[state.key] = state.obj()
            self._manage_incoming_state(state)

    def _add_unpresent(self, state, key):
        # inlined form of add() called by loading.py; skips the
        # conflict check and event bookkeeping for speed.
        self._dict[key] = state.obj()
        state._instance_dict = self._wr

    def _fast_discard(self, state):
        # Removal without the removed-state bookkeeping; used on hot paths.
        self._dict.pop(state.key, None)

    def discard(self, state):
        obj = self._dict.pop(state.key, None)
        if obj is not None:
            self._manage_removed_state(state)
            st = attributes.instance_state(obj)
            assert st is state

    def safe_discard(self, state):
        # Like discard() but only removes the entry when it belongs to
        # this exact state, leaving a different occupant untouched.
        if state.key in self._dict:
            obj = self._dict[state.key]
            st = attributes.instance_state(obj)
            if st is state:
                self._dict.pop(state.key, None)
                self._manage_removed_state(state)

    def prune(self):
        """prune unreferenced, non-dirty states.

        Returns the number of entries removed.
        """

        ref_count = len(self)
        dirty = [s.obj() for s in self.all_states() if s.modified]

        # work around http://bugs.python.org/issue6149
        keepers = weakref.WeakValueDictionary()
        keepers.update(self)

        self._dict.clear()
        self._dict.update(keepers)
        # NOTE(review): every other accessor uses ``self._modified`` (a
        # set); this bool assignment to ``self.modified`` looks suspect —
        # confirm intended semantics before changing.
        self.modified = bool(dirty)
        return ref_count - len(self)
528
lib/python3.7/site-packages/sqlalchemy/orm/instrumentation.py
Normal file
528
lib/python3.7/site-packages/sqlalchemy/orm/instrumentation.py
Normal file
|
|
@ -0,0 +1,528 @@
|
|||
# orm/instrumentation.py
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Defines SQLAlchemy's system of class instrumentation.
|
||||
|
||||
This module is usually not directly visible to user applications, but
|
||||
defines a large part of the ORM's interactivity.
|
||||
|
||||
instrumentation.py deals with registration of end-user classes
|
||||
for state tracking. It interacts closely with state.py
|
||||
and attributes.py which establish per-instance and per-class-attribute
|
||||
instrumentation, respectively.
|
||||
|
||||
The class instrumentation system can be customized on a per-class
|
||||
or global basis using the :mod:`sqlalchemy.ext.instrumentation`
|
||||
module, which provides the means to build and specify
|
||||
alternate instrumentation forms.
|
||||
|
||||
.. versionchanged: 0.8
|
||||
The instrumentation extension system was moved out of the
|
||||
ORM and into the external :mod:`sqlalchemy.ext.instrumentation`
|
||||
package. When that package is imported, it installs
|
||||
itself within sqlalchemy.orm so that its more comprehensive
|
||||
resolution mechanics take effect.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
from . import exc, collections, interfaces, state
|
||||
from .. import util
|
||||
from . import base
|
||||
|
||||
|
||||
# Shared memoization group for ClassManager's derived key collections
# (_all_key_set, _collection_impl_keys, _scalar_loader_impls); expired
# as a unit whenever attributes are instrumented or uninstrumented.
_memoized_key_collection = util.group_expirable_memoized_property()
class ClassManager(dict):
    """tracks state information at the class level.

    A ``ClassManager`` is itself a dict mapping attribute key ->
    instrumented attribute object for one mapped class.
    """

    # Class-level attribute names used to store the manager and the
    # per-instance InstanceState on mapped classes/instances.
    MANAGER_ATTR = base.DEFAULT_MANAGER_ATTR
    STATE_ATTR = base.DEFAULT_STATE_ATTR

    # Fast setter for assigning an InstanceState onto an instance.
    _state_setter = staticmethod(util.attrsetter(STATE_ATTR))

    deferred_scalar_loader = None

    # Fallback __init__ used until _instrument_init() captures the real one.
    original_init = object.__init__

    # The factory that created this manager; set by InstrumentationFactory.
    factory = None

    def __init__(self, class_):
        self.class_ = class_
        self.info = {}
        self.new_init = None
        # Attributes instrumented directly on this class (vs. inherited).
        self.local_attrs = {}
        # Original class members displaced by install_member().
        self.originals = {}

        # Managers of instrumented base classes, if any.
        # NOTE: the loop variable ``base`` shadows the module-level
        # ``base`` import within this method.
        self._bases = [mgr for mgr in [
            manager_of_class(base)
            for base in self.class_.__bases__
            if isinstance(base, type)
        ] if mgr is not None]

        # Inherit the attribute dict contents of each instrumented base.
        for base in self._bases:
            self.update(base)

        self.dispatch._events._new_classmanager_instance(class_, self)
        # events._InstanceEventsHold.populate(class_, self)

        # Merge event dispatchers from every instrumented class in the MRO.
        for basecls in class_.__mro__:
            mgr = manager_of_class(basecls)
            if mgr is not None:
                self.dispatch._update(mgr.dispatch)
        self.manage()
        self._instrument_init()

        if '__del__' in class_.__dict__:
            util.warn("__del__() method on class %s will "
                      "cause unreachable cycles and memory leaks, "
                      "as SQLAlchemy instrumentation often creates "
                      "reference cycles.  Please remove this method." %
                      class_)

    def __hash__(self):
        # Identity-based hashing; the dict contents are mutable.
        return id(self)

    def __eq__(self, other):
        return other is self

    @property
    def is_mapped(self):
        # True once a Mapper has assigned itself to self.mapper,
        # overriding the raising memoized property below.
        return 'mapper' in self.__dict__

    @_memoized_key_collection
    def _all_key_set(self):
        # Frozen snapshot of all instrumented attribute keys.
        return frozenset(self)

    @_memoized_key_collection
    def _collection_impl_keys(self):
        # Keys of attributes whose impl manages a collection.
        return frozenset([
            attr.key for attr in self.values() if attr.impl.collection])

    @_memoized_key_collection
    def _scalar_loader_impls(self):
        # Impls that participate in deferred scalar loading.
        return frozenset([
            attr.impl for attr in
            self.values() if attr.impl.accepts_scalar_loader])

    @util.memoized_property
    def mapper(self):
        # raises unless self.mapper has been assigned
        raise exc.UnmappedClassError(self.class_)

    def _all_sqla_attributes(self, exclude=None):
        """return an iterator of all classbound attributes that
        implement :class:`.InspectionAttr`.

        This includes :class:`.QueryableAttribute` as well as extension
        types such as :class:`.hybrid_property` and
        :class:`.AssociationProxy`.

        """
        if exclude is None:
            exclude = set()
        # Walk the MRO, yielding each name only once (first definition wins).
        for supercls in self.class_.__mro__:
            for key in set(supercls.__dict__).difference(exclude):
                exclude.add(key)
                val = supercls.__dict__[key]
                if isinstance(val, interfaces.InspectionAttr):
                    yield key, val

    def _attr_has_impl(self, key):
        """Return True if the given attribute is fully initialized.

        i.e. has an impl.
        """

        return key in self and self[key].impl is not None

    def _subclass_manager(self, cls):
        """Create a new ClassManager for a subclass of this ClassManager's
        class.

        This is called automatically when attributes are instrumented so that
        the attributes can be propagated to subclasses against their own
        class-local manager, without the need for mappers etc. to have already
        pre-configured managers for the full class hierarchy.   Mappers
        can post-configure the auto-generated ClassManager when needed.

        """
        manager = manager_of_class(cls)
        if manager is None:
            manager = _instrumentation_factory.create_manager_for_cls(cls)
        return manager

    def _instrument_init(self):
        # TODO: self.class_.__init__ is often the already-instrumented
        # __init__ from an instrumented superclass.  We still need to make
        # our own wrapper, but it would
        # be nice to wrap the original __init__ and not our existing wrapper
        # of such, since this adds method overhead.
        self.original_init = self.class_.__init__
        self.new_init = _generate_init(self.class_, self)
        self.install_member('__init__', self.new_init)

    def _uninstrument_init(self):
        # Restore the pre-instrumentation __init__, if we replaced it.
        if self.new_init:
            self.uninstall_member('__init__')
            self.new_init = None

    @util.memoized_property
    def _state_constructor(self):
        # Fires the one-time 'first_init' event the first time a state
        # is constructed for this class.
        self.dispatch.first_init(self, self.class_)
        return state.InstanceState

    def manage(self):
        """Mark this instance as the manager for its class."""

        setattr(self.class_, self.MANAGER_ATTR, self)

    def dispose(self):
        """Dissasociate this manager from its class."""

        delattr(self.class_, self.MANAGER_ATTR)

    @util.hybridmethod
    def manager_getter(self):
        return _default_manager_getter

    @util.hybridmethod
    def state_getter(self):
        """Return a (instance) -> InstanceState callable.

        "state getter" callables should raise either KeyError or
        AttributeError if no InstanceState could be found for the
        instance.
        """

        return _default_state_getter

    @util.hybridmethod
    def dict_getter(self):
        return _default_dict_getter

    def instrument_attribute(self, key, inst, propagated=False):
        """Install an instrumented attribute under ``key``, cascading
        the installation to subclasses.
        """
        if propagated:
            if key in self.local_attrs:
                return  # don't override local attr with inherited attr
        else:
            self.local_attrs[key] = inst
            self.install_descriptor(key, inst)
        # Derived key collections are now stale.
        _memoized_key_collection.expire_instance(self)
        self[key] = inst

        for cls in self.class_.__subclasses__():
            manager = self._subclass_manager(cls)
            manager.instrument_attribute(key, inst, True)

    def subclass_managers(self, recursive):
        # Yield managers of already-instrumented subclasses; does not
        # create managers for uninstrumented subclasses.
        for cls in self.class_.__subclasses__():
            mgr = manager_of_class(cls)
            if mgr is not None and mgr is not self:
                yield mgr
                if recursive:
                    for m in mgr.subclass_managers(True):
                        yield m

    def post_configure_attribute(self, key):
        # Emit the attribute_instrument event for extension hooks.
        _instrumentation_factory.dispatch.\
            attribute_instrument(self.class_, key, self[key])

    def uninstrument_attribute(self, key, propagated=False):
        """Remove the instrumented attribute ``key``, cascading the
        removal to subclasses.
        """
        if key not in self:
            return
        if propagated:
            if key in self.local_attrs:
                return  # don't get rid of local attr
        else:
            del self.local_attrs[key]
            self.uninstall_descriptor(key)
        _memoized_key_collection.expire_instance(self)
        del self[key]
        for cls in self.class_.__subclasses__():
            manager = manager_of_class(cls)
            if manager:
                manager.uninstrument_attribute(key, True)

    def unregister(self):
        """remove all instrumentation established by this ClassManager."""

        self._uninstrument_init()

        self.mapper = self.dispatch = None
        self.info.clear()

        for key in list(self):
            if key in self.local_attrs:
                self.uninstrument_attribute(key)

    def install_descriptor(self, key, inst):
        # Guard the reserved instrumentation attribute names.
        if key in (self.STATE_ATTR, self.MANAGER_ATTR):
            raise KeyError("%r: requested attribute name conflicts with "
                           "instrumentation attribute of the same name." %
                           key)
        setattr(self.class_, key, inst)

    def uninstall_descriptor(self, key):
        delattr(self.class_, key)

    def install_member(self, key, implementation):
        """Replace class member ``key``, remembering the first-seen
        original for later restoration by uninstall_member().
        """
        if key in (self.STATE_ATTR, self.MANAGER_ATTR):
            raise KeyError("%r: requested attribute name conflicts with "
                           "instrumentation attribute of the same name." %
                           key)
        # setdefault: only the first displaced value is preserved.
        self.originals.setdefault(key, getattr(self.class_, key, None))
        setattr(self.class_, key, implementation)

    def uninstall_member(self, key):
        original = self.originals.pop(key, None)
        if original is not None:
            setattr(self.class_, key, original)

    def instrument_collection_class(self, key, collection_class):
        return collections.prepare_instrumentation(collection_class)

    def initialize_collection(self, key, state, factory):
        # Build the user-facing collection plus its instrumentation adapter.
        user_data = factory()
        adapter = collections.CollectionAdapter(
            self.get_impl(key), state, user_data)
        return adapter, user_data

    def is_instrumented(self, key, search=False):
        # search=True also considers attributes inherited from base managers.
        if search:
            return key in self
        else:
            return key in self.local_attrs

    def get_impl(self, key):
        return self[key].impl

    @property
    def attributes(self):
        return iter(self.values())

    # InstanceState management

    def new_instance(self, state=None):
        # Create an instance without running __init__.
        instance = self.class_.__new__(self.class_)
        if state is None:
            state = self._state_constructor(instance, self)
        self._state_setter(instance, state)
        return instance

    def setup_instance(self, instance, state=None):
        # Attach an InstanceState to an already-constructed instance.
        if state is None:
            state = self._state_constructor(instance, self)
        self._state_setter(instance, state)

    def teardown_instance(self, instance):
        delattr(instance, self.STATE_ATTR)

    def _serialize(self, state, state_dict):
        return _SerializeManager(state, state_dict)

    def _new_state_if_none(self, instance):
        """Install a default InstanceState if none is present.

        A private convenience method used by the __init__ decorator.

        """
        if hasattr(instance, self.STATE_ATTR):
            return False
        elif self.class_ is not instance.__class__ and \
                self.is_mapped:
            # this will create a new ClassManager for the
            # subclass, without a mapper.  This is likely a
            # user error situation but allow the object
            # to be constructed, so that it is usable
            # in a non-ORM context at least.
            return self._subclass_manager(instance.__class__).\
                _new_state_if_none(instance)
        else:
            state = self._state_constructor(instance, self)
            self._state_setter(instance, state)
            return state

    def has_state(self, instance):
        return hasattr(instance, self.STATE_ATTR)

    def has_parent(self, state, key, optimistic=False):
        """TODO"""
        return self.get_impl(key).hasparent(state, optimistic=optimistic)

    def __bool__(self):
        """All ClassManagers are non-zero regardless of attribute state."""
        return True

    __nonzero__ = __bool__

    def __repr__(self):
        return '<%s of %r at %x>' % (
            self.__class__.__name__, self.class_, id(self))
||||
class _SerializeManager(object):
    """Provide serialization of a :class:`.ClassManager`.

    The :class:`.InstanceState` uses ``__init__()`` on serialize
    and ``__call__()`` on deserialize.

    """

    def __init__(self, state, d):
        # Record only the class; the manager itself is looked up again
        # on deserialize, in the receiving process.
        self.class_ = state.class_
        manager = state.manager
        manager.dispatch.pickle(state, d)

    def __call__(self, state, inst, state_dict):
        state.manager = manager = manager_of_class(self.class_)
        if manager is None:
            raise exc.UnmappedInstanceError(
                inst,
                "Cannot deserialize object of type %r - "
                "no mapper() has "
                "been configured for this class within the current "
                "Python process!" %
                self.class_)
        elif manager.is_mapped and not manager.mapper.configured:
            manager.mapper._configure_all()

        # setup _sa_instance_state ahead of time so that
        # unpickle events can access the object normally.
        # see [ticket:2362]
        if inst is not None:
            manager.setup_instance(inst, state)
        manager.dispatch.unpickle(state, state_dict)
class InstrumentationFactory(object):
    """Factory for new ClassManager instances."""

    def create_manager_for_cls(self, class_):
        """Create and return a ClassManager for ``class_``, which must
        not already be instrumented.
        """
        assert class_ is not None
        assert manager_of_class(class_) is None

        # Allow a more elaborate subclass a chance to supply its own
        # manager/factory pair before falling back to the default.
        mgr, mgr_factory = self._locate_extended_factory(class_)

        if mgr_factory is None:
            mgr_factory = ClassManager
            mgr = mgr_factory(class_)

        self._check_conflicts(class_, mgr_factory)

        mgr.factory = mgr_factory

        self.dispatch.class_instrument(class_)
        return mgr

    def _locate_extended_factory(self, class_):
        """Overridden by a subclass to do an extended lookup."""
        return None, None

    def _check_conflicts(self, class_, factory):
        """Overridden by a subclass to test for conflicting factories."""
        return

    def unregister(self, class_):
        """Tear down instrumentation on ``class_`` and emit the
        class_uninstrument event.
        """
        mgr = manager_of_class(class_)
        mgr.unregister()
        mgr.dispose()
        self.dispatch.class_uninstrument(class_)
        # dispose() removes the manager attribute via the manager; also
        # clear any leftover entry directly on the class __dict__.
        if ClassManager.MANAGER_ATTR in class_.__dict__:
            delattr(class_, ClassManager.MANAGER_ATTR)
||||
# this attribute is replaced by sqlalchemy.ext.instrumentation
# when imported.
_instrumentation_factory = InstrumentationFactory()

# these attributes are replaced by sqlalchemy.ext.instrumentation
# when a non-standard InstrumentationManager class is first
# used to instrument a class.
instance_state = _default_state_getter = base.instance_state

instance_dict = _default_dict_getter = base.instance_dict

manager_of_class = _default_manager_getter = base.manager_of_class
||||
def register_class(class_):
    """Register class instrumentation.

    Returns the existing or newly created class manager.

    """
    existing = manager_of_class(class_)
    if existing is not None:
        return existing
    # No manager yet; have the (possibly extension-replaced) factory
    # build one for this class.
    return _instrumentation_factory.create_manager_for_cls(class_)
||||
def unregister_class(class_):
    """Unregister class instrumentation.

    Delegates to the module-level instrumentation factory, which may
    have been replaced by ``sqlalchemy.ext.instrumentation``.
    """
    _instrumentation_factory.unregister(class_)
||||
def is_instrumented(instance, key):
    """Return True if the given attribute on the given instance is
    instrumented by the attributes package.

    This function may be used regardless of instrumentation
    applied directly to the class, i.e. no descriptors are required.

    """
    # search=True also considers attributes inherited from base managers.
    mgr = manager_of_class(instance.__class__)
    return mgr.is_instrumented(key, search=True)
||||
def _generate_init(class_, class_manager):
    """Build an __init__ decorator that triggers ClassManager events.

    The wrapper is generated with ``exec`` so that it reproduces the
    exact calling signature of the class's existing ``__init__``; the
    generated function closes over ``class_manager`` and
    ``original__init__`` via the exec environment.
    """

    # TODO: we should use the ClassManager's notion of the
    # original '__init__' method, once ClassManager is fixed
    # to always reference that.
    original__init__ = class_.__init__
    assert original__init__

    # Go through some effort here and don't change the user's __init__
    # calling signature, including the unlikely case that it has
    # a return value.
    # FIXME: need to juggle local names to avoid constructor argument
    # clashes.
    func_body = """\
def __init__(%(apply_pos)s):
    new_state = class_manager._new_state_if_none(%(self_arg)s)
    if new_state:
        return new_state._initialize_instance(%(apply_kw)s)
    else:
        return original__init__(%(apply_kw)s)
"""
    # Fill the template with the argspec of the wrapped __init__.
    func_vars = util.format_argspec_init(original__init__, grouped=False)
    func_text = func_body % func_vars

    if util.py2k:
        # On py2, defaults live on the underlying function object.
        func = getattr(original__init__, 'im_func', original__init__)
        func_defaults = getattr(func, 'func_defaults', None)
    else:
        func_defaults = getattr(original__init__, '__defaults__', None)
        func_kw_defaults = getattr(original__init__, '__kwdefaults__', None)

    # locals() supplies class_manager / original__init__ to the
    # generated function's globals.
    env = locals().copy()
    exec(func_text, env)
    __init__ = env['__init__']
    __init__.__doc__ = original__init__.__doc__

    # Copy defaults so introspection and default-argument behavior of
    # the wrapper match the original.
    if func_defaults:
        __init__.__defaults__ = func_defaults
    if not util.py2k and func_kw_defaults:
        __init__.__kwdefaults__ = func_kw_defaults

    return __init__
640
lib/python3.7/site-packages/sqlalchemy/orm/interfaces.py
Normal file
640
lib/python3.7/site-packages/sqlalchemy/orm/interfaces.py
Normal file
|
|
@ -0,0 +1,640 @@
|
|||
# orm/interfaces.py
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""
|
||||
|
||||
Contains various base classes used throughout the ORM.
|
||||
|
||||
Defines some key base classes prominent within the internals,
|
||||
as well as the now-deprecated ORM extension classes.
|
||||
|
||||
Other than the deprecated extensions, this module and the
|
||||
classes within are mostly private, though some attributes
|
||||
are exposed when inspecting mappings.
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from .. import util
|
||||
from ..sql import operators
|
||||
from .base import (ONETOMANY, MANYTOONE, MANYTOMANY,
|
||||
EXT_CONTINUE, EXT_STOP, NOT_EXTENSION)
|
||||
from .base import (InspectionAttr, InspectionAttr,
|
||||
InspectionAttrInfo, _MappedAttribute)
|
||||
import collections
|
||||
from .. import inspect
|
||||
|
||||
# imported later; these module-level placeholders are replaced by
# deprecated_interfaces when it is imported, avoiding a circular import.
MapperExtension = SessionExtension = AttributeExtension = None

# Public API of this module.
__all__ = (
    'AttributeExtension',
    'EXT_CONTINUE',
    'EXT_STOP',
    'ONETOMANY',
    'MANYTOMANY',
    'MANYTOONE',
    'NOT_EXTENSION',
    'LoaderStrategy',
    'MapperExtension',
    'MapperOption',
    'MapperProperty',
    'PropComparator',
    'SessionExtension',
    'StrategizedProperty',
)
||||
class MapperProperty(_MappedAttribute, InspectionAttr, util.MemoizedSlots):
|
||||
"""Represent a particular class attribute mapped by :class:`.Mapper`.
|
||||
|
||||
The most common occurrences of :class:`.MapperProperty` are the
|
||||
mapped :class:`.Column`, which is represented in a mapping as
|
||||
an instance of :class:`.ColumnProperty`,
|
||||
and a reference to another class produced by :func:`.relationship`,
|
||||
represented in the mapping as an instance of
|
||||
:class:`.RelationshipProperty`.
|
||||
|
||||
"""
|
||||
|
||||
__slots__ = (
|
||||
'_configure_started', '_configure_finished', 'parent', 'key',
|
||||
'info'
|
||||
)
|
||||
|
||||
cascade = frozenset()
|
||||
"""The set of 'cascade' attribute names.
|
||||
|
||||
This collection is checked before the 'cascade_iterator' method is called.
|
||||
|
||||
The collection typically only applies to a RelationshipProperty.
|
||||
|
||||
"""
|
||||
|
||||
is_property = True
|
||||
"""Part of the InspectionAttr interface; states this object is a
|
||||
mapper property.
|
||||
|
||||
"""
|
||||
|
||||
def _memoized_attr_info(self):
|
||||
"""Info dictionary associated with the object, allowing user-defined
|
||||
data to be associated with this :class:`.InspectionAttr`.
|
||||
|
||||
The dictionary is generated when first accessed. Alternatively,
|
||||
it can be specified as a constructor argument to the
|
||||
:func:`.column_property`, :func:`.relationship`, or :func:`.composite`
|
||||
functions.
|
||||
|
||||
.. versionadded:: 0.8 Added support for .info to all
|
||||
:class:`.MapperProperty` subclasses.
|
||||
|
||||
.. versionchanged:: 1.0.0 :attr:`.MapperProperty.info` is also
|
||||
available on extension types via the
|
||||
:attr:`.InspectionAttrInfo.info` attribute, so that it can apply
|
||||
to a wider variety of ORM and extension constructs.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:attr:`.QueryableAttribute.info`
|
||||
|
||||
:attr:`.SchemaItem.info`
|
||||
|
||||
"""
|
||||
return {}
|
||||
|
||||
def setup(self, context, entity, path, adapter, **kwargs):
|
||||
"""Called by Query for the purposes of constructing a SQL statement.
|
||||
|
||||
Each MapperProperty associated with the target mapper processes the
|
||||
statement referenced by the query context, adding columns and/or
|
||||
criterion as appropriate.
|
||||
|
||||
"""
|
||||
|
||||
def create_row_processor(self, context, path,
|
||||
mapper, result, adapter, populators):
|
||||
"""Produce row processing functions and append to the given
|
||||
set of populators lists.
|
||||
|
||||
"""
|
||||
|
||||
def cascade_iterator(self, type_, state, visited_instances=None,
|
||||
halt_on=None):
|
||||
"""Iterate through instances related to the given instance for
|
||||
a particular 'cascade', starting with this MapperProperty.
|
||||
|
||||
Return an iterator3-tuples (instance, mapper, state).
|
||||
|
||||
Note that the 'cascade' collection on this MapperProperty is
|
||||
checked first for the given type before cascade_iterator is called.
|
||||
|
||||
This method typically only applies to RelationshipProperty.
|
||||
|
||||
"""
|
||||
|
||||
return iter(())
|
||||
|
||||
def set_parent(self, parent, init):
|
||||
"""Set the parent mapper that references this MapperProperty.
|
||||
|
||||
This method is overridden by some subclasses to perform extra
|
||||
setup when the mapper is first known.
|
||||
|
||||
"""
|
||||
self.parent = parent
|
||||
|
||||
def instrument_class(self, mapper):
|
||||
"""Hook called by the Mapper to the property to initiate
|
||||
instrumentation of the class attribute managed by this
|
||||
MapperProperty.
|
||||
|
||||
The MapperProperty here will typically call out to the
|
||||
attributes module to set up an InstrumentedAttribute.
|
||||
|
||||
This step is the first of two steps to set up an InstrumentedAttribute,
|
||||
and is called early in the mapper setup process.
|
||||
|
||||
The second step is typically the init_class_attribute step,
|
||||
called from StrategizedProperty via the post_instrument_class()
|
||||
hook. This step assigns additional state to the InstrumentedAttribute
|
||||
(specifically the "impl") which has been determined after the
|
||||
MapperProperty has determined what kind of persistence
|
||||
management it needs to do (e.g. scalar, object, collection, etc).
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self._configure_started = False
|
||||
self._configure_finished = False
|
||||
|
||||
def init(self):
|
||||
"""Called after all mappers are created to assemble
|
||||
relationships between mappers and perform other post-mapper-creation
|
||||
initialization steps.
|
||||
|
||||
"""
|
||||
self._configure_started = True
|
||||
self.do_init()
|
||||
self._configure_finished = True
|
||||
|
||||
@property
|
||||
def class_attribute(self):
|
||||
"""Return the class-bound descriptor corresponding to this
|
||||
:class:`.MapperProperty`.
|
||||
|
||||
This is basically a ``getattr()`` call::
|
||||
|
||||
return getattr(self.parent.class_, self.key)
|
||||
|
||||
I.e. if this :class:`.MapperProperty` were named ``addresses``,
|
||||
and the class to which it is mapped is ``User``, this sequence
|
||||
is possible::
|
||||
|
||||
>>> from sqlalchemy import inspect
|
||||
>>> mapper = inspect(User)
|
||||
>>> addresses_property = mapper.attrs.addresses
|
||||
>>> addresses_property.class_attribute is User.addresses
|
||||
True
|
||||
>>> User.addresses.property is addresses_property
|
||||
True
|
||||
|
||||
|
||||
"""
|
||||
|
||||
return getattr(self.parent.class_, self.key)
|
||||
|
||||
def do_init(self):
|
||||
"""Perform subclass-specific initialization post-mapper-creation
|
||||
steps.
|
||||
|
||||
This is a template method called by the ``MapperProperty``
|
||||
object's init() method.
|
||||
|
||||
"""
|
||||
|
||||
def post_instrument_class(self, mapper):
|
||||
"""Perform instrumentation adjustments that need to occur
|
||||
after init() has completed.
|
||||
|
||||
The given Mapper is the Mapper invoking the operation, which
|
||||
may not be the same Mapper as self.parent in an inheritance
|
||||
scenario; however, Mapper will always at least be a sub-mapper of
|
||||
self.parent.
|
||||
|
||||
This method is typically used by StrategizedProperty, which delegates
|
||||
it to LoaderStrategy.init_class_attribute() to perform final setup
|
||||
on the class-bound InstrumentedAttribute.
|
||||
|
||||
"""
|
||||
|
||||
def merge(self, session, source_state, source_dict, dest_state,
          dest_dict, load, _recursive):
    """Merge the attribute represented by this ``MapperProperty``
    from source to destination object.

    """
    # no-op template method; concrete property types implement the
    # actual state transfer used by Session.merge()
|
||||
|
||||
def __repr__(self):
    """Render a debugging representation including class, id and key."""
    key = getattr(self, 'key', 'no key')
    return '<%s at 0x%x; %s>' % (self.__class__.__name__, id(self), key)
|
||||
|
||||
|
||||
class PropComparator(operators.ColumnOperators):
    """Defines SQL operators for :class:`.MapperProperty` objects.

    SQLAlchemy allows for operators to
    be redefined at both the Core and ORM level.  :class:`.PropComparator`
    is the base class of operator redefinition for ORM-level operations,
    including those of :class:`.ColumnProperty`,
    :class:`.RelationshipProperty`, and :class:`.CompositeProperty`.

    .. note:: With the advent of Hybrid properties introduced in SQLAlchemy
       0.7, as well as Core-level operator redefinition in
       SQLAlchemy 0.8, the use case for user-defined :class:`.PropComparator`
       instances is extremely rare.  See :ref:`hybrids_toplevel` as well
       as :ref:`types_operators`.

    User-defined subclasses of :class:`.PropComparator` may be created. The
    built-in Python comparison and math operator methods, such as
    :meth:`.operators.ColumnOperators.__eq__`,
    :meth:`.operators.ColumnOperators.__lt__`, and
    :meth:`.operators.ColumnOperators.__add__`, can be overridden to provide
    new operator behavior. The custom :class:`.PropComparator` is passed to
    the :class:`.MapperProperty` instance via the ``comparator_factory``
    argument. In each case,
    the appropriate subclass of :class:`.PropComparator` should be used::

        # definition of custom PropComparator subclasses

        from sqlalchemy.orm.properties import \\
                                ColumnProperty,\\
                                CompositeProperty,\\
                                RelationshipProperty

        class MyColumnComparator(ColumnProperty.Comparator):
            def __eq__(self, other):
                return self.__clause_element__() == other

        class MyRelationshipComparator(RelationshipProperty.Comparator):
            def any(self, expression):
                "define the 'any' operation"
                # ...

        class MyCompositeComparator(CompositeProperty.Comparator):
            def __gt__(self, other):
                "redefine the 'greater than' operation"

                return sql.and_(*[a>b for a, b in
                                  zip(self.__clause_element__().clauses,
                                      other.__composite_values__())])


        # application of custom PropComparator subclasses

        from sqlalchemy.orm import column_property, relationship, composite
        from sqlalchemy import Column, String

        class SomeMappedClass(Base):
            some_column = column_property(Column("some_column", String),
                                comparator_factory=MyColumnComparator)

            some_relationship = relationship(SomeOtherClass,
                                comparator_factory=MyRelationshipComparator)

            some_composite = composite(
                    Column("a", String), Column("b", String),
                    comparator_factory=MyCompositeComparator
                )

    Note that for column-level operator redefinition, it's usually
    simpler to define the operators at the Core level, using the
    :attr:`.TypeEngine.comparator_factory` attribute.  See
    :ref:`types_operators` for more detail.

    See also:

    :class:`.ColumnProperty.Comparator`

    :class:`.RelationshipProperty.Comparator`

    :class:`.CompositeProperty.Comparator`

    :class:`.ColumnOperators`

    :ref:`types_operators`

    :attr:`.TypeEngine.comparator_factory`

    """

    # no per-instance __dict__; comparators are created in quantity
    __slots__ = 'prop', 'property', '_parententity', '_adapt_to_entity'

    def __init__(self, prop, parentmapper, adapt_to_entity=None):
        # ``prop`` and ``property`` are aliases for the owning
        # MapperProperty
        self.prop = self.property = prop
        # when adapting to an aliased entity, that entity takes the
        # role of the parent
        self._parententity = adapt_to_entity or parentmapper
        self._adapt_to_entity = adapt_to_entity

    def __clause_element__(self):
        # concrete comparators must produce a Core clause element
        raise NotImplementedError("%r" % self)

    def _query_clause_element(self):
        # hook point for subclasses that render differently in a Query
        # context than in a plain expression context
        return self.__clause_element__()

    def adapt_to_entity(self, adapt_to_entity):
        """Return a copy of this PropComparator which will use the given
        :class:`.AliasedInsp` to produce corresponding expressions.
        """
        return self.__class__(self.prop, self._parententity, adapt_to_entity)

    @property
    def _parentmapper(self):
        """legacy; this is renamed to _parententity to be
        compatible with QueryableAttribute."""
        return inspect(self._parententity).mapper

    @property
    def adapter(self):
        """Produce a callable that adapts column expressions
        to suit an aliased version of this comparator.

        """
        if self._adapt_to_entity is None:
            return None
        else:
            return self._adapt_to_entity._adapt_element

    @property
    def info(self):
        # info dict is shared with the underlying MapperProperty
        return self.property.info

    # the *_op staticmethods exist so that ``operate()`` receives a
    # hashable operator callable, mirroring Core operator dispatch

    @staticmethod
    def any_op(a, b, **kwargs):
        return a.any(b, **kwargs)

    @staticmethod
    def has_op(a, b, **kwargs):
        return a.has(b, **kwargs)

    @staticmethod
    def of_type_op(a, class_):
        return a.of_type(class_)

    def of_type(self, class_):
        """Redefine this object in terms of a polymorphic subclass.

        Returns a new PropComparator from which further criterion can be
        evaluated.

        e.g.::

            query.join(Company.employees.of_type(Engineer)).\\
               filter(Engineer.name=='foo')

        :param \class_: a class or mapper indicating that criterion will be
            against this specific subclass.


        """

        return self.operate(PropComparator.of_type_op, class_)

    def any(self, criterion=None, **kwargs):
        """Return true if this collection contains any member that meets the
        given criterion.

        The usual implementation of ``any()`` is
        :meth:`.RelationshipProperty.Comparator.any`.

        :param criterion: an optional ClauseElement formulated against the
          member class' table or attributes.

        :param \**kwargs: key/value pairs corresponding to member class
          attribute names which will be compared via equality to the
          corresponding values.

        """

        return self.operate(PropComparator.any_op, criterion, **kwargs)

    def has(self, criterion=None, **kwargs):
        """Return true if this element references a member which meets the
        given criterion.

        The usual implementation of ``has()`` is
        :meth:`.RelationshipProperty.Comparator.has`.

        :param criterion: an optional ClauseElement formulated against the
          member class' table or attributes.

        :param \**kwargs: key/value pairs corresponding to member class
          attribute names which will be compared via equality to the
          corresponding values.

        """

        return self.operate(PropComparator.has_op, criterion, **kwargs)
|
||||
|
||||
|
||||
class StrategizedProperty(MapperProperty):
    """A MapperProperty which uses selectable strategies to affect
    loading behavior.

    There is a single strategy selected by default.  Alternate
    strategies can be selected at Query time through the usage of
    ``StrategizedOption`` objects via the Query.options() method.

    The mechanics of StrategizedProperty are used for every Query
    invocation for every mapped attribute participating in that Query,
    to determine first how the attribute will be rendered in SQL
    and secondly how the attribute will retrieve a value from a result
    row and apply it to a mapped object.  The routines here are very
    performance-critical.

    """

    __slots__ = '_strategies', 'strategy'

    # overridden by subclasses to scope "*"-style loader options
    strategy_wildcard_key = None

    def _get_context_loader(self, context, path):
        """Locate the loader option in ``context.attributes`` that
        applies to this property along the given path, or None."""
        load = None

        # use EntityRegistry.__getitem__()->PropRegistry here so
        # that the path is stated in terms of our base
        search_path = dict.__getitem__(path, self)

        # search among: exact match, "attr.*", "default" strategy
        # if any.
        for path_key in (
                search_path._loader_key,
                search_path._wildcard_path_loader_key,
                search_path._default_path_loader_key
        ):
            if path_key in context.attributes:
                load = context.attributes[path_key]
                break

        return load

    def _get_strategy(self, key):
        """Return the LoaderStrategy instance for ``key``, creating and
        caching it (under both the key and the strategy class) on first
        use."""
        try:
            return self._strategies[key]
        except KeyError:
            cls = self._strategy_lookup(*key)
            # cache under both the tuple key and the class itself so
            # _get_strategy_by_cls() style lookups also hit the cache
            self._strategies[key] = self._strategies[
                cls] = strategy = cls(self)
            return strategy

    def _get_strategy_by_cls(self, cls):
        # a strategy class' first registered key is its canonical one
        return self._get_strategy(cls._strategy_keys[0])

    def setup(
            self, context, entity, path, adapter, **kwargs):
        # per-query strategy override, if one was given via options();
        # otherwise the property's default strategy
        loader = self._get_context_loader(context, path)
        if loader and loader.strategy:
            strat = self._get_strategy(loader.strategy)
        else:
            strat = self.strategy
        strat.setup_query(context, entity, path, loader, adapter, **kwargs)

    def create_row_processor(
            self, context, path, mapper,
            result, adapter, populators):
        # mirror of setup(): resolve strategy, then delegate
        loader = self._get_context_loader(context, path)
        if loader and loader.strategy:
            strat = self._get_strategy(loader.strategy)
        else:
            strat = self.strategy
        strat.create_row_processor(
            context, path, loader,
            mapper, result, adapter, populators)

    def do_init(self):
        self._strategies = {}
        self.strategy = self._get_strategy_by_cls(self.strategy_class)

    def post_instrument_class(self, mapper):
        # only install the class-level attribute once, for the primary
        # mapper, and only if another property hasn't already done so
        if not self.parent.non_primary and \
                not mapper.class_manager._attr_has_impl(self.key):
            self.strategy.init_class_attribute(mapper)

    # registry of {property class: {strategy key: strategy class}},
    # populated by the @strategy_for decorator
    _all_strategies = collections.defaultdict(dict)

    @classmethod
    def strategy_for(cls, **kw):
        """Class decorator registering a LoaderStrategy subclass for
        the given keyword-specified strategy key."""
        def decorate(dec_cls):
            # ensure each subclass of the strategy has its
            # own _strategy_keys collection
            if '_strategy_keys' not in dec_cls.__dict__:
                dec_cls._strategy_keys = []
            key = tuple(sorted(kw.items()))
            cls._all_strategies[cls][key] = dec_cls
            dec_cls._strategy_keys.append(key)
            return dec_cls
        return decorate

    @classmethod
    def _strategy_lookup(cls, *key):
        # walk the property class MRO so strategies registered for a
        # base property class apply to subclasses as well
        for prop_cls in cls.__mro__:
            if prop_cls in cls._all_strategies:
                strategies = cls._all_strategies[prop_cls]
                try:
                    return strategies[key]
                except KeyError:
                    pass
        raise Exception("can't locate strategy for %s %s" % (cls, key))
|
||||
|
||||
|
||||
class MapperOption(object):
    """Base class for objects that modify the behavior of a Query.

    Instances are passed to :meth:`.Query.options`; each subclass
    implements :meth:`.process_query` to apply its modification.

    """

    propagate_to_loaders = False
    """if True, indicate this option should be carried along
    to "secondary" Query objects produced during lazy loads
    or refresh operations.

    """

    def process_query(self, query):
        """Apply a modification to the given :class:`.Query`."""

    def process_query_conditionally(self, query):
        """Apply this option to a derived query, if applicable.

        Like :meth:`.process_query`, except the option is not required
        to take effect on the given query.  Invoked during lazy load
        and scalar refresh operations in order to carry options whose
        ``propagate_to_loaders`` flag is True from the original Query
        over to the secondary Query performing the load.

        """

        self.process_query(query)
|
||||
|
||||
|
||||
class LoaderStrategy(object):
    """Describe the loading behavior of a StrategizedProperty object.

    A ``LoaderStrategy`` participates in the query process at three
    points:

    * it configures the ``InstrumentedAttribute`` placed on the mapped
      class, which may include installing class-level callables that
      emit a SELECT on first access (i.e. a lazy load);

    * it processes the ``QueryContext`` while the statement is being
      constructed, where it may alter the SQL produced -- e.g. a simple
      column attribute adds its column to the selected list, a joined
      eager loader contributes JOIN clauses;

    * it produces "row processor" functions at result-fetching time,
      which populate a particular attribute on a particular mapped
      instance.

    """

    __slots__ = 'parent_property', 'is_class_level', 'parent', 'key'

    def __init__(self, parent):
        # ``parent`` is the owning StrategizedProperty; cache its
        # mapper and attribute key for quick access
        prop = parent
        self.parent_property = prop
        self.parent = prop.parent
        self.key = prop.key
        self.is_class_level = False

    def init_class_attribute(self, mapper):
        """Hook for class-level attribute setup; no-op by default."""
        pass

    def setup_query(self, context, entity, path, loadopt, adapter, **kwargs):
        """Establish column and other state for a given QueryContext.

        Fulfills the contract specified by MapperProperty.setup();
        StrategizedProperty delegates its setup() method directly here.

        """

    def create_row_processor(self, context, path, loadopt, mapper,
                             result, adapter, populators):
        """Establish row processing functions for a given QueryContext.

        Fulfills the contract specified by
        MapperProperty.create_row_processor(); StrategizedProperty
        delegates its create_row_processor() method directly here.

        """

    def __str__(self):
        return str(self.parent_property)
|
||||
669
lib/python3.7/site-packages/sqlalchemy/orm/loading.py
Normal file
669
lib/python3.7/site-packages/sqlalchemy/orm/loading.py
Normal file
|
|
@ -0,0 +1,669 @@
|
|||
# orm/loading.py
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""private module containing functions used to convert database
|
||||
rows into object instances and associated state.
|
||||
|
||||
the functions here are called primarily by Query, Mapper,
|
||||
as well as some of the attribute loading strategies.
|
||||
|
||||
"""
|
||||
from __future__ import absolute_import
|
||||
|
||||
from .. import util
|
||||
from . import attributes, exc as orm_exc
|
||||
from ..sql import util as sql_util
|
||||
from . import strategy_options
|
||||
|
||||
from .util import _none_set, state_str
|
||||
from .base import _SET_DEFERRED_EXPIRED, _DEFER_FOR_STATE
|
||||
from .. import exc as sa_exc
|
||||
import collections
|
||||
|
||||
# module-level counter: each call to instances() stamps the load with a
# fresh "runid", letting state objects tell apart rows seen during
# different load operations
_new_runid = util.counter()
|
||||
|
||||
|
||||
def instances(query, cursor, context):
    """Return an ORM result as an iterator.

    Fetches rows from ``cursor`` (all at once, or in chunks when
    ``Query.yield_per`` is in effect), converts each row via the
    per-entity row processors, optionally de-duplicates, and yields
    either single entities or keyed tuples.

    """

    context.runid = _new_runid()

    filter_fns = [ent.filter_fn for ent in query._entities]
    # NOTE: this membership test against the builtin ``id`` is
    # deliberate -- entities that require uniquing use ``id`` as their
    # filter function
    filtered = id in filter_fns

    single_entity = len(query._entities) == 1 and \
        query._entities[0].supports_single_entity

    if filtered:
        if single_entity:
            filter_fn = id
        else:
            def filter_fn(row):
                return tuple(fn(x) for x, fn in zip(row, filter_fns))

    try:
        (process, labels) = \
            list(zip(*[
                query_entity.row_processor(query,
                                           context, cursor)
                for query_entity in query._entities
            ]))

        if not single_entity:
            keyed_tuple = util.lightweight_named_tuple('result', labels)

        while True:
            # reset per-chunk partial-population bookkeeping
            context.partials = {}

            if query._yield_per:
                fetch = cursor.fetchmany(query._yield_per)
                if not fetch:
                    break
            else:
                fetch = cursor.fetchall()

            if single_entity:
                proc = process[0]
                rows = [proc(row) for row in fetch]
            else:
                rows = [keyed_tuple([proc(row) for proc in process])
                        for row in fetch]

            if filtered:
                rows = util.unique_list(rows, filter_fn)

            for row in rows:
                yield row

            # without yield_per, everything was fetched in one pass
            if not query._yield_per:
                break
    except Exception as err:
        # ensure the DBAPI cursor is released before re-raising with
        # original traceback context
        cursor.close()
        util.raise_from_cause(err)
|
||||
|
||||
|
||||
@util.dependencies("sqlalchemy.orm.query")
def merge_result(querylib, query, iterator, load=True):
    """Merge a result into this :class:`.Query` object's Session.

    Each mapped instance in ``iterator`` is merged into the Session
    via ``Session._merge()``; non-entity columns pass through
    unchanged.  Returns an iterator of the merged result.

    """

    session = query.session
    if load:
        # flush current contents if we expect to load data
        session._autoflush()

    # autoflush must be disabled during the merge so that the merge
    # operations themselves don't trigger flushes; restore afterwards
    autoflush = session.autoflush
    try:
        session.autoflush = False
        single_entity = len(query._entities) == 1
        if single_entity:
            if isinstance(query._entities[0], querylib._MapperEntity):
                result = [session._merge(
                    attributes.instance_state(instance),
                    attributes.instance_dict(instance),
                    load=load, _recursive={})
                    for instance in iterator]
            else:
                # plain column result; nothing to merge
                result = list(iterator)
        else:
            # mixed tuples: merge only the positions holding mapped
            # entities, rebuild each row as a keyed tuple
            mapped_entities = [i for i, e in enumerate(query._entities)
                               if isinstance(e, querylib._MapperEntity)]
            result = []
            keys = [ent._label_name for ent in query._entities]
            keyed_tuple = util.lightweight_named_tuple('result', keys)
            for row in iterator:
                newrow = list(row)
                for i in mapped_entities:
                    if newrow[i] is not None:
                        newrow[i] = session._merge(
                            attributes.instance_state(newrow[i]),
                            attributes.instance_dict(newrow[i]),
                            load=load, _recursive={})
                result.append(keyed_tuple(newrow))

        return iter(result)
    finally:
        session.autoflush = autoflush
|
||||
|
||||
|
||||
def get_from_identity(session, key, passive):
    """Look up the given key in the given session's identity map,
    check the object for expired state if found.

    Returns the instance, ``attributes.PASSIVE_NO_RESULT`` when the
    instance is expired but SQL is not permitted, or None when the key
    is absent or the row no longer exists.

    """
    instance = session.identity_map.get(key)
    if instance is None:
        return None

    state = attributes.instance_state(instance)

    if not state.expired:
        return instance

    # expired - ensure it still exists
    if not passive & attributes.SQL_OK:
        # TODO: no coverage here
        return attributes.PASSIVE_NO_RESULT

    if not passive & attributes.RELATED_OBJECT_OK:
        # this mode is used within a flush and the instance's
        # expired state will be checked soon enough, if necessary
        return instance

    try:
        state._load_expired(state, passive)
    except orm_exc.ObjectDeletedError:
        # row is gone; expunge from the session's "deleted" tracking
        session._remove_newly_deleted([state])
        return None
    return instance
|
||||
|
||||
|
||||
def load_on_ident(query, key,
                  refresh_state=None, lockmode=None,
                  only_load_props=None):
    """Load the given identity key from the database.

    Builds a criterion against the mapper's primary key columns from
    ``key`` and executes it via a clone of ``query``; returns the
    instance or None when no row is found.

    """

    if key is not None:
        # key is (class, pk-value tuple); only the values are needed
        ident = key[1]
    else:
        ident = None

    if refresh_state is None:
        q = query._clone()
        q._get_condition()
    else:
        q = query._clone()

    if ident is not None:
        mapper = query._mapper_zero()

        # cached (criterion, bind-param map) for primary-key lookup
        (_get_clause, _get_params) = mapper._get_clause

        # None present in ident - turn those comparisons
        # into "IS NULL"
        if None in ident:
            nones = set([
                _get_params[col].key for col, value in
                zip(mapper.primary_key, ident) if value is None
            ])
            _get_clause = sql_util.adapt_criterion_to_null(
                _get_clause, nones)

        _get_clause = q._adapt_clause(_get_clause, True, False)
        q._criterion = _get_clause

        params = dict([
            (_get_params[primary_key].key, id_val)
            for id_val, primary_key in zip(ident, mapper.primary_key)
        ])

        q._params = params

    # FOR UPDATE / lock handling implies a version check on refresh
    if lockmode is not None:
        version_check = True
        q = q.with_lockmode(lockmode)
    elif query._for_update_arg is not None:
        version_check = True
        q._for_update_arg = query._for_update_arg
    else:
        version_check = False

    q._get_options(
        populate_existing=bool(refresh_state),
        version_check=version_check,
        only_load_props=only_load_props,
        refresh_state=refresh_state)
    # ORDER BY is meaningless for a single-identity fetch
    q._order_by = None

    try:
        return q.one()
    except orm_exc.NoResultFound:
        return None
|
||||
|
||||
|
||||
def _setup_entity_query(
        context, mapper, query_entity,
        path, adapter, column_collection,
        with_polymorphic=None, only_load_props=None,
        polymorphic_discriminator=None, **kw):
    """Run ``setup()`` for each property of ``mapper`` so each can add
    its columns / joins to the query, memoizing quick column populators
    along ``path`` for later use by _instance_processor()."""

    if with_polymorphic:
        poly_properties = mapper._iterate_polymorphic_properties(
            with_polymorphic)
    else:
        poly_properties = mapper._polymorphic_properties

    quick_populators = {}

    # store the dict on the path so _instance_processor() can retrieve
    # it under the same "memoized_setups" key
    path.set(
        context.attributes,
        "memoized_setups",
        quick_populators)

    for value in poly_properties:
        if only_load_props and \
                value.key not in only_load_props:
            continue
        value.setup(
            context,
            query_entity,
            path,
            adapter,
            only_load_props=only_load_props,
            column_collection=column_collection,
            memoized_populators=quick_populators,
            **kw
        )

    # an explicitly-passed discriminator column that differs from the
    # mapper's own polymorphic_on must be selected as well
    if polymorphic_discriminator is not None and \
            polymorphic_discriminator \
            is not mapper.polymorphic_on:

        if adapter:
            pd = adapter.columns[polymorphic_discriminator]
        else:
            pd = polymorphic_discriminator
        column_collection.append(pd)
|
||||
|
||||
|
||||
def _instance_processor(
        mapper, context, result, path, adapter,
        only_load_props=None, refresh_state=None,
        polymorphic_discriminator=None,
        _polymorphic_from=None):
    """Produce a mapper level row processor callable
    which processes rows into mapped instances."""

    # note that this method, most of which exists in a closure
    # called _instance(), resists being broken out, as
    # attempts to do so tend to add significant function
    # call overhead.  _instance() is the most
    # performance-critical section in the whole ORM.

    pk_cols = mapper.primary_key

    if adapter:
        pk_cols = [adapter.columns[c] for c in pk_cols]

    identity_class = mapper._identity_class

    # populators maps category name ("quick", "new", "expire",
    # "existing", "delayed", "eager") -> list of (key, fn) pairs
    populators = collections.defaultdict(list)

    props = mapper._prop_set
    if only_load_props is not None:
        props = props.intersection(
            mapper._props[k] for k in only_load_props)

    # column populators memoized by _setup_entity_query() along this path
    quick_populators = path.get(
        context.attributes, "memoized_setups", _none_set)

    for prop in props:
        if prop in quick_populators:
            # this is an inlined path just for column-based attributes.
            col = quick_populators[prop]
            if col is _DEFER_FOR_STATE:
                populators["new"].append(
                    (prop.key, prop._deferred_column_loader))
            elif col is _SET_DEFERRED_EXPIRED:
                # note that in this path, we are no longer
                # searching in the result to see if the column might
                # be present in some unexpected way.
                populators["expire"].append((prop.key, False))
            else:
                if adapter:
                    col = adapter.columns[col]
                getter = result._getter(col)
                if getter:
                    populators["quick"].append((prop.key, getter))
                else:
                    # fall back to the ColumnProperty itself, which
                    # will iterate through all of its columns
                    # to see if one fits
                    prop.create_row_processor(
                        context, path, mapper, result, adapter, populators)
        else:
            prop.create_row_processor(
                context, path, mapper, result, adapter, populators)

    propagate_options = context.propagate_options
    if propagate_options:
        load_path = context.query._current_path + path \
            if context.query._current_path.path else path

    # hoist frequently used attribute lookups into locals; _instance()
    # runs once per row
    session_identity_map = context.session.identity_map

    populate_existing = context.populate_existing or mapper.always_refresh
    load_evt = bool(mapper.class_manager.dispatch.load)
    refresh_evt = bool(mapper.class_manager.dispatch.refresh)
    instance_state = attributes.instance_state
    instance_dict = attributes.instance_dict
    session_id = context.session.hash_key
    version_check = context.version_check
    runid = context.runid

    if refresh_state:
        refresh_identity_key = refresh_state.key
        if refresh_identity_key is None:
            # super-rare condition; a refresh is being called
            # on a non-instance-key instance; this is meant to only
            # occur within a flush()
            refresh_identity_key = \
                mapper._identity_key_from_state(refresh_state)
    else:
        refresh_identity_key = None

    # choose the "row has no usable primary key" predicate: with
    # partial pks allowed, reject only when *all* pk values are None;
    # otherwise reject when *any* is None
    if mapper.allow_partial_pks:
        is_not_primary_key = _none_set.issuperset
    else:
        is_not_primary_key = _none_set.intersection

    def _instance(row):

        # determine the state that we'll be populating
        if refresh_identity_key:
            # fixed state that we're refreshing
            state = refresh_state
            instance = state.obj()
            dict_ = instance_dict(instance)
            isnew = state.runid != runid
            currentload = True
            loaded_instance = False
        else:
            # look at the row, see if that identity is in the
            # session, or we have to create a new one
            identitykey = (
                identity_class,
                tuple([row[column] for column in pk_cols])
            )

            instance = session_identity_map.get(identitykey)

            if instance is not None:
                # existing instance
                state = instance_state(instance)
                dict_ = instance_dict(instance)

                isnew = state.runid != runid
                currentload = not isnew
                loaded_instance = False

                if version_check and not currentload:
                    _validate_version_id(mapper, state, dict_, row, adapter)

            else:
                # create a new instance

                # check for non-NULL values in the primary key columns,
                # else no entity is returned for the row
                if is_not_primary_key(identitykey[1]):
                    return None

                isnew = True
                currentload = True
                loaded_instance = True

                instance = mapper.class_manager.new_instance()

                dict_ = instance_dict(instance)
                state = instance_state(instance)
                state.key = identitykey

                # attach instance to session.
                state.session_id = session_id
                session_identity_map._add_unpresent(state, identitykey)

        # populate.  this looks at whether this state is new
        # for this load or was existing, and whether or not this
        # row is the first row with this identity.
        if currentload or populate_existing:
            # full population routines.  Objects here are either
            # just created, or we are doing a populate_existing

            if isnew and propagate_options:
                state.load_options = propagate_options
                state.load_path = load_path

            _populate_full(
                context, row, state, dict_, isnew,
                loaded_instance, populate_existing, populators)

            if isnew:
                # fire load/refresh events, then commit the attribute
                # state so the object starts out "clean"
                if loaded_instance and load_evt:
                    state.manager.dispatch.load(state, context)
                elif refresh_evt:
                    state.manager.dispatch.refresh(
                        state, context, only_load_props)

                if populate_existing or state.modified:
                    if refresh_state and only_load_props:
                        state._commit(dict_, only_load_props)
                    else:
                        state._commit_all(dict_, session_identity_map)

        else:
            # partial population routines, for objects that were already
            # in the Session, but a row matches them; apply eager loaders
            # on existing objects, etc.
            unloaded = state.unloaded
            isnew = state not in context.partials

            if not isnew or unloaded or populators["eager"]:
                # state is having a partial set of its attributes
                # refreshed.  Populate those attributes,
                # and add to the "context.partials" collection.

                to_load = _populate_partial(
                    context, row, state, dict_, isnew,
                    unloaded, populators)

                if isnew:
                    if refresh_evt:
                        state.manager.dispatch.refresh(
                            state, context, to_load)

                    state._commit(dict_, to_load)

        return instance

    if mapper.polymorphic_map and not _polymorphic_from and not refresh_state:
        # if we are doing polymorphic, dispatch to a different _instance()
        # method specific to the subclass mapper
        _instance = _decorate_polymorphic_switch(
            _instance, context, mapper, result, path,
            polymorphic_discriminator, adapter)

    return _instance
|
||||
|
||||
|
||||
def _populate_full(
        context, row, state, dict_, isnew,
        loaded_instance, populate_existing, populators):
    """Apply the full set of populators for a row to ``state``.

    On the first row for an identity ("isnew"), run quick getters,
    expiration handling, and the "new"/"delayed" populators; on
    subsequent rows run only the "existing" populators (eager-load
    accumulation).

    """
    if isnew:
        # first time we are seeing a row with this identity.
        state.runid = context.runid

        for key, getter in populators["quick"]:
            dict_[key] = getter(row)
        if populate_existing:
            for key, set_callable in populators["expire"]:
                # remove any stale loaded value before (optionally)
                # marking the attribute expired
                dict_.pop(key, None)
                if set_callable:
                    state.expired_attributes.add(key)
        else:
            for key, set_callable in populators["expire"]:
                if set_callable:
                    state.expired_attributes.add(key)
        for key, populator in populators["new"]:
            populator(state, dict_, row)
        for key, populator in populators["delayed"]:
            populator(state, dict_, row)
    else:
        # have already seen rows with this identity.
        for key, populator in populators["existing"]:
            populator(state, dict_, row)
|
||||
|
||||
|
||||
def _populate_partial(
        context, row, state, dict_, isnew,
        unloaded, populators):
    """Populate only a subset of attributes on an already-present state.

    Returns the set of attribute keys that were targeted ("to_load"),
    which is also recorded in ``context.partials`` so later rows for
    the same state touch the same subset.

    """
    if not isnew:
        # continuation rows: only run "existing" populators for the
        # attribute subset established by the first row
        to_load = context.partials[state]
        for key, populator in populators["existing"]:
            if key in to_load:
                populator(state, dict_, row)
    else:
        # first row for this state in this load: target its currently
        # unloaded attributes
        to_load = unloaded
        context.partials[state] = to_load

        for key, getter in populators["quick"]:
            if key in to_load:
                dict_[key] = getter(row)
        for key, set_callable in populators["expire"]:
            if key in to_load:
                dict_.pop(key, None)
                if set_callable:
                    state.expired_attributes.add(key)
        for key, populator in populators["new"]:
            if key in to_load:
                populator(state, dict_, row)
        for key, populator in populators["delayed"]:
            if key in to_load:
                populator(state, dict_, row)
    # eager populators apply to attributes that are already loaded
    for key, populator in populators["eager"]:
        if key not in unloaded:
            populator(state, dict_, row)

    return to_load
|
||||
|
||||
|
||||
def _validate_version_id(mapper, state, dict_, row, adapter):
    """Raise StaleDataError when the in-memory version id held by
    ``state`` does not match the version id present in ``row``."""

    version_id_col = mapper.version_id_col
    if version_id_col is None:
        # versioning not configured for this mapper
        return

    if adapter:
        version_id_col = adapter.columns[version_id_col]

    if mapper._get_state_attr_by_column(
            state, dict_, mapper.version_id_col) == row[version_id_col]:
        return

    raise orm_exc.StaleDataError(
        "Instance '%s' has version id '%s' which "
        "does not match database-loaded version id '%s'."
        % (state_str(state), mapper._get_state_attr_by_column(
            state, dict_, mapper.version_id_col),
            row[version_id_col]))
|
||||
|
||||
|
||||
def _decorate_polymorphic_switch(
|
||||
instance_fn, context, mapper, result, path,
|
||||
polymorphic_discriminator, adapter):
|
||||
if polymorphic_discriminator is not None:
|
||||
polymorphic_on = polymorphic_discriminator
|
||||
else:
|
||||
polymorphic_on = mapper.polymorphic_on
|
||||
if polymorphic_on is None:
|
||||
return instance_fn
|
||||
|
||||
if adapter:
|
||||
polymorphic_on = adapter.columns[polymorphic_on]
|
||||
|
||||
def configure_subclass_mapper(discriminator):
|
||||
try:
|
||||
sub_mapper = mapper.polymorphic_map[discriminator]
|
||||
except KeyError:
|
||||
raise AssertionError(
|
||||
"No such polymorphic_identity %r is defined" %
|
||||
discriminator)
|
||||
else:
|
||||
if sub_mapper is mapper:
|
||||
return None
|
||||
|
||||
return _instance_processor(
|
||||
sub_mapper, context, result,
|
||||
path, adapter, _polymorphic_from=mapper)
|
||||
|
||||
polymorphic_instances = util.PopulateDict(
|
||||
configure_subclass_mapper
|
||||
)
|
||||
|
||||
def polymorphic_instance(row):
|
||||
discriminator = row[polymorphic_on]
|
||||
if discriminator is not None:
|
||||
_instance = polymorphic_instances[discriminator]
|
||||
if _instance:
|
||||
return _instance(row)
|
||||
return instance_fn(row)
|
||||
return polymorphic_instance
|
||||
|
||||
|
||||
def load_scalar_attributes(mapper, state, attribute_names):
    """initiate a column-based attribute refresh operation.

    Emits a SELECT for the given ``attribute_names`` against the row
    identified by ``state``, re-populating the instance.  Raises
    ``DetachedInstanceError`` if the instance has no session, and
    ``ObjectDeletedError`` if a persistent instance's row is gone.
    """

    # assert mapper is _state_mapper(state)
    session = state.session
    if not session:
        raise orm_exc.DetachedInstanceError(
            "Instance %s is not bound to a Session; "
            "attribute refresh operation cannot proceed" %
            (state_str(state)))

    # has_key == the instance is persistent (has an identity key)
    has_key = bool(state.key)

    # False = "no load attempted yet"; load_on_ident may later return
    # None ("no row found") which is distinct from False.
    result = False

    if mapper.inherits and not mapper.concrete:
        # because we are using Core to produce a select() that we
        # pass to the Query, we aren't calling setup() for mapped
        # attributes; in 1.0 this means deferred attrs won't get loaded
        # by default
        statement = mapper._optimized_get_statement(state, attribute_names)
        if statement is not None:
            result = load_on_ident(
                session.query(mapper).
                options(
                    strategy_options.Load(mapper).undefer("*")
                ).from_statement(statement),
                None,
                only_load_props=attribute_names,
                refresh_state=state
            )

    if result is False:
        if has_key:
            identity_key = state.key
        else:
            # this codepath is rare - only valid when inside a flush, and the
            # object is becoming persistent but hasn't yet been assigned
            # an identity_key.

            # check here to ensure we have the attrs we need.
            pk_attrs = [mapper._columntoproperty[col].key
                        for col in mapper.primary_key]
            if state.expired_attributes.intersection(pk_attrs):
                raise sa_exc.InvalidRequestError(
                    "Instance %s cannot be refreshed - it's not "
                    " persistent and does not "
                    "contain a full primary key." % state_str(state))
            identity_key = mapper._identity_key_from_state(state)

        # all-None PK, or partial-None PK without allow_partial_pks:
        # warn and skip rather than emit an unusable SELECT
        if (_none_set.issubset(identity_key) and
                not mapper.allow_partial_pks) or \
                _none_set.issuperset(identity_key):
            util.warn_limited(
                "Instance %s to be refreshed doesn't "
                "contain a full primary key - can't be refreshed "
                "(and shouldn't be expired, either).",
                state_str(state))
            return

        result = load_on_ident(
            session.query(mapper),
            identity_key,
            refresh_state=state,
            only_load_props=attribute_names)

    # if instance is pending, a refresh operation
    # may not complete (even if PK attributes are assigned)
    if has_key and result is None:
        raise orm_exc.ObjectDeletedError(state)
|
||||
2909
lib/python3.7/site-packages/sqlalchemy/orm/mapper.py
Normal file
2909
lib/python3.7/site-packages/sqlalchemy/orm/mapper.py
Normal file
File diff suppressed because it is too large
Load diff
291
lib/python3.7/site-packages/sqlalchemy/orm/path_registry.py
Normal file
291
lib/python3.7/site-packages/sqlalchemy/orm/path_registry.py
Normal file
|
|
@ -0,0 +1,291 @@
|
|||
# orm/path_registry.py
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
"""Path tracking utilities, representing mapper graph traversals.
|
||||
|
||||
"""
|
||||
|
||||
from .. import inspection
|
||||
from .. import util
|
||||
from .. import exc
|
||||
from itertools import chain
|
||||
from .base import class_mapper
|
||||
import logging
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _unreduce_path(path):
    """Reconstruct a :class:`.PathRegistry` from its serialized form.

    Used as the callable side of :meth:`.PathRegistry.__reduce__` so
    that path objects can round-trip through pickle.
    """
    return PathRegistry.deserialize(path)
|
||||
|
||||
|
||||
_WILDCARD_TOKEN = "*"
|
||||
_DEFAULT_TOKEN = "_sa_default"
|
||||
|
||||
|
||||
class PathRegistry(object):
    """Represent query load paths and registry functions.

    Basically represents structures like:

    (<User mapper>, "orders", <Order mapper>, "items", <Item mapper>)

    These structures are generated by things like
    query options (joinedload(), subqueryload(), etc.) and are
    used to compose keys stored in the query._attributes dictionary
    for various options.

    They are then re-composed at query compile/result row time as
    the query is formed and as rows are fetched, where they again
    serve to compose keys to look up options in the context.attributes
    dictionary, which is copied from query._attributes.

    The path structure has a limited amount of caching, where each
    "root" ultimately pulls from a fixed registry associated with
    the first mapper, that also contains elements for each of its
    property keys.  However paths longer than two elements, which
    are the exception rather than the rule, are generated on an
    as-needed basis.

    """

    # subclass discriminator flags; overridden by TokenRegistry and
    # RootRegistry respectively
    is_token = False
    is_root = False

    def __eq__(self, other):
        # equality is based solely on the raw ``path`` tuple; registries
        # are keyed in dictionaries by that tuple, not by the registry
        # objects themselves.
        return other is not None and \
            self.path == other.path

    def set(self, attributes, key, value):
        # store ``value`` under the composite (key, path) entry
        log.debug("set '%s' on path '%s' to '%s'", key, self, value)
        attributes[(key, self.path)] = value

    def setdefault(self, attributes, key, value):
        log.debug("setdefault '%s' on path '%s' to '%s'", key, self, value)
        attributes.setdefault((key, self.path), value)

    def get(self, attributes, key, value=None):
        # ``value`` is the default returned when (key, path) is absent
        key = (key, self.path)
        if key in attributes:
            return attributes[key]
        else:
            return value

    def __len__(self):
        return len(self.path)

    @property
    def length(self):
        # alias of len(); kept for callers that prefer attribute access
        return len(self.path)

    def pairs(self):
        # yield (entity, property) 2-tuples from the flat alternating path
        path = self.path
        for i in range(0, len(path), 2):
            yield path[i], path[i + 1]

    def contains_mapper(self, mapper):
        # True if any entity element of the path is, or inherits from,
        # the given mapper
        for path_mapper in [
            self.path[i] for i in range(0, len(self.path), 2)
        ]:
            if path_mapper.is_mapper and \
                    path_mapper.isa(mapper):
                return True
        else:
            return False

    def contains(self, attributes, key):
        return (key, self.path) in attributes

    def __reduce__(self):
        # pickle support: round-trip through serialize()/deserialize()
        return _unreduce_path, (self.serialize(), )

    def serialize(self):
        # convert to [(class, prop_key), ...]; the final entity has no
        # following property, so it pairs with None
        path = self.path
        return list(zip(
            [m.class_ for m in [path[i] for i in range(0, len(path), 2)]],
            [path[i].key for i in range(1, len(path), 2)] + [None]
        ))

    @classmethod
    def deserialize(cls, path):
        if path is None:
            return None

        p = tuple(chain(*[(class_mapper(mcls),
                           class_mapper(mcls).attrs[key]
                           if key is not None else None)
                          for mcls, key in path]))
        if p and p[-1] is None:
            # drop the trailing None marker added by serialize()
            p = p[0:-1]
        return cls.coerce(p)

    @classmethod
    def per_mapper(cls, mapper):
        # first-level registry for a mapper, anchored at the root
        return EntityRegistry(
            cls.root, mapper
        )

    @classmethod
    def coerce(cls, raw):
        # rebuild a registry from a raw path tuple by indexing each
        # element down from the root
        return util.reduce(lambda prev, next: prev[next], raw, cls.root)

    def token(self, token):
        # wildcard tokens extend this path; the "default" token is
        # always anchored at the root
        if token.endswith(':' + _WILDCARD_TOKEN):
            return TokenRegistry(self, token)
        elif token.endswith(":" + _DEFAULT_TOKEN):
            return TokenRegistry(self.root, token)
        else:
            raise exc.ArgumentError("invalid token: %s" % token)

    def __add__(self, other):
        # concatenate paths by indexing each element of ``other``
        # onto self in turn
        return util.reduce(
            lambda prev, next: prev[next],
            other.path, self)

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.path, )
|
||||
|
||||
|
||||
class RootRegistry(PathRegistry):
    """The anchor of all paths.

    A single instance lives at ``PathRegistry.root``; indexing it with
    a mapped entity defers to that entity's own per-mapper registry, so
    that path caches are maintained per-root-mapper.

    """

    is_root = True
    is_aliased_class = False
    has_entity = False
    path = ()

    def __getitem__(self, entity):
        # each entity caches its own first-level registry
        return entity._path_registry


PathRegistry.root = RootRegistry()
|
||||
|
||||
|
||||
class TokenRegistry(PathRegistry):
    """Terminal path element holding a wildcard/default token string."""

    has_entity = False
    is_token = True

    def __init__(self, parent, token):
        self.token = token
        self.parent = parent
        self.path = parent.path + (token,)

    def generate_for_superclasses(self):
        # an aliased or root parent has no mapper hierarchy to walk;
        # the token applies to this path only.
        parent = self.parent
        if parent.is_aliased_class or parent.is_root:
            yield self
            return
        for ent in parent.mapper.iterate_to_root():
            yield TokenRegistry(parent.parent[ent], self.token)

    def __getitem__(self, entity):
        # tokens terminate a path; they cannot be extended further
        raise NotImplementedError()
|
||||
|
||||
|
||||
class PropRegistry(PathRegistry):
    # Path element representing a MapperProperty, parented by an
    # entity element.

    def __init__(self, parent, prop):
        # restate this path in terms of the
        # given MapperProperty's parent.
        insp = inspection.inspect(parent[-1])
        if not insp.is_aliased_class or insp._use_mapper_path:
            parent = parent.parent[prop.parent]
        elif insp.is_aliased_class and insp.with_polymorphic_mappers:
            # an aliased with_polymorphic entity: when the property
            # belongs to one of the polymorphic sub-mappers, re-anchor
            # the path at the corresponding sub-entity
            if prop.parent is not insp.mapper and \
                    prop.parent in insp.with_polymorphic_mappers:
                subclass_entity = parent[-1]._entity_for_mapper(prop.parent)
                parent = parent.parent[subclass_entity]

        self.prop = prop
        self.parent = parent
        self.path = parent.path + (prop,)

    def __str__(self):
        return " -> ".join(
            str(elem) for elem in self.path
        )

    @util.memoized_property
    def has_entity(self):
        # relationship-like properties reference a target mapper
        return hasattr(self.prop, "mapper")

    @util.memoized_property
    def entity(self):
        # only valid when has_entity is True
        return self.prop.mapper

    @util.memoized_property
    def _wildcard_path_loader_key(self):
        """Given a path (mapper A, prop X), replace the prop with the wildcard,
        e.g. (mapper A, 'relationship:.*') or (mapper A, 'column:.*'), then
        return within the ("loader", path) structure.

        """
        return ("loader",
                self.parent.token(
                    "%s:%s" % (
                        self.prop.strategy_wildcard_key, _WILDCARD_TOKEN)
                ).path
                )

    @util.memoized_property
    def _default_path_loader_key(self):
        # like _wildcard_path_loader_key, but using the per-strategy
        # "default" token (anchored at the root by token())
        return ("loader",
                self.parent.token(
                    "%s:%s" % (self.prop.strategy_wildcard_key,
                               _DEFAULT_TOKEN)
                ).path
                )

    @util.memoized_property
    def _loader_key(self):
        # exact ("loader", path) key for this specific property path
        return ("loader", self.path)

    @property
    def mapper(self):
        return self.entity

    @property
    def entity_path(self):
        # extend this path with the property's target entity
        return self[self.entity]

    def __getitem__(self, entity):
        # integer / slice indexing addresses the raw path tuple;
        # anything else extends the path with an entity element
        if isinstance(entity, (int, slice)):
            return self.path[entity]
        else:
            return EntityRegistry(
                self, entity
            )
|
||||
|
||||
|
||||
class EntityRegistry(PathRegistry, dict):
    """Path element for a mapped entity.

    Acts as a dictionary of its child :class:`.PropRegistry` elements,
    keyed by MapperProperty and created lazily on first access.
    """

    has_entity = True
    is_aliased_class = False

    def __init__(self, parent, entity):
        self.key = entity
        self.parent = parent
        self.is_aliased_class = entity.is_aliased_class
        self.entity = entity
        self.path = parent.path + (entity,)
        # an entity element is already an "entity path"
        self.entity_path = self

    @property
    def mapper(self):
        return inspection.inspect(self.entity).mapper

    def __bool__(self):
        # a dict subclass would otherwise be falsy when empty
        return True

    __nonzero__ = __bool__

    def __getitem__(self, entity):
        # integer / slice indexing addresses the raw path tuple;
        # anything else is a child lookup in the dict
        if isinstance(entity, (int, slice)):
            return self.path[entity]
        return dict.__getitem__(self, entity)

    def __missing__(self, key):
        # lazily create and cache the PropRegistry for this property
        item = self[key] = PropRegistry(self, key)
        return item
|
||||
1408
lib/python3.7/site-packages/sqlalchemy/orm/persistence.py
Normal file
1408
lib/python3.7/site-packages/sqlalchemy/orm/persistence.py
Normal file
File diff suppressed because it is too large
Load diff
276
lib/python3.7/site-packages/sqlalchemy/orm/properties.py
Normal file
276
lib/python3.7/site-packages/sqlalchemy/orm/properties.py
Normal file
|
|
@ -0,0 +1,276 @@
|
|||
# orm/properties.py
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""MapperProperty implementations.
|
||||
|
||||
This is a private module which defines the behavior of invidual ORM-
|
||||
mapped attributes.
|
||||
|
||||
"""
|
||||
from __future__ import absolute_import
|
||||
|
||||
from .. import util, log
|
||||
from ..sql import expression
|
||||
from . import attributes
|
||||
from .util import _orm_full_deannotate
|
||||
|
||||
from .interfaces import PropComparator, StrategizedProperty
|
||||
|
||||
__all__ = ['ColumnProperty', 'CompositeProperty', 'SynonymProperty',
|
||||
'ComparableProperty', 'RelationshipProperty']
|
||||
|
||||
|
||||
@log.class_logger
class ColumnProperty(StrategizedProperty):
    """Describes an object attribute that corresponds to a table column.

    Public constructor is the :func:`.orm.column_property` function.

    """

    strategy_wildcard_key = 'column'

    __slots__ = (
        '_orig_columns', 'columns', 'group', 'deferred',
        'instrument', 'comparator_factory', 'descriptor', 'extension',
        'active_history', 'expire_on_flush', 'info', 'doc',
        'strategy_class', '_creation_order', '_is_polymorphic_discriminator',
        '_mapped_by_synonym', '_deferred_column_loader')

    def __init__(self, *columns, **kwargs):
        r"""Provide a column-level property for use with a Mapper.

        Column-based properties can normally be applied to the mapper's
        ``properties`` dictionary using the :class:`.Column` element directly.
        Use this function when the given column is not directly present within
        the mapper's selectable; examples include SQL expressions, functions,
        and scalar SELECT queries.

        Columns that aren't present in the mapper's selectable won't be
        persisted by the mapper and are effectively "read-only" attributes.

        :param \*cols:
            list of Column objects to be mapped.

        :param active_history=False:
            When ``True``, indicates that the "previous" value for a
            scalar attribute should be loaded when replaced, if not
            already loaded.  Normally, history tracking logic for
            simple non-primary-key scalar values only needs to be
            aware of the "new" value in order to perform a flush.  This
            flag is available for applications that make use of
            :func:`.attributes.get_history` or :meth:`.Session.is_modified`
            which also need to know
            the "previous" value of the attribute.

            .. versionadded:: 0.6.6

        :param comparator_factory: a class which extends
            :class:`.ColumnProperty.Comparator` which provides custom SQL
            clause generation for comparison operations.

        :param group:
            a group name for this property when marked as deferred.

        :param deferred:
            when True, the column property is "deferred", meaning that
            it does not load immediately, and is instead loaded when the
            attribute is first accessed on an instance.  See also
            :func:`~sqlalchemy.orm.deferred`.

        :param doc:
            optional string that will be applied as the doc on the
            class-bound descriptor.

        :param expire_on_flush=True:
            Disable expiry on flush.   A column_property() which refers
            to a SQL expression (and not a single table-bound column)
            is considered to be a "read only" property; populating it
            has no effect on the state of data, and it can only return
            database state.   For this reason a column_property()'s value
            is expired whenever the parent object is involved in a
            flush, that is, has any kind of "dirty" state within a flush.
            Setting this parameter to ``False`` will have the effect of
            leaving any existing value present after the flush proceeds.
            Note however that the :class:`.Session` with default expiration
            settings still expires
            all attributes after a :meth:`.Session.commit` call, however.

            .. versionadded:: 0.7.3

        :param info: Optional data dictionary which will be populated into the
            :attr:`.MapperProperty.info` attribute of this object.

            .. versionadded:: 0.8

        :param extension:
            an
            :class:`.AttributeExtension`
            instance, or list of extensions, which will be prepended
            to the list of attribute listeners for the resulting
            descriptor placed on the class.
            **Deprecated.** Please see :class:`.AttributeEvents`.

        """
        super(ColumnProperty, self).__init__()
        # keep both the original (possibly annotated) columns and a
        # de-annotated working copy
        self._orig_columns = [expression._labeled(c) for c in columns]
        self.columns = [expression._labeled(_orm_full_deannotate(c))
                        for c in columns]
        self.group = kwargs.pop('group', None)
        self.deferred = kwargs.pop('deferred', False)
        self.instrument = kwargs.pop('_instrument', True)
        self.comparator_factory = kwargs.pop('comparator_factory',
                                             self.__class__.Comparator)
        self.descriptor = kwargs.pop('descriptor', None)
        self.extension = kwargs.pop('extension', None)
        self.active_history = kwargs.pop('active_history', False)
        self.expire_on_flush = kwargs.pop('expire_on_flush', True)

        if 'info' in kwargs:
            self.info = kwargs.pop('info')

        if 'doc' in kwargs:
            self.doc = kwargs.pop('doc')
        else:
            # no explicit doc: fall back to the last mapped column that
            # carries a .doc attribute
            for col in reversed(self.columns):
                doc = getattr(col, 'doc', None)
                if doc is not None:
                    self.doc = doc
                    break
            else:
                self.doc = None

        # anything left in kwargs at this point is unrecognized
        if kwargs:
            raise TypeError(
                "%s received unexpected keyword argument(s): %s" % (
                    self.__class__.__name__,
                    ', '.join(sorted(kwargs.keys()))))

        util.set_creation_order(self)

        # loader strategy selected by the (deferred, instrument) flags
        self.strategy_class = self._strategy_lookup(
            ("deferred", self.deferred),
            ("instrument", self.instrument)
        )

    @util.dependencies("sqlalchemy.orm.state", "sqlalchemy.orm.strategies")
    def _memoized_attr__deferred_column_loader(self, state, strategies):
        # per-instance loader callable used when this column is expired
        # or deferred; dependencies injected to avoid import cycles
        return state.InstanceState._instance_level_callable_processor(
            self.parent.class_manager,
            strategies.LoadDeferredColumns(self.key), self.key)

    @property
    def expression(self):
        """Return the primary column or expression for this ColumnProperty.

        """
        return self.columns[0]

    def instrument_class(self, mapper):
        # install the class-bound descriptor, unless instrumentation
        # was disabled via _instrument=False
        if not self.instrument:
            return

        attributes.register_descriptor(
            mapper.class_,
            self.key,
            comparator=self.comparator_factory(self, mapper),
            parententity=mapper,
            doc=self.doc
        )

    def do_init(self):
        super(ColumnProperty, self).do_init()
        # warn when multiple distinct PK columns were silently merged
        # into one attribute
        if len(self.columns) > 1 and \
                set(self.parent.primary_key).issuperset(self.columns):
            util.warn(
                ("On mapper %s, primary key column '%s' is being combined "
                 "with distinct primary key column '%s' in attribute '%s'. "
                 "Use explicit properties to give each column its own mapped "
                 "attribute name.") % (self.parent, self.columns[1],
                                       self.columns[0], self.key))

    def copy(self):
        # produce an un-initialized copy carrying the user-level flags
        return ColumnProperty(
            deferred=self.deferred,
            group=self.group,
            active_history=self.active_history,
            *self.columns)

    def _getcommitted(self, state, dict_, column,
                      passive=attributes.PASSIVE_OFF):
        # return the last-flushed value for this attribute
        return state.get_impl(self.key).\
            get_committed_value(state, dict_, passive=passive)

    def merge(self, session, source_state, source_dict, dest_state,
              dest_dict, load, _recursive):
        # Session.merge() support: copy the value across, or expire the
        # destination attribute when the source value is absent
        if not self.instrument:
            return
        elif self.key in source_dict:
            value = source_dict[self.key]

            if not load:
                dest_dict[self.key] = value
            else:
                impl = dest_state.get_impl(self.key)
                impl.set(dest_state, dest_dict, value, None)
        elif dest_state.has_identity and self.key not in dest_dict:
            dest_state._expire_attributes(dest_dict, [self.key])

    class Comparator(util.MemoizedSlots, PropComparator):
        """Produce boolean, comparison, and other operators for
        :class:`.ColumnProperty` attributes.

        See the documentation for :class:`.PropComparator` for a brief
        overview.

        See also:

        :class:`.PropComparator`

        :class:`.ColumnOperators`

        :ref:`types_operators`

        :attr:`.TypeEngine.comparator_factory`

        """

        __slots__ = '__clause_element__', 'info'

        def _memoized_method___clause_element__(self):
            # resolve the SQL expression this comparator operates
            # against, adapted when aliased
            if self.adapter:
                return self.adapter(self.prop.columns[0])
            else:
                # no adapter, so we aren't aliased
                # assert self._parententity is self._parentmapper
                return self.prop.columns[0]._annotate({
                    "parententity": self._parententity,
                    "parentmapper": self._parententity})

        def _memoized_attr_info(self):
            # prefer the column's .info; fall back to the property's
            ce = self.__clause_element__()
            try:
                return ce.info
            except AttributeError:
                return self.prop.info

        def _fallback_getattr(self, key):
            """proxy attribute access down to the mapped column.

            this allows user-defined comparison methods to be accessed.
            """
            return getattr(self.__clause_element__(), key)

        def operate(self, op, *other, **kwargs):
            return op(self.__clause_element__(), *other, **kwargs)

        def reverse_operate(self, op, other, **kwargs):
            # reversed form: the bound parameter appears on the left
            col = self.__clause_element__()
            return op(col._bind_param(op, other), col, **kwargs)

    def __str__(self):
        return str(self.parent.class_.__name__) + "." + self.key
|
||||
4019
lib/python3.7/site-packages/sqlalchemy/orm/query.py
Normal file
4019
lib/python3.7/site-packages/sqlalchemy/orm/query.py
Normal file
File diff suppressed because it is too large
Load diff
2861
lib/python3.7/site-packages/sqlalchemy/orm/relationships.py
Normal file
2861
lib/python3.7/site-packages/sqlalchemy/orm/relationships.py
Normal file
File diff suppressed because it is too large
Load diff
184
lib/python3.7/site-packages/sqlalchemy/orm/scoping.py
Normal file
184
lib/python3.7/site-packages/sqlalchemy/orm/scoping.py
Normal file
|
|
@ -0,0 +1,184 @@
|
|||
# orm/scoping.py
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
from .. import exc as sa_exc
|
||||
from ..util import ScopedRegistry, ThreadLocalRegistry, warn
|
||||
from . import class_mapper, exc as orm_exc
|
||||
from .session import Session
|
||||
|
||||
|
||||
__all__ = ['scoped_session']
|
||||
|
||||
|
||||
class scoped_session(object):
    """Provides scoped management of :class:`.Session` objects.

    See :ref:`unitofwork_contextual` for a tutorial.

    """

    session_factory = None
    """The `session_factory` provided to `__init__` is stored in this
    attribute and may be accessed at a later time.  This can be useful when
    a new non-scoped :class:`.Session` or :class:`.Connection` to the
    database is needed."""

    def __init__(self, session_factory, scopefunc=None):
        """Construct a new :class:`.scoped_session`.

        :param session_factory: a factory to create new :class:`.Session`
         instances. This is usually, but not necessarily, an instance
         of :class:`.sessionmaker`.
        :param scopefunc: optional function which defines
         the current scope.   If not passed, the :class:`.scoped_session`
         object assumes "thread-local" scope, and will use
         a Python ``threading.local()`` in order to maintain the current
         :class:`.Session`.  If passed, the function should return
         a hashable token; this token will be used as the key in a
         dictionary in order to store and retrieve the current
         :class:`.Session`.

        """
        self.session_factory = session_factory

        # custom scopefunc -> generic keyed registry; otherwise
        # thread-local storage
        if scopefunc:
            self.registry = ScopedRegistry(session_factory, scopefunc)
        else:
            self.registry = ThreadLocalRegistry(session_factory)

    def __call__(self, **kw):
        r"""Return the current :class:`.Session`, creating it
        using the :attr:`.scoped_session.session_factory` if not present.

        :param \**kw: Keyword arguments will be passed to the
         :attr:`.scoped_session.session_factory` callable, if an existing
         :class:`.Session` is not present.  If the :class:`.Session` is present
         and keyword arguments have been passed,
         :exc:`~sqlalchemy.exc.InvalidRequestError` is raised.

        """
        if kw:
            # internal-use 'scope' keyword: scope=None bypasses the
            # registry entirely and returns a throwaway Session
            scope = kw.pop('scope', False)
            if scope is not None:
                if self.registry.has():
                    raise sa_exc.InvalidRequestError(
                        "Scoped session is already present; "
                        "no new arguments may be specified.")
                else:
                    sess = self.session_factory(**kw)
                    self.registry.set(sess)
                    return sess
            else:
                return self.session_factory(**kw)
        else:
            return self.registry()

    def remove(self):
        """Dispose of the current :class:`.Session`, if present.

        This will first call :meth:`.Session.close` method
        on the current :class:`.Session`, which releases any existing
        transactional/connection resources still being held; transactions
        specifically are rolled back.  The :class:`.Session` is then
        discarded.   Upon next usage within the same scope,
        the :class:`.scoped_session` will produce a new
        :class:`.Session` object.

        """

        if self.registry.has():
            self.registry().close()
        self.registry.clear()

    def configure(self, **kwargs):
        """reconfigure the :class:`.sessionmaker` used by this
        :class:`.scoped_session`.

        See :meth:`.sessionmaker.configure`.

        """

        # configuration changes won't affect sessions already created
        # in some scope; warn rather than fail
        if self.registry.has():
            warn('At least one scoped session is already present. '
                 ' configure() can not affect sessions that have '
                 'already been created.')

        self.session_factory.configure(**kwargs)

    def query_property(self, query_cls=None):
        """return a class property which produces a :class:`.Query` object
        against the class and the current :class:`.Session` when called.

        e.g.::

            Session = scoped_session(sessionmaker())

            class MyClass(object):
                query = Session.query_property()

            # after mappers are defined
            result = MyClass.query.filter(MyClass.name=='foo').all()

        Produces instances of the session's configured query class by
        default.  To override and use a custom implementation, provide
        a ``query_cls`` callable.  The callable will be invoked with
        the class's mapper as a positional argument and a session
        keyword argument.

        There is no limit to the number of query properties placed on
        a class.

        """
        class query(object):
            # descriptor: 's' is the descriptor instance; 'self' (the
            # scoped_session) and 'query_cls' come from the closure
            def __get__(s, instance, owner):
                try:
                    mapper = class_mapper(owner)
                    if mapper:
                        if query_cls:
                            # custom query class
                            return query_cls(mapper, session=self.registry())
                        else:
                            # session's configured query class
                            return self.registry().query(mapper)
                except orm_exc.UnmappedClassError:
                    # accessed on an unmapped class; yield None rather
                    # than raising at attribute-access time
                    return None
        return query()
|
||||
|
||||
ScopedSession = scoped_session
"""Old name for backwards compatibility."""


def instrument(name):
    # build a method that proxies ``name`` to the current
    # registry-managed Session
    def do(self, *args, **kwargs):
        return getattr(self.registry(), name)(*args, **kwargs)
    return do

# copy every public Session method onto scoped_session as a proxy
for meth in Session.public_methods:
    setattr(scoped_session, meth, instrument(meth))


def makeprop(name):
    # build a read/write property proxying attribute ``name`` on the
    # current registry-managed Session
    def set(self, attr):
        setattr(self.registry(), name, attr)

    def get(self):
        return getattr(self.registry(), name)

    return property(get, set)

for prop in ('bind', 'dirty', 'deleted', 'new', 'identity_map',
             'is_active', 'autoflush', 'no_autoflush', 'info'):
    setattr(scoped_session, prop, makeprop(prop))


def clslevel(name):
    # build a classmethod forwarding ``name`` to the Session class
    # itself (not an instance)
    def do(cls, *args, **kwargs):
        return getattr(Session, name)(*args, **kwargs)
    return classmethod(do)

for prop in ('close_all', 'object_session', 'identity_key'):
    setattr(scoped_session, prop, clslevel(prop))
|
||||
2820
lib/python3.7/site-packages/sqlalchemy/orm/session.py
Normal file
2820
lib/python3.7/site-packages/sqlalchemy/orm/session.py
Normal file
File diff suppressed because it is too large
Load diff
729
lib/python3.7/site-packages/sqlalchemy/orm/state.py
Normal file
729
lib/python3.7/site-packages/sqlalchemy/orm/state.py
Normal file
|
|
@ -0,0 +1,729 @@
|
|||
# orm/state.py
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Defines instrumentation of instances.
|
||||
|
||||
This module is usually not directly visible to user applications, but
|
||||
defines a large part of the ORM's interactivity.
|
||||
|
||||
"""
|
||||
|
||||
import weakref
|
||||
from .. import util
|
||||
from . import exc as orm_exc, interfaces
|
||||
from .path_registry import PathRegistry
|
||||
from .base import PASSIVE_NO_RESULT, SQL_OK, NEVER_SET, ATTR_WAS_SET, \
|
||||
NO_VALUE, PASSIVE_NO_INITIALIZE, INIT_OK, PASSIVE_OFF
|
||||
from . import base
|
||||
|
||||
|
||||
class InstanceState(interfaces.InspectionAttr):
    """tracks state information at the instance level.

    The :class:`.InstanceState` is a key object used by the
    SQLAlchemy ORM in order to track the state of an object;
    it is created the moment an object is instantiated, typically
    as a result of :term:`instrumentation` which SQLAlchemy applies
    to the ``__init__()`` method of the class.

    :class:`.InstanceState` is also a semi-public object,
    available for runtime inspection as to the state of a
    mapped instance, including information such as its current
    status within a particular :class:`.Session` and details
    about data on individual attributes.  The public API
    in order to acquire a :class:`.InstanceState` object
    is to use the :func:`.inspect` system::

        >>> from sqlalchemy import inspect
        >>> insp = inspect(some_mapped_object)

    .. seealso::

        :ref:`core_inspection_toplevel`

    """

    # Class-level defaults; an instance only stores these attributes
    # when they diverge from the default, keeping per-state memory low.
    session_id = None     # key of the owning Session in sessionlib._sessions
    key = None            # identity key tuple, once the object is persistent
    runid = None
    load_options = util.EMPTY_SET
    load_path = ()
    insert_order = None
    _strong_obj = None    # strong ref held while session-attached changes exist
    modified = False      # True when uncommitted attribute changes exist
    expired = False
    deleted = False
    _load_pending = False
    is_instance = True    # InspectionAttr marker

    callables = ()
    """A namespace where a per-state loader callable can be associated.

    In SQLAlchemy 1.0, this is only used for lazy loaders / deferred
    loaders that were set up via query option.

    Previously, callables was used also to indicate expired attributes
    by storing a link to the InstanceState itself in this dictionary.
    This role is now handled by the expired_attributes set.

    """

    def __init__(self, obj, manager):
        self.class_ = obj.__class__
        self.manager = manager
        # only a weak reference to the instance is kept; _cleanup fires
        # when the instance is garbage collected.
        self.obj = weakref.ref(obj, self._cleanup)
        self.committed_state = {}
        self.expired_attributes = set()

    expired_attributes = None
    """The set of keys which are 'expired' to be loaded by
       the manager's deferred scalar loader, assuming no pending
       changes.

       see also the ``unmodified`` collection which is intersected
       against this set when a refresh operation occurs."""

    @util.memoized_property
    def attrs(self):
        """Return a namespace representing each attribute on
        the mapped object, including its current value
        and history.

        The returned object is an instance of :class:`.AttributeState`.
        This object allows inspection of the current data
        within an attribute as well as attribute history
        since the last flush.

        """
        return util.ImmutableProperties(
            dict(
                (key, AttributeState(self, key))
                for key in self.manager
            )
        )

    @property
    def transient(self):
        """Return true if the object is :term:`transient`.

        .. seealso::

            :ref:`session_object_states`

        """
        # no identity key and not attached to any Session
        return self.key is None and \
            not self._attached

    @property
    def pending(self):
        """Return true if the object is :term:`pending`.


        .. seealso::

            :ref:`session_object_states`

        """
        # attached to a Session but not yet flushed (no identity key)
        return self.key is None and \
            self._attached

    @property
    def persistent(self):
        """Return true if the object is :term:`persistent`.

        .. seealso::

            :ref:`session_object_states`

        """
        return self.key is not None and \
            self._attached

    @property
    def detached(self):
        """Return true if the object is :term:`detached`.

        .. seealso::

            :ref:`session_object_states`

        """
        return self.key is not None and \
            not self._attached

    @property
    @util.dependencies("sqlalchemy.orm.session")
    def _attached(self, sessionlib):
        # "attached" means our session_id refers to a Session that still
        # exists in the module-global _sessions registry.
        return self.session_id is not None and \
            self.session_id in sessionlib._sessions

    @property
    @util.dependencies("sqlalchemy.orm.session")
    def session(self, sessionlib):
        """Return the owning :class:`.Session` for this instance,
        or ``None`` if none available.

        Note that the result here can in some cases be *different*
        from that of ``obj in session``; an object that's been deleted
        will report as not ``in session``, however if the transaction is
        still in progress, this attribute will still refer to that session.
        Only when the transaction is completed does the object become
        fully detached under normal circumstances.

        """
        return sessionlib._state_session(self)

    @property
    def object(self):
        """Return the mapped object represented by this
        :class:`.InstanceState`."""
        # may be None if the instance has been garbage collected
        return self.obj()

    @property
    def identity(self):
        """Return the mapped identity of the mapped object.
        This is the primary key identity as persisted by the ORM
        which can always be passed directly to
        :meth:`.Query.get`.

        Returns ``None`` if the object has no primary key identity.

        .. note::
            An object which is :term:`transient` or :term:`pending`
            does **not** have a mapped identity until it is flushed,
            even if its attributes include primary key values.

        """
        if self.key is None:
            return None
        else:
            # self.key is a tuple whose element 1 is the identity portion
            return self.key[1]

    @property
    def identity_key(self):
        """Return the identity key for the mapped object.

        This is the key used to locate the object within
        the :attr:`.Session.identity_map` mapping.  It contains
        the identity as returned by :attr:`.identity` within it.


        """
        # TODO: just change .key to .identity_key across
        # the board ?  probably
        return self.key

    @util.memoized_property
    def parents(self):
        # lazily-created storage; see also __getstate__/__setstate__
        return {}

    @util.memoized_property
    def _pending_mutations(self):
        # lazily-created mapping of key -> PendingCollection
        return {}

    @util.memoized_property
    def mapper(self):
        """Return the :class:`.Mapper` used for this mapped object."""
        return self.manager.mapper

    @property
    def has_identity(self):
        """Return ``True`` if this object has an identity key.

        This should always have the same value as the
        expression ``state.persistent or state.detached``.

        """
        return bool(self.key)

    def _detach(self):
        # drop session linkage and the strong reference that kept the
        # instance alive while it had pending changes.
        self.session_id = self._strong_obj = None

    def _dispose(self):
        self._detach()
        # removing the instance-level 'obj' weakref exposes the
        # class-level obj() fallback below, which returns None.
        del self.obj

    def _cleanup(self, ref):
        """Weakref callback cleanup.

        This callable cleans out the state when it is being garbage
        collected.

        this _cleanup **assumes** that there are no strong refs to us!
        Will not work otherwise!

        """
        # NOTE(review): _instance_dict appears to be shadowed at the
        # instance level when the state is placed in an identity map
        # (hence the ``del`` below) -- confirm against the identity map
        # implementation.
        instance_dict = self._instance_dict()
        if instance_dict is not None:
            instance_dict._fast_discard(self)
            del self._instance_dict

            # we can't possibly be in instance_dict._modified
            # b.c. this is weakref cleanup only, that set
            # is strong referencing!
            # assert self not in instance_dict._modified

        self.session_id = self._strong_obj = None
        del self.obj

    def obj(self):
        # class-level fallback once the instance-level weakref has been
        # deleted by _dispose() / _cleanup(); mimics a dead weakref.
        return None

    @property
    def dict(self):
        """Return the instance dict used by the object.

        Under normal circumstances, this is always synonymous
        with the ``__dict__`` attribute of the mapped object,
        unless an alternative instrumentation system has been
        configured.

        In the case that the actual object has been garbage
        collected, this accessor returns a blank dictionary.

        """
        o = self.obj()
        if o is not None:
            return base.instance_dict(o)
        else:
            return {}

    def _initialize_instance(*mixed, **kwargs):
        # deliberately declared without named positional parameters so
        # that user-level __init__ keyword arguments cannot collide
        # with our own parameter names.
        self, instance, args = mixed[0], mixed[1], mixed[2:]  # noqa
        manager = self.manager

        manager.dispatch.init(self, args, kwargs)

        try:
            return manager.original_init(*mixed[1:], **kwargs)
        except:
            # bare except is intentional: safe_reraise() re-raises the
            # original exception after init_failure listeners fire.
            with util.safe_reraise():
                manager.dispatch.init_failure(self, args, kwargs)

    def get_history(self, key, passive):
        """Return the History for attribute *key*, per the given
        passive flags."""
        return self.manager[key].impl.get_history(self, self.dict, passive)

    def get_impl(self, key):
        """Return the attribute implementation for *key*."""
        return self.manager[key].impl

    def _get_pending_mutation(self, key):
        # fetch-or-create the PendingCollection for an unloaded
        # collection attribute.
        if key not in self._pending_mutations:
            self._pending_mutations[key] = PendingCollection()
        return self._pending_mutations[key]

    def __getstate__(self):
        # pickle support: serialize a plain dict, replacing the weakref
        # with the actual instance object.
        state_dict = {'instance': self.obj()}
        state_dict.update(
            (k, self.__dict__[k]) for k in (
                'committed_state', '_pending_mutations', 'modified',
                'expired', 'callables', 'key', 'parents', 'load_options',
                'class_', 'expired_attributes'
            ) if k in self.__dict__
        )
        if self.load_path:
            state_dict['load_path'] = self.load_path.serialize()

        # manager._serialize returns a callable used on deserialize to
        # re-link this state with its ClassManager.
        state_dict['manager'] = self.manager._serialize(self, state_dict)

        return state_dict

    def __setstate__(self, state_dict):
        inst = state_dict['instance']
        if inst is not None:
            # re-establish the weakref + cleanup callback
            self.obj = weakref.ref(inst, self._cleanup)
            self.class_ = inst.__class__
        else:
            # None being possible here generally new as of 0.7.4
            # due to storage of state in "parents".  "class_"
            # also new.
            self.obj = None
            self.class_ = state_dict['class_']

        self.committed_state = state_dict.get('committed_state', {})
        self._pending_mutations = state_dict.get('_pending_mutations', {})
        self.parents = state_dict.get('parents', {})
        self.modified = state_dict.get('modified', False)
        self.expired = state_dict.get('expired', False)
        if 'callables' in state_dict:
            self.callables = state_dict['callables']

        try:
            self.expired_attributes = state_dict['expired_attributes']
        except KeyError:
            self.expired_attributes = set()
            # 0.9 and earlier compat: expired attributes were indicated
            # by self-referencing entries in "callables".
            for k in list(self.callables):
                if self.callables[k] is self:
                    self.expired_attributes.add(k)
                    del self.callables[k]

        self.__dict__.update([
            (k, state_dict[k]) for k in (
                'key', 'load_options',
            ) if k in state_dict
        ])

        if 'load_path' in state_dict:
            self.load_path = PathRegistry.\
                deserialize(state_dict['load_path'])

        # invoke the callable produced by manager._serialize() to
        # re-associate this state with its ClassManager.
        state_dict['manager'](self, inst, state_dict)

    def _reset(self, dict_, key):
        """Remove the given attribute and any
           callables associated with it."""

        old = dict_.pop(key, None)
        if old is not None and self.manager[key].impl.collection:
            # collections must be invalidated, not merely dropped
            self.manager[key].impl._invalidate_collection(old)
        self.expired_attributes.discard(key)
        if self.callables:
            self.callables.pop(key, None)

    @classmethod
    def _instance_level_callable_processor(cls, manager, fn, key):
        """Return a row processor which installs *fn* as a per-state
        loader callable for attribute *key*."""
        impl = manager[key].impl
        if impl.collection:
            def _set_callable(state, dict_, row):
                if 'callables' not in state.__dict__:
                    state.callables = {}
                # invalidate any existing collection before installing
                # the loader callable.
                old = dict_.pop(key, None)
                if old is not None:
                    impl._invalidate_collection(old)
                state.callables[key] = fn
        else:
            def _set_callable(state, dict_, row):
                if 'callables' not in state.__dict__:
                    state.callables = {}
                state.callables[key] = fn
        return _set_callable

    def _expire(self, dict_, modified_set):
        """Expire all attribute data; clear pending changes and the
        loaded contents of *dict_*."""
        self.expired = True

        if self.modified:
            modified_set.discard(self)
            self.committed_state.clear()
            self.modified = False

        self._strong_obj = None

        if '_pending_mutations' in self.__dict__:
            del self.__dict__['_pending_mutations']

        if 'parents' in self.__dict__:
            del self.__dict__['parents']

        self.expired_attributes.update(
            [impl.key for impl in self.manager._scalar_loader_impls
             if impl.expire_missing or impl.key in dict_]
        )

        if self.callables:
            # expiration supersedes per-state loader callables
            for k in self.expired_attributes.intersection(self.callables):
                del self.callables[k]

        for k in self.manager._collection_impl_keys.intersection(dict_):
            collection = dict_.pop(k)
            collection._sa_adapter.invalidated = True

        for key in self.manager._all_key_set.intersection(dict_):
            del dict_[key]

        self.manager.dispatch.expire(self, None)

    def _expire_attributes(self, dict_, attribute_names):
        """Expire only the attributes named in *attribute_names*."""
        pending = self.__dict__.get('_pending_mutations', None)

        callables = self.callables

        for key in attribute_names:
            impl = self.manager[key].impl
            if impl.accepts_scalar_loader:
                self.expired_attributes.add(key)
                if callables and key in callables:
                    del callables[key]
            old = dict_.pop(key, None)
            if impl.collection and old is not None:
                impl._invalidate_collection(old)

            self.committed_state.pop(key, None)
            if pending:
                pending.pop(key, None)

        self.manager.dispatch.expire(self, attribute_names)

    def _load_expired(self, state, passive):
        """__call__ allows the InstanceState to act as a deferred
        callable for loading expired attributes, which is also
        serializable (picklable).

        """

        if not passive & SQL_OK:
            return PASSIVE_NO_RESULT

        # only load attributes that are both expired and unmodified
        toload = self.expired_attributes.\
            intersection(self.unmodified)

        self.manager.deferred_scalar_loader(self, toload)

        # if the loader failed, or this
        # instance state didn't have an identity,
        # the attributes still might be in the callables
        # dict.  ensure they are removed.
        self.expired_attributes.clear()

        return ATTR_WAS_SET

    @property
    def unmodified(self):
        """Return the set of keys which have no uncommitted changes"""

        return set(self.manager).difference(self.committed_state)

    def unmodified_intersection(self, keys):
        """Return self.unmodified.intersection(keys)."""

        return set(keys).intersection(self.manager).\
            difference(self.committed_state)

    @property
    def unloaded(self):
        """Return the set of keys which do not have a loaded value.

        This includes expired attributes and any other attribute that
        was never populated or modified.

        """
        return set(self.manager).\
            difference(self.committed_state).\
            difference(self.dict)

    @property
    def _unloaded_non_object(self):
        # unloaded keys restricted to those whose impl accepts the
        # deferred scalar loader.
        return self.unloaded.intersection(
            attr for attr in self.manager
            if self.manager[attr].impl.accepts_scalar_loader
        )

    def _instance_dict(self):
        # base implementation; NOTE(review): appears to be replaced at
        # the instance level when the state joins an identity map (see
        # _cleanup, which deletes an instance-level attribute) -- confirm.
        return None

    def _modified_event(
            self, dict_, attr, previous, collection=False, force=False):
        """Record that attribute *attr* is about to be modified,
        capturing *previous* as its committed value."""
        if not attr.send_modified_events:
            return
        if attr.key not in self.committed_state or force:
            if collection:
                if previous is NEVER_SET:
                    if attr.key in dict_:
                        previous = dict_[attr.key]

                if previous not in (None, NO_VALUE, NEVER_SET):
                    # snapshot the collection contents for history
                    previous = attr.copy(previous)

            self.committed_state[attr.key] = previous

        # assert self._strong_obj is None or self.modified

        if (self.session_id and self._strong_obj is None) \
                or not self.modified:
            self.modified = True
            instance_dict = self._instance_dict()
            if instance_dict:
                instance_dict._modified.add(self)

            # only create _strong_obj link if attached
            # to a session

            inst = self.obj()
            if self.session_id:
                self._strong_obj = inst

            if inst is None:
                raise orm_exc.ObjectDereferencedError(
                    "Can't emit change event for attribute '%s' - "
                    "parent object of type %s has been garbage "
                    "collected."
                    % (
                        self.manager[attr.key],
                        base.state_class_str(self)
                    ))

    def _commit(self, dict_, keys):
        """Commit attributes.

        This is used by a partial-attribute load operation to mark committed
        those attributes which were refreshed from the database.

        Attributes marked as "expired" can potentially remain "expired" after
        this step if a value was not populated in state.dict.

        """
        for key in keys:
            self.committed_state.pop(key, None)

        self.expired = False

        self.expired_attributes.difference_update(
            set(keys).intersection(dict_))

        # the per-keys commit removes object-level callables,
        # while that of commit_all does not.  it's not clear
        # if this behavior has a clear rationale, however tests do
        # ensure this is what it does.
        if self.callables:
            for key in set(self.callables).\
                    intersection(keys).\
                    intersection(dict_):
                del self.callables[key]

    def _commit_all(self, dict_, instance_dict=None):
        """commit all attributes unconditionally.

        This is used after a flush() or a full load/refresh
        to remove all pending state from the instance.

        - all attributes are marked as "committed"
        - the "strong dirty reference" is removed
        - the "modified" flag is set to False
        - any "expired" markers for scalar attributes loaded are removed.
        - lazy load callables for objects / collections *stay*

        Attributes marked as "expired" can potentially remain
        "expired" after this step if a value was not populated in state.dict.

        """
        self._commit_all_states([(self, dict_)], instance_dict)

    @classmethod
    def _commit_all_states(self, iter, instance_dict=None):
        """Mass / highly inlined version of commit_all().

        Note: despite the name, the first argument is the class
        (this is a classmethod), and ``iter`` (which shadows the
        builtin) is a sequence of (state, dict) pairs.

        """
        for state, dict_ in iter:
            state_dict = state.__dict__

            state.committed_state.clear()

            if '_pending_mutations' in state_dict:
                del state_dict['_pending_mutations']

            state.expired_attributes.difference_update(dict_)

            if instance_dict and state.modified:
                instance_dict._modified.discard(state)

            state.modified = state.expired = False
            state._strong_obj = None
|
||||
class AttributeState(object):
    """Inspection interface for a single attribute of a single
    mapped object.

    Obtained via the :attr:`.InstanceState.attrs` collection
    of a particular :class:`.InstanceState`::

        from sqlalchemy import inspect

        insp = inspect(some_mapped_object)
        attr_state = insp.attrs.some_attribute

    """

    def __init__(self, state, key):
        self.state = state
        self.key = key

    @property
    def loaded_value(self):
        """The value of this attribute as loaded from the database.

        Returns NO_VALUE when the value is not present in the
        object's dictionary (never loaded, or otherwise absent).

        """
        try:
            return self.state.dict[self.key]
        except KeyError:
            return NO_VALUE

    @property
    def value(self):
        """Return the current value of this attribute.

        Equivalent to accessing the attribute directly or via
        ``getattr()``; any pending loader callables will be invoked.

        """
        attribute = self.state.manager[self.key]
        return attribute.__get__(self.state.obj(), self.state.class_)

    @property
    def history(self):
        """The current pre-flush change history of this attribute,
        via the :class:`.History` interface.

        Does **not** emit loader callables if the value of the
        attribute is unloaded.

        .. seealso::

            :meth:`.AttributeState.load_history` - retrieve history
            using loader callables if the value is not locally present.

            :func:`.attributes.get_history` - underlying function

        """
        return self.state.get_history(
            self.key, PASSIVE_NO_INITIALIZE)

    def load_history(self):
        """Return the current pre-flush change history of this
        attribute, via the :class:`.History` interface.

        **Will** emit loader callables if the value of the attribute
        is unloaded.

        .. seealso::

            :attr:`.AttributeState.history`

            :func:`.attributes.get_history` - underlying function

        .. versionadded:: 0.9.0

        """
        return self.state.get_history(
            self.key, PASSIVE_OFF ^ INIT_OK)
|
||||
|
||||
class PendingCollection(object):
    """Accumulate changes destined for a collection that has not yet
    been loaded.

    Appends and removals recorded here are replayed onto the real
    collection once it is loaded, producing the final result.

    """

    def __init__(self):
        # additions preserve order; deletions do not need to.
        self.deleted_items = util.IdentitySet()
        self.added_items = util.OrderedIdentitySet()

    def append(self, value):
        """Record *value* as added, cancelling any pending removal."""
        if value not in self.deleted_items:
            self.added_items.add(value)
        else:
            self.deleted_items.remove(value)

    def remove(self, value):
        """Record *value* as removed, cancelling any pending add."""
        if value not in self.added_items:
            self.deleted_items.add(value)
        else:
            self.added_items.remove(value)
||||
1618
lib/python3.7/site-packages/sqlalchemy/orm/strategies.py
Normal file
1618
lib/python3.7/site-packages/sqlalchemy/orm/strategies.py
Normal file
File diff suppressed because it is too large
Load diff
1056
lib/python3.7/site-packages/sqlalchemy/orm/strategy_options.py
Normal file
1056
lib/python3.7/site-packages/sqlalchemy/orm/strategy_options.py
Normal file
File diff suppressed because it is too large
Load diff
140
lib/python3.7/site-packages/sqlalchemy/orm/sync.py
Normal file
140
lib/python3.7/site-packages/sqlalchemy/orm/sync.py
Normal file
|
|
@ -0,0 +1,140 @@
|
|||
# orm/sync.py
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""private module containing functions used for copying data
|
||||
between instances based on join conditions.
|
||||
|
||||
"""
|
||||
|
||||
from . import exc, util as orm_util, attributes
|
||||
|
||||
|
||||
def populate(source, source_mapper, dest, dest_mapper,
             synchronize_pairs, uowcommit, flag_cascaded_pks):
    """Copy attribute values from the *source* state to the *dest*
    state for each (source column, dest column) pair.

    When *flag_cascaded_pks* is set, mark in ``uowcommit.attributes``
    each destination primary key column populated from a referencing
    source primary key, so the flush can cascade the change.
    """
    source_dict = source.dict
    dest_dict = dest.dict

    for left, right in synchronize_pairs:
        try:
            # inline of source_mapper._get_state_attr_by_column
            source_prop = source_mapper._columntoproperty[left]
            value = source.manager[source_prop.key].impl.get(
                source, source_dict, attributes.PASSIVE_OFF)
        except exc.UnmappedColumnError:
            _raise_col_to_prop(False, source_mapper, left, dest_mapper, right)

        try:
            # inline of dest_mapper._set_state_attr_by_column
            dest_prop = dest_mapper._columntoproperty[right]
            dest.manager[dest_prop.key].impl.set(
                dest, dest_dict, value, None)
        except exc.UnmappedColumnError:
            _raise_col_to_prop(True, source_mapper, left, dest_mapper, right)

        # the "right.primary_key" test is technically redundant, but it
        # limits how often this branch is taken, since the flag is only
        # meaningful for a primary key destination.
        if flag_cascaded_pks and left.primary_key and \
                right.primary_key and \
                right.references(left):
            uowcommit.attributes[("pk_cascaded", dest, right)] = True
||||
|
||||
def bulk_populate_inherit_keys(
        source_dict, source_mapper, synchronize_pairs):
    """Simplified version of populate() used by bulk insert mode:
    copy values between keys of one mapping per *synchronize_pairs*."""
    for left, right in synchronize_pairs:
        try:
            value = source_dict[source_mapper._columntoproperty[left].key]
        except exc.UnmappedColumnError:
            _raise_col_to_prop(
                False, source_mapper, left, source_mapper, right)

        try:
            source_dict[source_mapper._columntoproperty[right].key] = value
        except exc.UnmappedColumnError:
            _raise_col_to_prop(
                True, source_mapper, left, source_mapper, right)
||||
|
||||
def clear(dest, dest_mapper, synchronize_pairs):
    """Set each right-hand destination column attribute to None,
    first asserting that no populated primary key is being blanked."""
    for left, right in synchronize_pairs:
        if right.primary_key and \
                dest_mapper._get_state_attr_by_column(
                    dest, dest.dict, right) not in orm_util._none_set:

            raise AssertionError(
                "Dependency rule tried to blank-out primary key "
                "column '%s' on instance '%s'" %
                (right, orm_util.state_str(dest))
            )
        try:
            dest_mapper._set_state_attr_by_column(
                dest, dest.dict, right, None)
        except exc.UnmappedColumnError:
            _raise_col_to_prop(True, None, left, dest_mapper, right)
||||
|
||||
def update(source, source_mapper, dest, old_prefix, synchronize_pairs):
    """Populate mapping *dest* with the current and prior committed
    values of each source column attribute; prior values are stored
    under ``old_prefix + key``."""
    for left, right in synchronize_pairs:
        try:
            oldvalue = source_mapper._get_committed_attr_by_column(
                source.obj(), left)
            value = source_mapper._get_state_attr_by_column(
                source, source.dict, left, passive=attributes.PASSIVE_OFF)
        except exc.UnmappedColumnError:
            _raise_col_to_prop(False, source_mapper, left, None, right)
        dest[right.key] = value
        dest[old_prefix + right.key] = oldvalue
|
||||
|
||||
def populate_dict(source, source_mapper, dict_, synchronize_pairs):
    """Copy the current value of each source column attribute into
    *dict_*, keyed by the destination column key."""
    for left, right in synchronize_pairs:
        try:
            value = source_mapper._get_state_attr_by_column(
                source, source.dict, left, passive=attributes.PASSIVE_OFF)
        except exc.UnmappedColumnError:
            _raise_col_to_prop(False, source_mapper, left, None, right)

        dict_[right.key] = value
||||
|
||||
def source_modified(uowcommit, source, source_mapper, synchronize_pairs):
    """Return True if the source object has changes from an old to a
    new value (deleted history) on any attribute named by the left
    side of *synchronize_pairs*.

    """
    for left, right in synchronize_pairs:
        try:
            prop = source_mapper._columntoproperty[left]
        except exc.UnmappedColumnError:
            _raise_col_to_prop(False, source_mapper, left, None, right)
        history = uowcommit.get_attribute_history(
            source, prop.key, attributes.PASSIVE_NO_INITIALIZE)
        if history.deleted:
            return True
    return False
||||
|
||||
def _raise_col_to_prop(isdest, source_mapper, source_column,
                       dest_mapper, dest_column):
    """Raise :class:`.UnmappedColumnError` describing a failed sync
    rule, phrased for the source or the destination side per *isdest*."""
    if not isdest:
        raise exc.UnmappedColumnError(
            "Can't execute sync rule for "
            "source column '%s'; mapper '%s' does not map this "
            "column.  Try using an explicit `foreign_keys` "
            "collection which does not include destination column "
            "'%s' (or use a viewonly=True relation)." %
            (source_column, source_mapper, dest_column))
    else:
        raise exc.UnmappedColumnError(
            "Can't execute sync rule for "
            "destination column '%s'; mapper '%s' does not map "
            "this column.  Try using an explicit `foreign_keys` "
            "collection which does not include this column (or use "
            "a viewonly=True relation)." % (dest_column, dest_mapper))
|
||||
656
lib/python3.7/site-packages/sqlalchemy/orm/unitofwork.py
Normal file
656
lib/python3.7/site-packages/sqlalchemy/orm/unitofwork.py
Normal file
|
|
@ -0,0 +1,656 @@
|
|||
# orm/unitofwork.py
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""The internals for the unit of work system.
|
||||
|
||||
The session's flush() process passes objects to a contextual object
|
||||
here, which assembles flush tasks based on mappers and their properties,
|
||||
organizes them in order of dependency, and executes.
|
||||
|
||||
"""
|
||||
|
||||
from .. import util, event
|
||||
from ..util import topological
|
||||
from . import attributes, persistence, util as orm_util
|
||||
import itertools
|
||||
|
||||
|
||||
def track_cascade_events(descriptor, prop):
    """Establish event listeners on object attributes which handle
    cascade-on-set/append.

    The three hooks below are registered with ``raw=True, retval=True``,
    so each receives the raw :class:`.InstanceState` of the parent and
    must return the value the attribute system proceeds with.  Only
    ``prop.key`` is captured here; the live property is re-fetched from
    the mapper inside each hook.

    """
    key = prop.key

    def append(state, item, initiator):
        # process "save_update" cascade rules for when
        # an instance is appended to the list of another instance

        if item is None:
            return

        sess = state.session
        if sess:
            if sess._warn_on_events:
                sess._flush_warning("collection append")

            prop = state.manager.mapper._props[key]
            item_state = attributes.instance_state(item)
            if prop._cascade.save_update and \
                    (prop.cascade_backrefs or key == initiator.key) and \
                    not sess._contains_state(item_state):
                sess._save_or_update_state(item_state)
        # retval=True: hand the (unchanged) item back to the
        # attribute system.
        return item

    def remove(state, item, initiator):
        if item is None:
            return

        sess = state.session
        if sess:

            prop = state.manager.mapper._props[key]

            if sess._warn_on_events:
                sess._flush_warning(
                    "collection remove"
                    if prop.uselist
                    else "related attribute delete")

            # expunge pending orphans
            item_state = attributes.instance_state(item)
            if prop._cascade.delete_orphan and \
                    item_state in sess._new and \
                    prop.mapper._is_orphan(item_state):
                sess.expunge(item)

    def set_(state, newvalue, oldvalue, initiator):
        # process "save_update" cascade rules for when an instance
        # is attached to another instance
        if oldvalue is newvalue:
            return newvalue

        sess = state.session
        if sess:

            if sess._warn_on_events:
                sess._flush_warning("related attribute set")

            prop = state.manager.mapper._props[key]
            if newvalue is not None:
                newvalue_state = attributes.instance_state(newvalue)
                if prop._cascade.save_update and \
                        (prop.cascade_backrefs or key == initiator.key) and \
                        not sess._contains_state(newvalue_state):
                    sess._save_or_update_state(newvalue_state)

            if oldvalue is not None and \
                    oldvalue is not attributes.NEVER_SET and \
                    oldvalue is not attributes.PASSIVE_NO_RESULT and \
                    prop._cascade.delete_orphan:
                # possible to reach here with attributes.NEVER_SET ?
                oldvalue_state = attributes.instance_state(oldvalue)

                if oldvalue_state in sess._new and \
                        prop.mapper._is_orphan(oldvalue_state):
                    # the replaced value is a pending orphan; expunge it
                    sess.expunge(oldvalue)
        return newvalue

    event.listen(descriptor, 'append', append, raw=True, retval=True)
    event.listen(descriptor, 'remove', remove, raw=True, retval=True)
    event.listen(descriptor, 'set', set_, raw=True, retval=True)
||||
|
||||
class UOWTransaction(object):
    """Tracks the state of a single flush operation: which InstanceState
    objects are to be saved or deleted, and the ordered collection of
    actions that will carry the flush out."""

    def __init__(self, session):
        # the Session on whose behalf this flush runs
        self.session = session

        # dictionary used by external actors to
        # store arbitrary state information.
        self.attributes = {}

        # dictionary of mappers to sets of
        # DependencyProcessors, which are also
        # set to be part of the sorted flush actions,
        # which have that mapper as a parent.
        self.deps = util.defaultdict(set)

        # dictionary of mappers to sets of InstanceState
        # items pending for flush which have that mapper
        # as a parent.
        self.mappers = util.defaultdict(set)

        # a dictionary of Preprocess objects, which gather
        # additional states impacted by the flush
        # and determine if a flush action is needed
        self.presort_actions = {}

        # dictionary of PostSortRec objects, each
        # one issues work during the flush within
        # a certain ordering.
        self.postsort_actions = {}

        # a set of 2-tuples, each containing two
        # PostSortRec objects where the second
        # is dependent on the first being executed
        # first
        self.dependencies = set()

        # dictionary of InstanceState-> (isdelete, listonly)
        # tuples, indicating if this state is to be deleted
        # or insert/updated, or just refreshed
        self.states = {}

        # tracks InstanceStates which will be receiving
        # a "post update" call.  Keys are mappers,
        # values are a set of states and a set of the
        # columns which should be included in the update.
        self.post_update_states = util.defaultdict(lambda: (set(), set()))

    @property
    def has_work(self):
        # True when at least one state is registered for this flush
        return bool(self.states)

    def is_deleted(self, state):
        """return true if the given state is marked as deleted
        within this uowtransaction."""

        return state in self.states and self.states[state][0]

    def memo(self, key, callable_):
        """Return the cached value for ``key``, computing and storing it
        via ``callable_()`` on first use."""
        if key in self.attributes:
            return self.attributes[key]
        else:
            self.attributes[key] = ret = callable_()
            return ret

    def remove_state_actions(self, state):
        """remove pending actions for a state from the uowtransaction."""

        isdelete = self.states[state][0]

        # flip the state to "listonly", preserving its delete flag
        self.states[state] = (isdelete, True)

    def get_attribute_history(self, state, key,
                              passive=attributes.PASSIVE_NO_INITIALIZE):
        """facade to attributes.get_state_history(), including
        caching of results."""

        hashkey = ("history", state, key)

        # cache the objects, not the states; the strong reference here
        # prevents newly loaded objects from being dereferenced during the
        # flush process

        if hashkey in self.attributes:
            history, state_history, cached_passive = self.attributes[hashkey]
            # if the cached lookup was "passive" and now
            # we want non-passive, do a non-passive lookup and re-cache

            if not cached_passive & attributes.SQL_OK \
                    and passive & attributes.SQL_OK:
                impl = state.manager[key].impl
                history = impl.get_history(state, state.dict,
                                           attributes.PASSIVE_OFF |
                                           attributes.LOAD_AGAINST_COMMITTED)
                if history and impl.uses_objects:
                    state_history = history.as_state()
                else:
                    state_history = history
                self.attributes[hashkey] = (history, state_history, passive)
        else:
            impl = state.manager[key].impl
            # TODO: store the history as (state, object) tuples
            # so we don't have to keep converting here
            history = impl.get_history(state, state.dict, passive |
                                       attributes.LOAD_AGAINST_COMMITTED)
            if history and impl.uses_objects:
                state_history = history.as_state()
            else:
                state_history = history
            self.attributes[hashkey] = (history, state_history,
                                        passive)

        return state_history

    def has_dep(self, processor):
        # True if the given DependencyProcessor is registered as a
        # "fromparent" presort action
        return (processor, True) in self.presort_actions

    def register_preprocessor(self, processor, fromparent):
        """Register a Preprocess action for the given DependencyProcessor,
        keyed on the (processor, fromparent) combination."""
        key = (processor, fromparent)
        if key not in self.presort_actions:
            self.presort_actions[key] = Preprocess(processor, fromparent)

    def register_object(self, state, isdelete=False,
                        listonly=False, cancel_delete=False,
                        operation=None, prop=None):
        """Add an InstanceState to the flush; return True if it was
        accepted, False if it is not part of this session."""
        if not self.session._contains_state(state):
            # warn only when an operation name is supplied; silent
            # otherwise
            if not state.deleted and operation is not None:
                util.warn("Object of type %s not in session, %s operation "
                          "along '%s' will not proceed" %
                          (orm_util.state_class_str(state), operation, prop))
            return False

        if state not in self.states:
            mapper = state.manager.mapper

            # first state seen for this mapper: set up the mapper-level
            # flush actions
            if mapper not in self.mappers:
                self._per_mapper_flush_actions(mapper)

            self.mappers[mapper].add(state)
            self.states[state] = (isdelete, listonly)
        else:
            # upgrade an existing "listonly" registration to a full
            # save or delete when requested
            if not listonly and (isdelete or cancel_delete):
                self.states[state] = (isdelete, False)
        return True

    def issue_post_update(self, state, post_update_cols):
        """Record that ``state`` should receive a "post update" UPDATE
        including the given columns."""
        mapper = state.manager.mapper.base_mapper
        states, cols = self.post_update_states[mapper]
        states.add(state)
        cols.update(post_update_cols)

    def _per_mapper_flush_actions(self, mapper):
        """Establish the SaveUpdateAll/DeleteAll actions and dependency
        preprocessors for a mapper newly involved in the flush."""
        saves = SaveUpdateAll(self, mapper.base_mapper)
        deletes = DeleteAll(self, mapper.base_mapper)
        # saves are ordered before deletes
        self.dependencies.add((saves, deletes))

        for dep in mapper._dependency_processors:
            dep.per_property_preprocessors(self)

        for prop in mapper.relationships:
            if prop.viewonly:
                continue
            dep = prop._dependency_processor
            dep.per_property_preprocessors(self)

    @util.memoized_property
    def _mapper_for_dep(self):
        """return a dynamic mapping of (Mapper, DependencyProcessor) to
        True or False, indicating if the DependencyProcessor operates
        on objects of that Mapper.

        The result is stored in the dictionary persistently once
        calculated.

        """
        return util.PopulateDict(
            lambda tup: tup[0]._props.get(tup[1].key) is tup[1].prop
        )

    def filter_states_for_dep(self, dep, states):
        """Filter the given list of InstanceStates to those relevant to the
        given DependencyProcessor.

        """
        mapper_for_dep = self._mapper_for_dep
        return [s for s in states if mapper_for_dep[(s.manager.mapper, dep)]]

    def states_for_mapper_hierarchy(self, mapper, isdelete, listonly):
        """Yield registered states across the mapper's base hierarchy
        whose (isdelete, listonly) flags match the given values."""
        checktup = (isdelete, listonly)
        for mapper in mapper.base_mapper.self_and_descendants:
            for state in self.mappers[mapper]:
                if self.states[state] == checktup:
                    yield state

    def _generate_actions(self):
        """Generate the full, unsorted collection of PostSortRecs as
        well as dependency pairs for this UOWTransaction.

        """
        # execute presort_actions, until all states
        # have been processed. a presort_action might
        # add new states to the uow.
        while True:
            ret = False
            for action in list(self.presort_actions.values()):
                if action.execute(self):
                    ret = True
            if not ret:
                break

        # see if the graph of mapper dependencies has cycles.
        self.cycles = cycles = topological.find_cycles(
            self.dependencies,
            list(self.postsort_actions.values()))

        if cycles:
            # if yes, break the per-mapper actions into
            # per-state actions
            convert = dict(
                (rec, set(rec.per_state_flush_actions(self)))
                for rec in cycles
            )

            # rewrite the existing dependencies to point to
            # the per-state actions for those per-mapper actions
            # that were broken up.
            for edge in list(self.dependencies):
                if None in edge or \
                        edge[0].disabled or edge[1].disabled or \
                        cycles.issuperset(edge):
                    self.dependencies.remove(edge)
                elif edge[0] in cycles:
                    self.dependencies.remove(edge)
                    for dep in convert[edge[0]]:
                        self.dependencies.add((dep, edge[1]))
                elif edge[1] in cycles:
                    self.dependencies.remove(edge)
                    for dep in convert[edge[1]]:
                        self.dependencies.add((edge[0], dep))

        return set([a for a in self.postsort_actions.values()
                    if not a.disabled
                    ]
                   ).difference(cycles)

    def execute(self):
        """Run all flush actions in topologically sorted order."""
        postsort_actions = self._generate_actions()

        # sort = topological.sort(self.dependencies, postsort_actions)
        # print "--------------"
        # print "\ndependencies:", self.dependencies
        # print "\ncycles:", self.cycles
        # print "\nsort:", list(sort)
        # print "\nCOUNT OF POSTSORT ACTIONS", len(postsort_actions)

        # execute
        if self.cycles:
            # cycles present: execute in subsets, allowing each record
            # to aggregate compatible peers from the same subset
            for set_ in topological.sort_as_subsets(
                    self.dependencies,
                    postsort_actions):
                while set_:
                    n = set_.pop()
                    n.execute_aggregate(self, set_)
        else:
            for rec in topological.sort(
                    self.dependencies,
                    postsort_actions):
                rec.execute(self)

    def finalize_flush_changes(self):
        """mark processed objects as clean / deleted after a successful
        flush().

        this method is called within the flush() method after the
        execute() method has succeeded and the transaction has been committed.

        """
        if not self.states:
            return

        states = set(self.states)
        isdel = set(
            s for (s, (isdelete, listonly)) in self.states.items()
            if isdelete
        )
        other = states.difference(isdel)
        if isdel:
            self.session._remove_newly_deleted(isdel)
        if other:
            self.session._register_newly_persistent(other)
|
||||
|
||||
|
||||
class IterateMappersMixin(object):
    """Mixin supplying iteration over the mappers a record's
    DependencyProcessor applies to."""

    def _mappers(self, uow):
        """Return an iterator of relevant mappers, in either the parent
        or the child direction depending on ``self.fromparent``."""
        dep = self.dependency_processor
        if not self.fromparent:
            return dep.mapper.self_and_descendants
        candidates = dep.parent.self_and_descendants
        return iter(
            mapper
            for mapper in candidates
            if uow._mapper_for_dep[(mapper, dep)]
        )
|
||||
|
||||
|
||||
class Preprocess(IterateMappersMixin):
    """Presort action for one DependencyProcessor: gathers additional
    states impacted by the flush and installs the processor's flush
    actions once changes are detected."""

    def __init__(self, dependency_processor, fromparent):
        self.dependency_processor = dependency_processor
        self.fromparent = fromparent
        # states already handed to presort_deletes / presort_saves
        self.processed = set()
        # becomes True once per_property_flush_actions has run
        self.setup_flush_actions = False

    def execute(self, uow):
        """Run the presort hooks over newly seen states; return True if
        any new work was found (the uow loops until all executes return
        False)."""
        delete_states = set()
        save_states = set()

        # only consider states not yet processed on a prior pass
        for mapper in self._mappers(uow):
            for state in uow.mappers[mapper].difference(self.processed):
                (isdelete, listonly) = uow.states[state]
                if not listonly:
                    if isdelete:
                        delete_states.add(state)
                    else:
                        save_states.add(state)

        if delete_states:
            self.dependency_processor.presort_deletes(uow, delete_states)
            self.processed.update(delete_states)
        if save_states:
            self.dependency_processor.presort_saves(uow, save_states)
            self.processed.update(save_states)

        if (delete_states or save_states):
            # install flush actions at most once, and only if the
            # processor reports actual changes
            if not self.setup_flush_actions and (
                    self.dependency_processor.
                    prop_has_changes(uow, delete_states, True) or
                    self.dependency_processor.
                    prop_has_changes(uow, save_states, False)
            ):
                self.dependency_processor.per_property_flush_actions(uow)
                self.setup_flush_actions = True
            return True
        else:
            return False
|
||||
|
||||
|
||||
class PostSortRec(object):
    """Base class for flush actions executed after topological sort.

    Instances are interned per-uowtransaction: constructing the same
    (class, args) combination twice yields the same object.
    """

    disabled = False

    def __new__(cls, uow, *args):
        key = (cls,) + args
        existing = uow.postsort_actions.get(key)
        if existing is not None:
            return existing
        rec = object.__new__(cls)
        uow.postsort_actions[key] = rec
        return rec

    def execute_aggregate(self, uow, recs):
        """Execute this record; subclasses may also consume compatible
        peer records from ``recs``."""
        self.execute(uow)

    def __repr__(self):
        attrs = ",".join(str(value) for value in self.__dict__.values())
        return "%s(%s)" % (self.__class__.__name__, attrs)
|
||||
|
||||
|
||||
class ProcessAll(IterateMappersMixin, PostSortRec):
    """Flush action running a DependencyProcessor over every matching
    state in the flush."""

    def __init__(self, uow, dependency_processor, delete, fromparent):
        self.dependency_processor = dependency_processor
        self.delete = delete
        self.fromparent = fromparent
        parent_base = dependency_processor.parent.base_mapper
        uow.deps[parent_base].add(dependency_processor)

    def execute(self, uow):
        """Dispatch the gathered states to the processor's save or
        delete hook, according to ``self.delete``."""
        candidates = self._elements(uow)
        if self.delete:
            handler = self.dependency_processor.process_deletes
        else:
            handler = self.dependency_processor.process_saves
        handler(uow, candidates)

    def per_state_flush_actions(self, uow):
        # this is handled by SaveUpdateAll and DeleteAll,
        # since a ProcessAll should unconditionally be pulled
        # into per-state if either the parent/child mappers
        # are part of a cycle
        return iter(())

    def __repr__(self):
        return "%s(%s, delete=%s)" % (
            self.__class__.__name__,
            self.dependency_processor,
            self.delete,
        )

    def _elements(self, uow):
        """Yield non-listonly states whose delete flag matches ours."""
        wanted = self.delete
        for mapper in self._mappers(uow):
            for state in uow.mappers[mapper]:
                isdelete, listonly = uow.states[state]
                if listonly:
                    continue
                if isdelete == wanted:
                    yield state
|
||||
|
||||
|
||||
class IssuePostUpdate(PostSortRec):
    """Flush action emitting the "post update" UPDATE statements
    recorded via UOWTransaction.issue_post_update()."""

    def __init__(self, uow, mapper, isdelete):
        self.mapper = mapper
        self.isdelete = isdelete

    def execute(self, uow):
        """Issue the post-update for states matching our delete flag."""
        states, cols = uow.post_update_states[self.mapper]
        matching = [
            state for state in states
            if uow.states[state][0] == self.isdelete
        ]
        persistence.post_update(self.mapper, matching, uow, cols)
|
||||
|
||||
|
||||
class SaveUpdateAll(PostSortRec):
    """Flush action issuing INSERT/UPDATE for all save states of a
    mapper hierarchy."""

    def __init__(self, uow, mapper):
        self.mapper = mapper
        # this action only operates at the base of a hierarchy
        assert mapper is mapper.base_mapper

    def execute(self, uow):
        """INSERT/UPDATE every non-delete, non-listonly state in the
        hierarchy."""
        persistence.save_obj(self.mapper,
                             uow.states_for_mapper_hierarchy(
                                 self.mapper, False, False),
                             uow
                             )

    def per_state_flush_actions(self, uow):
        """Break this per-mapper action into per-state SaveUpdateState
        actions; used when the dependency graph has cycles."""
        states = list(uow.states_for_mapper_hierarchy(
            self.mapper, False, False))
        base_mapper = self.mapper.base_mapper
        delete_all = DeleteAll(uow, base_mapper)
        for state in states:
            # keep saves before deletes -
            # this ensures 'row switch' operations work
            action = SaveUpdateState(uow, state, base_mapper)
            uow.dependencies.add((action, delete_all))
            yield action

        # also break up the dependent processors for these states
        for dep in uow.deps[self.mapper]:
            states_for_prop = uow.filter_states_for_dep(dep, states)
            dep.per_state_flush_actions(uow, states_for_prop, False)
|
||||
|
||||
|
||||
class DeleteAll(PostSortRec):
    """Flush action issuing DELETE for all delete states of a mapper
    hierarchy."""

    def __init__(self, uow, mapper):
        self.mapper = mapper
        # this action only operates at the base of a hierarchy
        assert mapper is mapper.base_mapper

    def execute(self, uow):
        """DELETE every delete-flagged, non-listonly state in the
        hierarchy."""
        persistence.delete_obj(self.mapper,
                               uow.states_for_mapper_hierarchy(
                                   self.mapper, True, False),
                               uow
                               )

    def per_state_flush_actions(self, uow):
        """Break this per-mapper action into per-state DeleteState
        actions; used when the dependency graph has cycles."""
        states = list(uow.states_for_mapper_hierarchy(
            self.mapper, True, False))
        base_mapper = self.mapper.base_mapper
        save_all = SaveUpdateAll(uow, base_mapper)
        for state in states:
            # keep saves before deletes -
            # this ensures 'row switch' operations work
            action = DeleteState(uow, state, base_mapper)
            uow.dependencies.add((save_all, action))
            yield action

        # also break up the dependent processors for these states
        for dep in uow.deps[self.mapper]:
            states_for_prop = uow.filter_states_for_dep(dep, states)
            dep.per_state_flush_actions(uow, states_for_prop, True)
|
||||
|
||||
|
||||
class ProcessState(PostSortRec):
    """Per-state version of ProcessAll, used when the dependency graph
    contains cycles."""

    def __init__(self, uow, dependency_processor, delete, state):
        self.dependency_processor = dependency_processor
        self.delete = delete
        self.state = state

    def execute_aggregate(self, uow, recs):
        """Pull every compatible ProcessState out of ``recs`` and run
        the dependency processor once over the combined states."""
        klass = self.__class__
        processor = self.dependency_processor
        is_delete = self.delete
        siblings = [
            rec for rec in recs
            if rec.__class__ is klass
            and rec.dependency_processor is processor
            and rec.delete is is_delete
        ]
        recs.difference_update(siblings)
        states = [self.state] + [rec.state for rec in siblings]
        if is_delete:
            processor.process_deletes(uow, states)
        else:
            processor.process_saves(uow, states)

    def __repr__(self):
        return "%s(%s, %s, delete=%s)" % (
            self.__class__.__name__,
            self.dependency_processor,
            orm_util.state_str(self.state),
            self.delete,
        )
|
||||
|
||||
|
||||
class SaveUpdateState(PostSortRec):
    """Per-state INSERT/UPDATE action, used when the dependency graph
    contains cycles."""

    def __init__(self, uow, state, mapper):
        self.state = state
        self.mapper = mapper

    def execute_aggregate(self, uow, recs):
        """Batch all pending SaveUpdateState records for the same mapper
        into a single persistence.save_obj() call."""
        klass = self.__class__
        mapper = self.mapper
        siblings = [
            rec for rec in recs
            if rec.__class__ is klass and rec.mapper is mapper
        ]
        recs.difference_update(siblings)
        combined = [self.state] + [rec.state for rec in siblings]
        persistence.save_obj(mapper, combined, uow)

    def __repr__(self):
        return "%s(%s)" % (
            self.__class__.__name__,
            orm_util.state_str(self.state),
        )
|
||||
|
||||
|
||||
class DeleteState(PostSortRec):
    """Per-state DELETE action, used when the dependency graph contains
    cycles."""

    def __init__(self, uow, state, mapper):
        self.state = state
        self.mapper = mapper

    def execute_aggregate(self, uow, recs):
        """Batch all pending DeleteState records for the same mapper
        into a single persistence.delete_obj() call, filtering down to
        states still flagged for deletion."""
        klass = self.__class__
        mapper = self.mapper
        siblings = [
            rec for rec in recs
            if rec.__class__ is klass and rec.mapper is mapper
        ]
        recs.difference_update(siblings)
        pending = [self.state] + [rec.state for rec in siblings]
        to_delete = [state for state in pending if uow.states[state][0]]
        persistence.delete_obj(mapper, to_delete, uow)

    def __repr__(self):
        return "%s(%s)" % (
            self.__class__.__name__,
            orm_util.state_str(self.state),
        )
|
||||
1034
lib/python3.7/site-packages/sqlalchemy/orm/util.py
Normal file
1034
lib/python3.7/site-packages/sqlalchemy/orm/util.py
Normal file
File diff suppressed because it is too large
Load diff
Loading…
Add table
Add a link
Reference in a new issue