update sqlalchemy

parent 22cbffb8a3
commit e4bd5b5042
362 changed files with 37677 additions and 11013 deletions
@@ -1,5 +1,5 @@
 # orm/__init__.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -149,7 +149,12 @@ def backref(name, **kwargs):
         'items':relationship(
             SomeItem, backref=backref('parent', lazy='subquery'))

+    .. seealso::
+
+        :ref:`relationships_backref`
+
     """
     return (name, kwargs)
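The `backref()` helper documented in the hunk above pairs a backref name with keyword arguments for the reverse relationship. A minimal runnable sketch of the documented usage — the `Parent`/`SomeItem` mapping here is hypothetical, built only to host the docstring's own example:

    from sqlalchemy import Column, ForeignKey, Integer
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import backref, relationship

    Base = declarative_base()

    class SomeItem(Base):
        __tablename__ = 'item'
        id = Column(Integer, primary_key=True)
        parent_id = Column(Integer, ForeignKey('parent.id'))

    class Parent(Base):
        __tablename__ = 'parent'
        id = Column(Integer, primary_key=True)
        # the backref() call from the docstring: creates Parent.items plus
        # a SomeItem.parent attribute that uses subquery eager loading
        items = relationship(
            SomeItem, backref=backref('parent', lazy='subquery'))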
@@ -1,5 +1,5 @@
 # orm/attributes.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -30,9 +30,8 @@ from .base import state_str, instance_str

 @inspection._self_inspects
 class QueryableAttribute(interfaces._MappedAttribute,
-                         interfaces._InspectionAttr,
+                         interfaces.InspectionAttr,
                          interfaces.PropComparator):
-
     """Base class for :term:`descriptor` objects that intercept
     attribute events on behalf of a :class:`.MapperProperty`
     object.  The actual :class:`.MapperProperty` is accessible
@@ -212,7 +211,6 @@ class QueryableAttribute(interfaces._MappedAttribute,


 class InstrumentedAttribute(QueryableAttribute):
-
     """Class bound instrumented attribute which adds basic
     :term:`descriptor` methods.

@@ -250,7 +248,6 @@ def create_proxied_attribute(descriptor):
     # function is removed from ext/hybrid.py

     class Proxy(QueryableAttribute):
-
         """Presents the :class:`.QueryableAttribute` interface as a
         proxy on top of a Python descriptor / :class:`.PropComparator`
         combination.
@@ -330,7 +327,6 @@ OP_REPLACE = util.symbol("REPLACE")


 class Event(object):
-
     """A token propagated throughout the course of a chain of attribute
     events.

@@ -349,23 +345,26 @@ class Event(object):

     .. versionadded:: 0.9.0

+    :var impl: The :class:`.AttributeImpl` which is the current event
+     initiator.
+
+    :var op: The symbol :attr:`.OP_APPEND`, :attr:`.OP_REMOVE` or
+     :attr:`.OP_REPLACE`, indicating the source operation.
+
     """
-
-    impl = None
-    """The :class:`.AttributeImpl` which is the current event initiator.
-    """
-
-    op = None
-    """The symbol :attr:`.OP_APPEND`, :attr:`.OP_REMOVE` or :attr:`.OP_REPLACE`,
-    indicating the source operation.
-
-    """
+
+    __slots__ = 'impl', 'op', 'parent_token'

     def __init__(self, attribute_impl, op):
         self.impl = attribute_impl
         self.op = op
         self.parent_token = self.impl.parent_token

     def __eq__(self, other):
         return isinstance(other, Event) and \
             other.impl is self.impl and \
             other.op == self.op

     @property
     def key(self):
         return self.impl.key
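An `Event` token is what arrives as the `initiator` argument of attribute listeners; backref handlers compare tokens (via the `__eq__` above) to halt recursion. A hedged sketch of inspecting one, reusing the hypothetical `Parent.items` mapping from the first example:

    from sqlalchemy import event

    @event.listens_for(Parent.items, 'append')
    def on_append(target, value, initiator):
        # initiator is an attributes.Event token; .op names the source
        # operation and .impl the AttributeImpl that began the chain
        print("append via", initiator.op, "on", initiator.impl.key)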
@@ -375,7 +374,6 @@ class Event(object):


 class AttributeImpl(object):
-
     """internal implementation for instrumented attributes."""

     def __init__(self, class_, key,
@@ -455,6 +453,11 @@ class AttributeImpl(object):

         self.expire_missing = expire_missing

+    __slots__ = (
+        'class_', 'key', 'callable_', 'dispatch', 'trackparent',
+        'parent_token', 'send_modified_events', 'is_equal', 'expire_missing'
+    )
+
     def __str__(self):
         return "%s.%s" % (self.class_.__name__, self.key)

@@ -524,23 +527,6 @@ class AttributeImpl(object):

             state.parents[id_] = False

-    def set_callable(self, state, callable_):
-        """Set a callable function for this attribute on the given object.
-
-        This callable will be executed when the attribute is next
-        accessed, and is assumed to construct part of the instances
-        previously stored state. When its value or values are loaded,
-        they will be established as part of the instance's *committed
-        state*.  While *trackparent* information will be assembled for
-        these instances, attribute-level event handlers will not be
-        fired.
-
-        The callable overrides the class level callable set in the
-        ``InstrumentedAttribute`` constructor.
-
-        """
-        state.callables[self.key] = callable_
-
     def get_history(self, state, dict_, passive=PASSIVE_OFF):
         raise NotImplementedError()

@@ -565,7 +551,11 @@ class AttributeImpl(object):
     def initialize(self, state, dict_):
         """Initialize the given state's attribute with an empty value."""

-        dict_[self.key] = None
+        # As of 1.0, we don't actually set a value in
+        # dict_.  This is so that the state of the object does not get
+        # modified without emitting the appropriate events.
+
+
         return None

     def get(self, state, dict_, passive=PASSIVE_OFF):
@@ -584,7 +574,9 @@ class AttributeImpl(object):
             if not passive & CALLABLES_OK:
                 return PASSIVE_NO_RESULT

-            if key in state.callables:
+            if key in state.expired_attributes:
+                value = state._load_expired(state, passive)
+            elif key in state.callables:
                 callable_ = state.callables[key]
                 value = callable_(state, passive)
             elif self.callable_:
@@ -632,7 +624,7 @@ class AttributeImpl(object):

         if self.key in state.committed_state:
             value = state.committed_state[self.key]
-            if value is NO_VALUE:
+            if value in (NO_VALUE, NEVER_SET):
                 return None
             else:
                 return value
@@ -648,7 +640,6 @@ class AttributeImpl(object):


 class ScalarAttributeImpl(AttributeImpl):
-
     """represents a scalar value-holding InstrumentedAttribute."""

     accepts_scalar_loader = True
@@ -656,6 +647,23 @@ class ScalarAttributeImpl(AttributeImpl):
     supports_population = True
     collection = False

+    __slots__ = '_replace_token', '_append_token', '_remove_token'
+
+    def __init__(self, *arg, **kw):
+        super(ScalarAttributeImpl, self).__init__(*arg, **kw)
+        self._replace_token = self._append_token = None
+        self._remove_token = None
+
+    def _init_append_token(self):
+        self._replace_token = self._append_token = Event(self, OP_REPLACE)
+        return self._replace_token
+
+    _init_append_or_replace_token = _init_append_token
+
+    def _init_remove_token(self):
+        self._remove_token = Event(self, OP_REMOVE)
+        return self._remove_token
+
     def delete(self, state, dict_):

         # TODO: catch key errors, convert to attributeerror?
@@ -694,27 +702,18 @@ class ScalarAttributeImpl(AttributeImpl):
             state._modified_event(dict_, self, old)
         dict_[self.key] = value

-    @util.memoized_property
-    def _replace_token(self):
-        return Event(self, OP_REPLACE)
-
-    @util.memoized_property
-    def _append_token(self):
-        return Event(self, OP_REPLACE)
-
-    @util.memoized_property
-    def _remove_token(self):
-        return Event(self, OP_REMOVE)
-
     def fire_replace_event(self, state, dict_, value, previous, initiator):
         for fn in self.dispatch.set:
             value = fn(
-                state, value, previous, initiator or self._replace_token)
+                state, value, previous,
+                initiator or self._replace_token or
+                self._init_append_or_replace_token())
         return value

     def fire_remove_event(self, state, dict_, value, initiator):
         for fn in self.dispatch.remove:
-            fn(state, value, initiator or self._remove_token)
+            fn(state, value,
+               initiator or self._remove_token or self._init_remove_token())

     @property
     def type(self):
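The replacement of `@util.memoized_property` tokens with explicit `_init_*` methods follows from the `__slots__` additions in this file: a memoized property caches its result in the instance `__dict__`, which a slotted instance no longer has. A generic sketch of the substituted pattern — the names here are illustrative, not SQLAlchemy API:

    class TokenHolder(object):
        # __slots__ removes the per-instance __dict__, so a cached property
        # cannot stash its result there; instead a None sentinel lives in a
        # slot and is filled on first use via "x or init_x()"
        __slots__ = ('_token',)

        def __init__(self):
            self._token = None

        def _init_token(self):
            self._token = object()
            return self._token

        def fire(self, initiator=None):
            # same shape as the fire_*_event changes in this hunk
            return initiator or self._token or self._init_token()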
@@ -722,7 +721,6 @@ class ScalarAttributeImpl(AttributeImpl):


 class ScalarObjectAttributeImpl(ScalarAttributeImpl):
-
     """represents a scalar-holding InstrumentedAttribute,
     where the target object is also instrumented.

@@ -735,9 +733,13 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
     supports_population = True
     collection = False

+    __slots__ = ()
+
     def delete(self, state, dict_):
         old = self.get(state, dict_)
-        self.fire_remove_event(state, dict_, old, self._remove_token)
+        self.fire_remove_event(
+            state, dict_, old,
+            self._remove_token or self._init_remove_token())
         del dict_[self.key]

     def get_history(self, state, dict_, passive=PASSIVE_OFF):
@@ -787,14 +789,7 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
             old = self.get(
                 state, dict_, passive=PASSIVE_ONLY_PERSISTENT | NO_AUTOFLUSH)
         else:
-            # would like to call with PASSIVE_NO_FETCH ^ INIT_OK.  However,
-            # we have a long-standing behavior that a "get()" on never set
-            # should implicitly set the value to None.  Leaving INIT_OK
-            # set here means we are consistent whether or not we did a get
-            # first.
-            # see test_use_object_set_None vs.
-            # test_use_object_get_first_set_None in test_attributes.py
-            old = self.get(state, dict_, passive=PASSIVE_NO_FETCH)
+            old = self.get(state, dict_, passive=PASSIVE_NO_FETCH ^ INIT_OK)

         if check_old is not None and \
                 old is not PASSIVE_NO_RESULT and \
@@ -817,7 +812,8 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
             self.sethasparent(instance_state(value), state, False)

         for fn in self.dispatch.remove:
-            fn(state, value, initiator or self._remove_token)
+            fn(state, value, initiator or
+               self._remove_token or self._init_remove_token())

         state._modified_event(dict_, self, value)

@@ -829,7 +825,8 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):

         for fn in self.dispatch.set:
             value = fn(
-                state, value, previous, initiator or self._replace_token)
+                state, value, previous, initiator or
+                self._replace_token or self._init_append_or_replace_token())

         state._modified_event(dict_, self, previous)

@@ -841,7 +838,6 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):


 class CollectionAttributeImpl(AttributeImpl):
-
     """A collection-holding attribute that instruments changes in membership.

     Only handles collections of instrumented objects.
@@ -857,6 +853,8 @@ class CollectionAttributeImpl(AttributeImpl):
     supports_population = True
     collection = True

+    __slots__ = 'copy', 'collection_factory', '_append_token', '_remove_token'
+
     def __init__(self, class_, key, callable_, dispatch,
                  typecallable=None, trackparent=False, extension=None,
                  copy_function=None, compare_function=None, **kwargs):
|
@ -873,6 +871,26 @@ class CollectionAttributeImpl(AttributeImpl):
|
|||
copy_function = self.__copy
|
||||
self.copy = copy_function
|
||||
self.collection_factory = typecallable
|
||||
self._append_token = None
|
||||
self._remove_token = None
|
||||
|
||||
if getattr(self.collection_factory, "_sa_linker", None):
|
||||
|
||||
@event.listens_for(self, "init_collection")
|
||||
def link(target, collection, collection_adapter):
|
||||
collection._sa_linker(collection_adapter)
|
||||
|
||||
@event.listens_for(self, "dispose_collection")
|
||||
def unlink(target, collection, collection_adapter):
|
||||
collection._sa_linker(None)
|
||||
|
||||
def _init_append_token(self):
|
||||
self._append_token = Event(self, OP_APPEND)
|
||||
return self._append_token
|
||||
|
||||
def _init_remove_token(self):
|
||||
self._remove_token = Event(self, OP_REMOVE)
|
||||
return self._remove_token
|
||||
|
||||
def __copy(self, item):
|
||||
return [y for y in collections.collection_adapter(item)]
|
||||
|
|
@@ -916,17 +934,11 @@ class CollectionAttributeImpl(AttributeImpl):

         return [(instance_state(o), o) for o in current]

-    @util.memoized_property
-    def _append_token(self):
-        return Event(self, OP_APPEND)
-
-    @util.memoized_property
-    def _remove_token(self):
-        return Event(self, OP_REMOVE)
-
     def fire_append_event(self, state, dict_, value, initiator):
         for fn in self.dispatch.append:
-            value = fn(state, value, initiator or self._append_token)
+            value = fn(
+                state, value,
+                initiator or self._append_token or self._init_append_token())

         state._modified_event(dict_, self, NEVER_SET, True)

@@ -943,7 +955,8 @@ class CollectionAttributeImpl(AttributeImpl):
             self.sethasparent(instance_state(value), state, False)

         for fn in self.dispatch.remove:
-            fn(state, value, initiator or self._remove_token)
+            fn(state, value,
+               initiator or self._remove_token or self._init_remove_token())

         state._modified_event(dict_, self, NEVER_SET, True)

@@ -966,9 +979,14 @@ class CollectionAttributeImpl(AttributeImpl):
         return user_data

     def _initialize_collection(self, state):
-        return state.manager.initialize_collection(
+
+        adapter, collection = state.manager.initialize_collection(
             self.key, state, self.collection_factory)

+        self.dispatch.init_collection(state, collection, adapter)
+
+        return adapter, collection
+
     def append(self, state, dict_, value, initiator, passive=PASSIVE_OFF):
         collection = self.get_collection(state, dict_, passive=passive)
         if collection is PASSIVE_NO_RESULT:
@@ -1037,12 +1055,14 @@ class CollectionAttributeImpl(AttributeImpl):
         # place a copy of "old" in state.committed_state
         state._modified_event(dict_, self, old, True)

-        old_collection = getattr(old, '_sa_adapter')
+        old_collection = old._sa_adapter

         dict_[self.key] = user_data

         collections.bulk_replace(new_values, old_collection, new_collection)
-        old_collection.unlink(old)
+
+        del old._sa_adapter
+        self.dispatch.dispose_collection(state, old, old_collection)

     def _invalidate_collection(self, collection):
         adapter = getattr(collection, '_sa_adapter')
@@ -1128,7 +1148,8 @@ def backref_listeners(attribute, key, uselist):
                 impl.pop(old_state,
                          old_dict,
                          state.obj(),
-                         parent_impl._append_token,
+                         parent_impl._append_token or
+                         parent_impl._init_append_token(),
                          passive=PASSIVE_NO_FETCH)

         if child is not None:
@@ -1208,7 +1229,6 @@ History = util.namedtuple("History", [


 class History(History):
-
     """A 3-tuple of added, unchanged and deleted values,
     representing the changes which have occurred on an instrumented
     attribute.
@@ -1,5 +1,5 @@
 # orm/base.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -144,40 +144,50 @@ _INSTRUMENTOR = ('mapper', 'instrumentor')
 EXT_CONTINUE = util.symbol('EXT_CONTINUE')
 EXT_STOP = util.symbol('EXT_STOP')

-ONETOMANY = util.symbol('ONETOMANY',
+ONETOMANY = util.symbol(
+    'ONETOMANY',
     """Indicates the one-to-many direction for a :func:`.relationship`.

     This symbol is typically used by the internals but may be exposed within
     certain API features.

     """)

-MANYTOONE = util.symbol('MANYTOONE',
+MANYTOONE = util.symbol(
+    'MANYTOONE',
     """Indicates the many-to-one direction for a :func:`.relationship`.

     This symbol is typically used by the internals but may be exposed within
     certain API features.

     """)

-MANYTOMANY = util.symbol('MANYTOMANY',
+MANYTOMANY = util.symbol(
+    'MANYTOMANY',
     """Indicates the many-to-many direction for a :func:`.relationship`.

     This symbol is typically used by the internals but may be exposed within
     certain API features.

     """)

-NOT_EXTENSION = util.symbol('NOT_EXTENSION',
-    """Symbol indicating an :class:`_InspectionAttr` that's
+NOT_EXTENSION = util.symbol(
+    'NOT_EXTENSION',
+    """Symbol indicating an :class:`InspectionAttr` that's
     not part of sqlalchemy.ext.

-    Is assigned to the :attr:`._InspectionAttr.extension_type`
+    Is assigned to the :attr:`.InspectionAttr.extension_type`
     attibute.

     """)

-_none_set = frozenset([None])
+_never_set = frozenset([NEVER_SET])
+
+_none_set = frozenset([None, NEVER_SET, PASSIVE_NO_RESULT])
+
+_SET_DEFERRED_EXPIRED = util.symbol("SET_DEFERRED_EXPIRED")
+
+_DEFER_FOR_STATE = util.symbol("DEFER_FOR_STATE")


 def _generative(*assertions):
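The direction symbols reformatted above are what a relationship's `.direction` attribute reports at runtime. A small self-contained sketch, assuming nothing beyond a typical one-to-many mapping built here for illustration:

    from sqlalchemy import Column, ForeignKey, Integer, inspect
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import relationship
    from sqlalchemy.orm.interfaces import ONETOMANY

    Base = declarative_base()

    class Address(Base):
        __tablename__ = 'address'
        id = Column(Integer, primary_key=True)
        user_id = Column(Integer, ForeignKey('user.id'))

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        addresses = relationship(Address)

    # .direction carries one of the symbols defined in the hunk above
    assert inspect(User).relationships['addresses'].direction is ONETOMANY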
@@ -319,10 +329,9 @@ def _is_mapped_class(entity):

     insp = inspection.inspect(entity, False)
     return insp is not None and \
-        hasattr(insp, "mapper") and \
+        not insp.is_clause_element and \
         (
-            insp.is_mapper
-            or insp.is_aliased_class
+            insp.is_mapper or insp.is_aliased_class
         )


@@ -419,7 +428,7 @@ def class_mapper(class_, configure=True):
     return mapper


-class _InspectionAttr(object):
+class InspectionAttr(object):
     """A base class applied to all ORM objects that can be returned
     by the :func:`.inspect` function.

@@ -433,6 +442,7 @@ class _InspectionAttr(object):
     here intact for forwards-compatibility.

     """
+    __slots__ = ()

     is_selectable = False
     """Return True if this object is an instance of :class:`.Selectable`."""
@@ -456,7 +466,7 @@ class _InspectionAttr(object):
     :class:`.QueryableAttribute` which handles attributes events on behalf
     of a :class:`.MapperProperty`.  But can also be an extension type
     such as :class:`.AssociationProxy` or :class:`.hybrid_property`.
-    The :attr:`._InspectionAttr.extension_type` will refer to a constant
+    The :attr:`.InspectionAttr.extension_type` will refer to a constant
     identifying the specific subtype.

     .. seealso::
@@ -485,8 +495,46 @@ class _InspectionAttr(object):
     """


+class InspectionAttrInfo(InspectionAttr):
+    """Adds the ``.info`` attribute to :class:`.InspectionAttr`.
+
+    The rationale for :class:`.InspectionAttr` vs. :class:`.InspectionAttrInfo`
+    is that the former is compatible as a mixin for classes that specify
+    ``__slots__``; this is essentially an implementation artifact.
+
+    """
+
+    @util.memoized_property
+    def info(self):
+        """Info dictionary associated with the object, allowing user-defined
+        data to be associated with this :class:`.InspectionAttr`.
+
+        The dictionary is generated when first accessed.  Alternatively,
+        it can be specified as a constructor argument to the
+        :func:`.column_property`, :func:`.relationship`, or :func:`.composite`
+        functions.
+
+        .. versionadded:: 0.8  Added support for .info to all
+           :class:`.MapperProperty` subclasses.
+
+        .. versionchanged:: 1.0.0 :attr:`.MapperProperty.info` is also
+           available on extension types via the
+           :attr:`.InspectionAttrInfo.info` attribute, so that it can apply
+           to a wider variety of ORM and extension constructs.
+
+        .. seealso::
+
+            :attr:`.QueryableAttribute.info`
+
+            :attr:`.SchemaItem.info`
+
+        """
+        return {}
+
+
 class _MappedAttribute(object):
     """Mixin for attributes which should be replaced by mapper-assigned
     attributes.

     """
+    __slots__ = ()
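A hedged sketch of the `.info` dictionary described in the docstring above, reusing the hypothetical `User`/`Address` mapping from the earlier example — per the docstring, `info` can be seeded through `relationship()` and read back via runtime inspection:

    from sqlalchemy import inspect

    # assuming the mapping declared
    #     addresses = relationship(Address, info={'audited': True})
    rel = inspect(User).relationships['addresses']
    rel.info.setdefault('note', 'user-defined metadata lives here')
    print(rel.info)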
@@ -1,5 +1,5 @@
 # orm/collections.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -111,6 +111,7 @@ from ..sql import expression
 from .. import util, exc as sa_exc
 from . import base

+from sqlalchemy.util.compat import inspect_getargspec

 __all__ = ['collection', 'collection_adapter',
            'mapped_collection', 'column_mapped_collection',
@@ -429,6 +430,10 @@ class collection(object):
         the instance. A single argument is passed: the collection adapter
         that has been linked, or None if unlinking.

+        .. deprecated:: 1.0.0 - the :meth:`.collection.linker` handler
+           is superseded by the :meth:`.AttributeEvents.init_collection`
+           and :meth:`.AttributeEvents.dispose_collection` handlers.
+
         """
         fn._sa_instrument_role = 'linker'
         return fn
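As the deprecation note says, the linker role is superseded by collection lifecycle events; the `CollectionAttributeImpl.__init__` hunk earlier wires the legacy hook through exactly these. A sketch of listening for them directly, again assuming the hypothetical `Parent.items` collection from the first example:

    from sqlalchemy import event

    @event.listens_for(Parent.items, 'init_collection')
    def on_init(target, collection, collection_adapter):
        # invoked when the collection is first built for an instance
        print('linked', collection_adapter)

    @event.listens_for(Parent.items, 'dispose_collection')
    def on_dispose(target, collection, collection_adapter):
        # invoked when the collection is replaced or unlinked
        print('unlinked', collection_adapter)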
@@ -575,7 +580,7 @@ class CollectionAdapter(object):
         self._key = attr.key
         self._data = weakref.ref(data)
         self.owner_state = owner_state
-        self.link_to_self(data)
+        data._sa_adapter = self

     def _warn_invalidated(self):
         util.warn("This collection has been invalidated.")
@@ -585,24 +590,20 @@ class CollectionAdapter(object):
         "The entity collection being adapted."
         return self._data()

+    @property
+    def _referenced_by_owner(self):
+        """return True if the owner state still refers to this collection.
+
+        This will return False within a bulk replace operation,
+        where this collection is the one being replaced.
+
+        """
+        return self.owner_state.dict[self._key] is self._data()
+
     @util.memoized_property
     def attr(self):
         return self.owner_state.manager[self._key].impl

-    def link_to_self(self, data):
-        """Link a collection to this adapter"""
-        data._sa_adapter = self
-        if data._sa_linker:
-            data._sa_linker(self)
-
-    def unlink(self, data):
-        """Unlink a collection from any adapter"""
-        del data._sa_adapter
-        if data._sa_linker:
-            data._sa_linker(None)
-
     def adapt_like_to_iterable(self, obj):
         """Converts collection-compatible objects to an iterable of values.

@@ -861,11 +862,24 @@ def _instrument_class(cls):
             "Can not instrument a built-in type. Use a "
             "subclass, even a trivial one.")

+    roles, methods = _locate_roles_and_methods(cls)
+
+    _setup_canned_roles(cls, roles, methods)
+
+    _assert_required_roles(cls, roles, methods)
+
+    _set_collection_attributes(cls, roles, methods)
+
+
+def _locate_roles_and_methods(cls):
+    """search for _sa_instrument_role-decorated methods in
+    method resolution order, assign to roles.
+
+    """
     roles = {}
     methods = {}

-    # search for _sa_instrument_role-decorated methods in
-    # method resolution order, assign to roles
     for supercls in cls.__mro__:
         for name, method in vars(supercls).items():
             if not util.callable(method):
@@ -890,14 +904,19 @@ def _instrument_class(cls):
                 assert op in ('fire_append_event', 'fire_remove_event')
                 after = op
             if before:
-                methods[name] = before[0], before[1], after
+                methods[name] = before + (after, )
             elif after:
                 methods[name] = None, None, after
+    return roles, methods
+

-    # see if this class has "canned" roles based on a known
-    # collection type (dict, set, list).  Apply those roles
-    # as needed to the "roles" dictionary, and also
-    # prepare "decorator" methods
+def _setup_canned_roles(cls, roles, methods):
+    """see if this class has "canned" roles based on a known
+    collection type (dict, set, list).  Apply those roles
+    as needed to the "roles" dictionary, and also
+    prepare "decorator" methods
+
+    """
     collection_type = util.duck_type_collection(cls)
     if collection_type in __interfaces:
         canned_roles, decorators = __interfaces[collection_type]
@@ -911,8 +930,12 @@ def _instrument_class(cls):
                     not hasattr(fn, '_sa_instrumented')):
                 setattr(cls, method, decorator(fn))

-    # ensure all roles are present, and apply implicit instrumentation if
-    # needed
+
+def _assert_required_roles(cls, roles, methods):
+    """ensure all roles are present, and apply implicit instrumentation if
+    needed
+
+    """
     if 'appender' not in roles or not hasattr(cls, roles['appender']):
         raise sa_exc.ArgumentError(
             "Type %s must elect an appender method to be "
@@ -934,8 +957,12 @@ def _instrument_class(cls):
             "Type %s must elect an iterator method to be "
             "a collection class" % cls.__name__)

-    # apply ad-hoc instrumentation from decorators, class-level defaults
-    # and implicit role declarations
+
+def _set_collection_attributes(cls, roles, methods):
+    """apply ad-hoc instrumentation from decorators, class-level defaults
+    and implicit role declarations
+
+    """
     for method_name, (before, argument, after) in methods.items():
         setattr(cls, method_name,
                 _instrument_membership_mutator(getattr(cls, method_name),
@@ -945,8 +972,7 @@ def _instrument_class(cls):
         setattr(cls, '_sa_%s' % role, getattr(cls, method_name))

     cls._sa_adapter = None
-    if not hasattr(cls, '_sa_linker'):
-        cls._sa_linker = None
+
     if not hasattr(cls, '_sa_converter'):
         cls._sa_converter = None
     cls._sa_instrumented = id(cls)
@@ -957,7 +983,7 @@ def _instrument_membership_mutator(method, before, argument, after):
     adapter."""
     # This isn't smart enough to handle @adds(1) for 'def fn(self, (a, b))'
     if before:
-        fn_args = list(util.flatten_iterator(inspect.getargspec(method)[0]))
+        fn_args = list(util.flatten_iterator(inspect_getargspec(method)[0]))
         if isinstance(argument, int):
             pos_arg = argument
             named_arg = len(fn_args) > argument and fn_args[argument] or None
@@ -1482,8 +1508,8 @@ class MappedCollection(dict):
     def __init__(self, keyfunc):
        """Create a new collection with keying provided by keyfunc.

-        keyfunc may be any callable any callable that takes an object and
-        returns an object for use as a dictionary key.
+        keyfunc may be any callable that takes an object and returns an object
+        for use as a dictionary key.

        The keyfunc will be called every time the ORM needs to add a member by
        value-only (such as when loading instances from the database) or
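`MappedCollection` is the type behind the dictionary-based collection helpers; a brief sketch of the keyfunc idea via `attribute_mapped_collection` — the `Parent`/`Note` mapping named here is assumed, not part of this diff:

    from sqlalchemy.orm import relationship
    from sqlalchemy.orm.collections import attribute_mapped_collection

    # inside a mapped Parent class: parent.notes becomes a dict keyed by
    # each Note's .keyword attribute (the keyfunc is an attrgetter here)
    notes = relationship(
        'Note', collection_class=attribute_mapped_collection('keyword'))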
@@ -1,5 +1,5 @@
 # orm/dependency.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -303,9 +303,9 @@ class DependencyProcessor(object):
             set
         )

-    def _post_update(self, state, uowcommit, related):
+    def _post_update(self, state, uowcommit, related, is_m2o_delete=False):
         for x in related:
-            if x is not None:
+            if not is_m2o_delete or x is not None:
                 uowcommit.issue_post_update(
                     state,
                     [r for l, r in self.prop.synchronize_pairs]
@@ -740,7 +740,9 @@ class ManyToOneDP(DependencyProcessor):
                     self.key,
                     self._passive_delete_flag)
                 if history:
-                    self._post_update(state, uowcommit, history.sum())
+                    self._post_update(
+                        state, uowcommit, history.sum(),
+                        is_m2o_delete=True)

     def process_saves(self, uowcommit, states):
         for state in states:
@@ -1119,6 +1121,7 @@ class ManyToManyDP(DependencyProcessor):
                     if c.key in associationrow
                 ]))
             result = connection.execute(statement, secondary_update)
+
             if result.supports_sane_multi_rowcount() and \
                     result.rowcount != len(secondary_update):
                 raise exc.StaleDataError(
@@ -1,5 +1,5 @@
 # orm/deprecated_interfaces.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -67,10 +67,6 @@ class MapperExtension(object):
         (
             'init_instance',
             'init_failed',
-            'translate_row',
-            'create_instance',
-            'append_result',
-            'populate_instance',
             'reconstruct_instance',
             'before_insert',
             'after_insert',
@@ -156,108 +152,6 @@ class MapperExtension(object):
         """
         return EXT_CONTINUE

-    def translate_row(self, mapper, context, row):
-        """Perform pre-processing on the given result row and return a
-        new row instance.
-
-        This is called when the mapper first receives a row, before
-        the object identity or the instance itself has been derived
-        from that row.   The given row may or may not be a
-        ``RowProxy`` object - it will always be a dictionary-like
-        object which contains mapped columns as keys.  The
-        returned object should also be a dictionary-like object
-        which recognizes mapped columns as keys.
-
-        If the ultimate return value is EXT_CONTINUE, the row
-        is not translated.
-
-        """
-        return EXT_CONTINUE
-
-    def create_instance(self, mapper, selectcontext, row, class_):
-        """Receive a row when a new object instance is about to be
-        created from that row.
-
-        The method can choose to create the instance itself, or it can return
-        EXT_CONTINUE to indicate normal object creation should take place.
-
-        mapper
-          The mapper doing the operation
-
-        selectcontext
-          The QueryContext generated from the Query.
-
-        row
-          The result row from the database
-
-        class\_
-          The class we are mapping.
-
-        return value
-          A new object instance, or EXT_CONTINUE
-
-        """
-        return EXT_CONTINUE
-
-    def append_result(self, mapper, selectcontext, row, instance,
-                      result, **flags):
-        """Receive an object instance before that instance is appended
-        to a result list.
-
-        If this method returns EXT_CONTINUE, result appending will proceed
-        normally.  if this method returns any other value or None,
-        result appending will not proceed for this instance, giving
-        this extension an opportunity to do the appending itself, if
-        desired.
-
-        mapper
-          The mapper doing the operation.
-
-        selectcontext
-          The QueryContext generated from the Query.
-
-        row
-          The result row from the database.
-
-        instance
-          The object instance to be appended to the result.
-
-        result
-          List to which results are being appended.
-
-        \**flags
-          extra information about the row, same as criterion in
-          ``create_row_processor()`` method of
-          :class:`~sqlalchemy.orm.interfaces.MapperProperty`
-        """
-
-        return EXT_CONTINUE
-
-    def populate_instance(self, mapper, selectcontext, row,
-                          instance, **flags):
-        """Receive an instance before that instance has
-        its attributes populated.
-
-        This usually corresponds to a newly loaded instance but may
-        also correspond to an already-loaded instance which has
-        unloaded attributes to be populated.  The method may be called
-        many times for a single instance, as multiple result rows are
-        used to populate eagerly loaded collections.
-
-        If this method returns EXT_CONTINUE, instance population will
-        proceed normally.  If any other value or None is returned,
-        instance population will not proceed, giving this extension an
-        opportunity to populate the instance itself, if desired.
-
-        .. deprecated:: 0.5
-            Most usages of this hook are obsolete.  For a
-            generic "object has been newly created from a row" hook, use
-            ``reconstruct_instance()``, or the ``@orm.reconstructor``
-            decorator.
-
-        """
-        return EXT_CONTINUE
-
     def reconstruct_instance(self, mapper, instance):
         """Receive an object instance after it has been created via
         ``__new__``, and after initial attribute population has
@@ -1,5 +1,5 @@
 # orm/descriptor_props.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -143,6 +143,7 @@ class CompositeProperty(DescriptorProperty):
            class.  **Deprecated.**  Please see :class:`.AttributeEvents`.

         """
+        super(CompositeProperty, self).__init__()

         self.attrs = attrs
         self.composite_class = class_
@@ -372,9 +373,9 @@ class CompositeProperty(DescriptorProperty):
                 property.key, *expr)

     def create_row_processor(self, query, procs, labels):
-        def proc(row, result):
+        def proc(row):
             return self.property.composite_class(
-                *[proc(row, result) for proc in procs])
+                *[proc(row) for proc in procs])
         return proc

     class Comparator(PropComparator):
@@ -471,6 +472,7 @@ class ConcreteInheritedProperty(DescriptorProperty):
         return comparator_callable

     def __init__(self):
+        super(ConcreteInheritedProperty, self).__init__()
         def warn():
             raise AttributeError("Concrete %s does not implement "
                                  "attribute %r at the instance level. Add "
@@ -496,7 +498,7 @@ class SynonymProperty(DescriptorProperty):

     def __init__(self, name, map_column=None,
                  descriptor=None, comparator_factory=None,
-                 doc=None):
+                 doc=None, info=None):
         """Denote an attribute name as a synonym to a mapped property,
         in that the attribute will mirror the value and expression behavior
         of another attribute.
@@ -531,6 +533,11 @@ class SynonymProperty(DescriptorProperty):
           conjunction with the ``descriptor`` argument in order to link a
           user-defined descriptor as a "wrapper" for an existing column.

+        :param info: Optional data dictionary which will be populated into the
+            :attr:`.InspectionAttr.info` attribute of this object.
+
+            .. versionadded:: 1.0.0
+
         :param comparator_factory: A subclass of :class:`.PropComparator`
           that will provide custom comparison behavior at the SQL expression
           level.
@@ -550,12 +557,15 @@ class SynonymProperty(DescriptorProperty):
           more complicated attribute-wrapping schemes than synonyms.

         """
+        super(SynonymProperty, self).__init__()
+
         self.name = name
         self.map_column = map_column
         self.descriptor = descriptor
         self.comparator_factory = comparator_factory
         self.doc = doc or (descriptor and descriptor.__doc__) or None
+        if info:
+            self.info = info

         util.set_creation_order(self)

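A sketch of the new ``info`` parameter on ``synonym()``, as wired up above — the mapped class and its ``status`` column are hypothetical:

    from sqlalchemy.orm import synonym

    # inside a mapped class that already maps a 'status' column:
    job_status = synonym('status', info={'legacy_name': 'job_status'})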
@@ -608,7 +618,8 @@ class SynonymProperty(DescriptorProperty):
 class ComparableProperty(DescriptorProperty):
     """Instruments a Python property for use in query expressions."""

-    def __init__(self, comparator_factory, descriptor=None, doc=None):
+    def __init__(
+            self, comparator_factory, descriptor=None, doc=None, info=None):
         """Provides a method of applying a :class:`.PropComparator`
         to any Python descriptor attribute.

@@ -670,10 +681,18 @@ class ComparableProperty(DescriptorProperty):
           The like-named descriptor will be automatically retrieved from the
           mapped class if left blank in a ``properties`` declaration.

+        :param info: Optional data dictionary which will be populated into the
+            :attr:`.InspectionAttr.info` attribute of this object.
+
+            .. versionadded:: 1.0.0
+
         """
+        super(ComparableProperty, self).__init__()
         self.descriptor = descriptor
         self.comparator_factory = comparator_factory
         self.doc = doc or (descriptor and descriptor.__doc__) or None
+        if info:
+            self.info = info
         util.set_creation_order(self)

     def _comparator_factory(self, mapper):
@@ -1,5 +1,5 @@
 # orm/dynamic.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -221,10 +221,8 @@ class AppenderMixin(object):

             mapper = object_mapper(instance)
             prop = mapper._props[self.attr.key]
-            self._criterion = prop.compare(
-                operators.eq,
+            self._criterion = prop._with_parent(
                 instance,
-                value_is_parent=True,
                 alias_secondary=False)

         if self.attr.order_by:
@@ -1,5 +1,5 @@
 # orm/evaluator.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -7,7 +7,6 @@

 import operator
 from ..sql import operators
-from .. import util


 class UnevaluatableError(Exception):
@@ -27,7 +26,6 @@ _notimplemented_ops = set(getattr(operators, op)


 class EvaluatorCompiler(object):
-
     def __init__(self, target_cls=None):
         self.target_cls = target_cls

@@ -55,14 +53,9 @@ class EvaluatorCompiler(object):
             parentmapper = clause._annotations['parentmapper']
             if self.target_cls and not issubclass(
                     self.target_cls, parentmapper.class_):
-                util.warn(
-                    "Can't do in-Python evaluation of criteria against "
-                    "alternate class %s; "
-                    "expiration of objects will not be accurate "
-                    "and/or may fail.  synchronize_session should be set to "
-                    "False or 'fetch'. "
-                    "This warning will be an exception "
-                    "in 1.0." % parentmapper.class_
+                raise UnevaluatableError(
+                    "Can't evaluate criteria against alternate class %s" %
+                    parentmapper.class_
                 )
             key = parentmapper._columntoproperty[clause].key
         else:
@@ -1,5 +1,5 @@
 # orm/events.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -17,7 +17,8 @@ from . import mapperlib, instrumentation
 from .session import Session, sessionmaker
 from .scoping import scoped_session
 from .attributes import QueryableAttribute
 from .query import Query
+from sqlalchemy.util.compat import inspect_getargspec


 class InstrumentationEvents(event.Events):
     """Events related to class instrumentation events.
@@ -61,7 +62,8 @@ class InstrumentationEvents(event.Events):
     @classmethod
     def _listen(cls, event_key, propagate=True, **kw):
         target, identifier, fn = \
-            event_key.dispatch_target, event_key.identifier, event_key.fn
+            event_key.dispatch_target, event_key.identifier, \
+            event_key._listen_fn

         def listen(target_cls, *arg):
             listen_cls = target()
@@ -192,7 +194,8 @@ class InstanceEvents(event.Events):
     @classmethod
     def _listen(cls, event_key, raw=False, propagate=False, **kw):
         target, identifier, fn = \
-            event_key.dispatch_target, event_key.identifier, event_key.fn
+            event_key.dispatch_target, event_key.identifier, \
+            event_key._listen_fn

         if not raw:
             def wrap(state, *arg, **kw):
@@ -214,14 +217,41 @@ class InstanceEvents(event.Events):
     def first_init(self, manager, cls):
         """Called when the first instance of a particular mapping is called.

+        This event is called when the ``__init__`` method of a class
+        is called the first time for that particular class.  The event
+        invokes before ``__init__`` actually proceeds as well as before
+        the :meth:`.InstanceEvents.init` event is invoked.
+
         """

     def init(self, target, args, kwargs):
         """Receive an instance when its constructor is called.

         This method is only called during a userland construction of
-        an object.  It is not called when an object is loaded from the
-        database.
+        an object, in conjunction with the object's constructor, e.g.
+        its ``__init__`` method.  It is not called when an object is
+        loaded from the database; see the :meth:`.InstanceEvents.load`
+        event in order to intercept a database load.
+
+        The event is called before the actual ``__init__`` constructor
+        of the object is called.  The ``kwargs`` dictionary may be
+        modified in-place in order to affect what is passed to
+        ``__init__``.
+
+        :param target: the mapped instance.  If
+         the event is configured with ``raw=True``, this will
+         instead be the :class:`.InstanceState` state-management
+         object associated with the instance.
+        :param args: positional arguments passed to the ``__init__`` method.
+         This is passed as a tuple and is currently immutable.
+        :param kwargs: keyword arguments passed to the ``__init__`` method.
+         This structure *can* be altered in place.
+
+        .. seealso::
+
+            :meth:`.InstanceEvents.init_failure`
+
+            :meth:`.InstanceEvents.load`

         """

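A runnable sketch of the expanded contract — an ``init`` listener may rewrite ``kwargs`` before ``__init__`` runs. The ``User`` model here is hypothetical:

    from sqlalchemy import Column, Integer, String, event
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        status = Column(String)

    @event.listens_for(User, 'init')
    def apply_defaults(target, args, kwargs):
        # kwargs is documented above as mutable in place; args is a tuple
        kwargs.setdefault('status', 'new')

    u = User()
    assert u.status == 'new'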
@@ -230,8 +260,31 @@ class InstanceEvents(event.Events):
         and raised an exception.

         This method is only called during a userland construction of
-        an object.  It is not called when an object is loaded from the
-        database.
+        an object, in conjunction with the object's constructor, e.g.
+        its ``__init__`` method.  It is not called when an object is loaded
+        from the database.
+
+        The event is invoked after an exception raised by the ``__init__``
+        method is caught.  After the event
+        is invoked, the original exception is re-raised outwards, so that
+        the construction of the object still raises an exception.  The
+        actual exception and stack trace raised should be present in
+        ``sys.exc_info()``.
+
+        :param target: the mapped instance.  If
+         the event is configured with ``raw=True``, this will
+         instead be the :class:`.InstanceState` state-management
+         object associated with the instance.
+        :param args: positional arguments that were passed to the ``__init__``
+         method.
+        :param kwargs: keyword arguments that were passed to the ``__init__``
+         method.
+
+        .. seealso::
+
+            :meth:`.InstanceEvents.init`
+
+            :meth:`.InstanceEvents.load`

         """

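And the failure side: the original exception re-raises after the hook returns, with details available from ``sys.exc_info()``. Reusing the hypothetical ``User`` mapping from the previous sketch:

    import sys
    from sqlalchemy import event

    @event.listens_for(User, 'init_failure')
    def log_ctor_failure(target, args, kwargs):
        # the exception is still in flight; it re-raises after this hook
        exc_type, exc, _ = sys.exc_info()
        print('User() raised %s: %s' % (exc_type.__name__, exc))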
@@ -258,22 +311,58 @@ class InstanceEvents(event.Events):
         ``None`` if the load does not correspond to a :class:`.Query`,
         such as during :meth:`.Session.merge`.

+        .. seealso::
+
+            :meth:`.InstanceEvents.init`
+
+            :meth:`.InstanceEvents.refresh`
+
         """

     def refresh(self, target, context, attrs):
         """Receive an object instance after one or more attributes have
         been refreshed from a query.

+        Contrast this to the :meth:`.InstanceEvents.load` method, which
+        is invoked when the object is first loaded from a query.
+
         :param target: the mapped instance.  If
          the event is configured with ``raw=True``, this will
          instead be the :class:`.InstanceState` state-management
          object associated with the instance.
         :param context: the :class:`.QueryContext` corresponding to the
          current :class:`.Query` in progress.
-        :param attrs: iterable collection of attribute names which
+        :param attrs: sequence of attribute names which
          were populated, or None if all column-mapped, non-deferred
          attributes were populated.

+        .. seealso::
+
+            :meth:`.InstanceEvents.load`
+
         """

+    def refresh_flush(self, target, flush_context, attrs):
+        """Receive an object instance after one or more attributes have
+        been refreshed within the persistence of the object.
+
+        This event is the same as :meth:`.InstanceEvents.refresh` except
+        it is invoked within the unit of work flush process, and the values
+        here typically come from the process of handling an INSERT or
+        UPDATE, such as via the RETURNING clause or from Python-side default
+        values.
+
+        .. versionadded:: 1.0.5
+
+        :param target: the mapped instance.  If
+         the event is configured with ``raw=True``, this will
+         instead be the :class:`.InstanceState` state-management
+         object associated with the instance.
+        :param flush_context: Internal :class:`.UOWTransaction` object
+         which handles the details of the flush.
+        :param attrs: sequence of attribute names which
+         were populated.
+
+        """
+
     def expire(self, target, attrs):
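A sketch of the new ``refresh_flush`` hook, which fires inside flush when server-generated values (RETURNING, defaults) come back — again assuming the hypothetical ``User`` mapping:

    from sqlalchemy import event

    @event.listens_for(User, 'refresh_flush')
    def capture_server_values(target, flush_context, attrs):
        # attrs is the sequence of attribute names just populated
        for name in attrs:
            print(name, '=', getattr(target, name))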
@@ -287,24 +376,12 @@ class InstanceEvents(event.Events):
         the event is configured with ``raw=True``, this will
         instead be the :class:`.InstanceState` state-management
         object associated with the instance.
-        :param attrs: iterable collection of attribute
+        :param attrs: sequence of attribute
         names which were expired, or None if all attributes were
         expired.

         """

-    def resurrect(self, target):
-        """Receive an object instance as it is 'resurrected' from
-        garbage collection, which occurs when a "dirty" state falls
-        out of scope.
-
-        :param target: the mapped instance.  If
-         the event is configured with ``raw=True``, this will
-         instead be the :class:`.InstanceState` state-management
-         object associated with the instance.
-
-        """
-
     def pickle(self, target, state_dict):
         """Receive an object instance when its associated state is
         being pickled.
@@ -510,7 +587,8 @@ class MapperEvents(event.Events):
     def _listen(
             cls, event_key, raw=False, retval=False, propagate=False, **kw):
         target, identifier, fn = \
-            event_key.dispatch_target, event_key.identifier, event_key.fn
+            event_key.dispatch_target, event_key.identifier, \
+            event_key._listen_fn

         if identifier in ("before_configured", "after_configured") and \
                 target is not mapperlib.Mapper:
@@ -524,7 +602,7 @@ class MapperEvents(event.Events):
             meth = getattr(cls, identifier)
             try:
                 target_index = \
-                    inspect.getargspec(meth)[0].index('target') - 1
+                    inspect_getargspec(meth)[0].index('target') - 1
             except ValueError:
                 target_index = None

@@ -575,32 +653,67 @@ class MapperEvents(event.Events):
         """

     def mapper_configured(self, mapper, class_):
-        """Called when the mapper for the class is fully configured.
+        """Called when a specific mapper has completed its own configuration
+        within the scope of the :func:`.configure_mappers` call.

-        This event is the latest phase of mapper construction, and
-        is invoked when the mapped classes are first used, so that
-        relationships between mappers can be resolved.   When the event is
-        called, the mapper should be in its final state.
+        The :meth:`.MapperEvents.mapper_configured` event is invoked
+        for each mapper that is encountered when the
+        :func:`.orm.configure_mappers` function proceeds through the current
+        list of not-yet-configured mappers.
+        :func:`.orm.configure_mappers` is typically invoked
+        automatically as mappings are first used, as well as each time
+        new mappers have been made available and new mapper use is
+        detected.

-        While the configuration event normally occurs automatically,
-        it can be forced to occur ahead of time, in the case where the event
-        is needed before any actual mapper usage, by using the
-        :func:`.configure_mappers` function.
+        When the event is called, the mapper should be in its final
+        state, but **not including backrefs** that may be invoked from
+        other mappers; they might still be pending within the
+        configuration operation.  Bidirectional relationships that
+        are instead configured via the
+        :paramref:`.orm.relationship.back_populates` argument
+        *will* be fully available, since this style of relationship does not
+        rely upon other possibly-not-configured mappers to know that they
+        exist.
+
+        For an event that is guaranteed to have **all** mappers ready
+        to go including backrefs that are defined only on other
+        mappings, use the :meth:`.MapperEvents.after_configured`
+        event; this event invokes only after all known mappings have been
+        fully configured.
+
+        The :meth:`.MapperEvents.mapper_configured` event, unlike
+        :meth:`.MapperEvents.before_configured` or
+        :meth:`.MapperEvents.after_configured`,
+        is called for each mapper/class individually, and the mapper is
+        passed to the event itself.  It also is called exactly once for
+        a particular mapper.  The event is therefore useful for
+        configurational steps that benefit from being invoked just once
+        on a specific mapper basis, which don't require that "backref"
+        configurations are necessarily ready yet.

         :param mapper: the :class:`.Mapper` which is the target
          of this event.
         :param class\_: the mapped class.

+        .. seealso::
+
+            :meth:`.MapperEvents.before_configured`
+
+            :meth:`.MapperEvents.after_configured`
+
         """
         # TODO: need coverage for this event

     def before_configured(self):
         """Called before a series of mappers have been configured.

-        This corresponds to the :func:`.orm.configure_mappers` call, which
-        note is usually called automatically as mappings are first
-        used.
+        The :meth:`.MapperEvents.before_configured` event is invoked
+        each time the :func:`.orm.configure_mappers` function is
+        invoked, before the function has done any of its work.
+        :func:`.orm.configure_mappers` is typically invoked
+        automatically as mappings are first used, as well as each time
+        new mappers have been made available and new mapper use is
+        detected.

         This event can **only** be applied to the :class:`.Mapper` class
         or :func:`.mapper` function, and not to individual mappings or
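A sketch of a per-mapper hook matching the rewritten docstring, registered against the `mapper()` function so it fires once for every mapping as it is configured:

    from sqlalchemy import event
    from sqlalchemy.orm import mapper

    @event.listens_for(mapper, 'mapper_configured')
    def on_mapper_configured(mapper_arg, class_):
        # invoked once per mapper; backrefs originating on other,
        # not-yet-configured mappers may still be pending here (see above)
        print('configured', class_.__name__)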
@@ -612,11 +725,16 @@ class MapperEvents(event.Events):
             def go():
                 # ...

+        Constrast this event to :meth:`.MapperEvents.after_configured`,
+        which is invoked after the series of mappers has been configured,
+        as well as :meth:`.MapperEvents.mapper_configured`, which is invoked
+        on a per-mapper basis as each one is configured to the extent possible.
+
         Theoretically this event is called once per
         application, but is actually called any time new mappers
         are to be affected by a :func:`.orm.configure_mappers`
         call.   If new mappings are constructed after existing ones have
-        already been used, this event can be called again.  To ensure
+        already been used, this event will likely be called again.  To ensure
         that a particular event is only called once and no further, the
         ``once=True`` argument (new in 0.9.4) can be applied::

@@ -629,14 +747,33 @@ class MapperEvents(event.Events):

         .. versionadded:: 0.9.3

+        .. seealso::
+
+            :meth:`.MapperEvents.mapper_configured`
+
+            :meth:`.MapperEvents.after_configured`
+
         """

     def after_configured(self):
         """Called after a series of mappers have been configured.

-        This corresponds to the :func:`.orm.configure_mappers` call, which
-        note is usually called automatically as mappings are first
-        used.
+        The :meth:`.MapperEvents.after_configured` event is invoked
+        each time the :func:`.orm.configure_mappers` function is
+        invoked, after the function has completed its work.
+        :func:`.orm.configure_mappers` is typically invoked
+        automatically as mappings are first used, as well as each time
+        new mappers have been made available and new mapper use is
+        detected.
+
+        Contrast this event to the :meth:`.MapperEvents.mapper_configured`
+        event, which is called on a per-mapper basis while the configuration
+        operation proceeds; unlike that event, when this event is invoked,
+        all cross-configurations (e.g. backrefs) will also have been made
+        available for any mappers that were pending.
+        Also constrast to :meth:`.MapperEvents.before_configured`,
+        which is invoked before the series of mappers has been configured.

         This event can **only** be applied to the :class:`.Mapper` class
         or :func:`.mapper` function, and not to individual mappings or
@@ -652,7 +789,7 @@ class MapperEvents(event.Events):
         application, but is actually called any time new mappers
         have been affected by a :func:`.orm.configure_mappers`
         call.   If new mappings are constructed after existing ones have
-        already been used, this event can be called again.  To ensure
+        already been used, this event will likely be called again.  To ensure
         that a particular event is only called once and no further, the
         ``once=True`` argument (new in 0.9.4) can be applied::

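The ``once=True`` pattern referenced in both docstrings, sketched in full:

    from sqlalchemy import event
    from sqlalchemy.orm import mapper

    @event.listens_for(mapper, 'after_configured', once=True)
    def go():
        # runs at most once, even if configure_mappers() trips again
        # after new mappings are defined
        print('all mappers configured')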
@ -662,144 +799,11 @@ class MapperEvents(event.Events):
|
|||
def go():
|
||||
# ...
|
||||
|
||||
"""
|
||||
.. seealso::
|
||||
|
||||
def translate_row(self, mapper, context, row):
|
||||
"""Perform pre-processing on the given result row and return a
|
||||
new row instance.
|
||||
:meth:`.MapperEvents.mapper_configured`
|
||||
|
||||
.. deprecated:: 0.9 the :meth:`.translate_row` event should
|
||||
be considered legacy. The row as delivered in a mapper
load operation typically requires that highly technical
details be accommodated in order to identify the correct
column keys present in the row, rendering this particular
event hook difficult to use and unreliable.
|
||||
|
||||
This listener is typically registered with ``retval=True``.
|
||||
It is called when the mapper first receives a row, before
|
||||
the object identity or the instance itself has been derived
|
||||
from that row. The given row may or may not be a
|
||||
:class:`.RowProxy` object - it will always be a dictionary-like
|
||||
object which contains mapped columns as keys. The
|
||||
returned object should also be a dictionary-like object
|
||||
which recognizes mapped columns as keys.
|
||||
|
||||
:param mapper: the :class:`.Mapper` which is the target
|
||||
of this event.
|
||||
:param context: the :class:`.QueryContext`, which includes
|
||||
a handle to the current :class:`.Query` in progress as well
|
||||
as additional state information.
|
||||
:param row: the result row being handled. This may be
|
||||
an actual :class:`.RowProxy` or may be a dictionary containing
|
||||
:class:`.Column` objects as keys.
|
||||
:return: When configured with ``retval=True``, the function
|
||||
should return a dictionary-like row object, or ``EXT_CONTINUE``,
|
||||
indicating the original row should be used.
|
||||
|
||||
|
||||
"""
|
||||
|
||||
def create_instance(self, mapper, context, row, class_):
|
||||
"""Receive a row when a new object instance is about to be
|
||||
created from that row.
|
||||
|
||||
.. deprecated:: 0.9 the :meth:`.create_instance` event should
|
||||
be considered legacy. Manipulation of the object construction
|
||||
mechanics during a load should not be necessary.
|
||||
|
||||
The method can choose to create the instance itself, or it can return
|
||||
EXT_CONTINUE to indicate normal object creation should take place.
|
||||
This listener is typically registered with ``retval=True``.
|
||||
|
||||
:param mapper: the :class:`.Mapper` which is the target
|
||||
of this event.
|
||||
:param context: the :class:`.QueryContext`, which includes
|
||||
a handle to the current :class:`.Query` in progress as well
|
||||
as additional state information.
|
||||
:param row: the result row being handled. This may be
|
||||
an actual :class:`.RowProxy` or may be a dictionary containing
|
||||
:class:`.Column` objects as keys.
|
||||
:param class\_: the mapped class.
|
||||
:return: When configured with ``retval=True``, the return value
|
||||
should be a newly created instance of the mapped class,
|
||||
or ``EXT_CONTINUE`` indicating that default object construction
|
||||
should take place.
|
||||
|
||||
"""
|
||||
|
||||
def append_result(self, mapper, context, row, target,
|
||||
result, **flags):
|
||||
"""Receive an object instance before that instance is appended
|
||||
to a result list.
|
||||
|
||||
.. deprecated:: 0.9 the :meth:`.append_result` event should
|
||||
be considered legacy. It is a difficult-to-use method
whose original purpose is better served by custom collection
classes.
|
||||
|
||||
This is a rarely used hook which can be used to alter
|
||||
the construction of a result list returned by :class:`.Query`.
|
||||
|
||||
:param mapper: the :class:`.Mapper` which is the target
|
||||
of this event.
|
||||
:param context: the :class:`.QueryContext`, which includes
|
||||
a handle to the current :class:`.Query` in progress as well
|
||||
as additional state information.
|
||||
:param row: the result row being handled. This may be
|
||||
an actual :class:`.RowProxy` or may be a dictionary containing
|
||||
:class:`.Column` objects as keys.
|
||||
:param target: the mapped instance being populated. If
|
||||
the event is configured with ``raw=True``, this will
|
||||
instead be the :class:`.InstanceState` state-management
|
||||
object associated with the instance.
|
||||
:param result: a list-like object where results are being
|
||||
appended.
|
||||
:param \**flags: Additional state information about the
|
||||
current handling of the row.
|
||||
:return: If this method is registered with ``retval=True``,
|
||||
a return value of ``EXT_STOP`` will prevent the instance
|
||||
from being appended to the given result list, whereas a
|
||||
return value of ``EXT_CONTINUE`` will result in the default
|
||||
behavior of appending the value to the result list.
|
||||
|
||||
"""
|
||||
|
||||
def populate_instance(self, mapper, context, row,
|
||||
target, **flags):
|
||||
"""Receive an instance before that instance has
|
||||
its attributes populated.
|
||||
|
||||
.. deprecated:: 0.9 the :meth:`.populate_instance` event should
|
||||
be considered legacy. The mechanics of instance population
should not need modification; special "on load" rules can,
as always, be accommodated by the :class:`.InstanceEvents.load` event.
|
||||
|
||||
This usually corresponds to a newly loaded instance but may
|
||||
also correspond to an already-loaded instance which has
|
||||
unloaded attributes to be populated. The method may be called
|
||||
many times for a single instance, as multiple result rows are
|
||||
used to populate eagerly loaded collections.
|
||||
|
||||
Most usages of this hook are obsolete. For a
|
||||
generic "object has been newly created from a row" hook, use
|
||||
:meth:`.InstanceEvents.load`.
|
||||
|
||||
:param mapper: the :class:`.Mapper` which is the target
|
||||
of this event.
|
||||
:param context: the :class:`.QueryContext`, which includes
|
||||
a handle to the current :class:`.Query` in progress as well
|
||||
as additional state information.
|
||||
:param row: the result row being handled. This may be
|
||||
an actual :class:`.RowProxy` or may be a dictionary containing
|
||||
:class:`.Column` objects as keys.
|
||||
:param target: the mapped instance. If
|
||||
the event is configured with ``raw=True``, this will
|
||||
instead be the :class:`.InstanceState` state-management
|
||||
object associated with the instance.
|
||||
:return: When configured with ``retval=True``, a return
|
||||
value of ``EXT_STOP`` will bypass instance population by
|
||||
the mapper. A value of ``EXT_CONTINUE`` indicates that
|
||||
default instance population should take place.
|
||||
|
||||
|
||||
"""
|
||||
|
||||
|
|
@@ -822,30 +826,14 @@ class MapperEvents(event.Events):
|
|||
steps.
|
||||
|
||||
.. warning::
|
||||
Mapper-level flush events are designed to operate **on attributes
|
||||
local to the immediate object being handled
|
||||
and via SQL operations with the given**
|
||||
:class:`.Connection` **only.** Handlers here should **not** make
|
||||
alterations to the state of the :class:`.Session` overall, and
|
||||
in general should not affect any :func:`.relationship` -mapped
|
||||
attributes, as session cascade rules will not function properly,
|
||||
nor is it always known if the related class has already been
|
||||
handled. Operations that **are not supported in mapper
|
||||
events** include:
|
||||
|
||||
* :meth:`.Session.add`
|
||||
* :meth:`.Session.delete`
|
||||
* Mapped collection append, add, remove, delete, discard, etc.
|
||||
* Mapped relationship attribute set/del events,
|
||||
i.e. ``someobject.related = someotherobject``
|
||||
|
||||
Operations which manipulate the state of the object
|
||||
relative to other objects are better handled:
|
||||
|
||||
* In the ``__init__()`` method of the mapped object itself, or
|
||||
another method designed to establish some particular state.
|
||||
* In a ``@validates`` handler, see :ref:`simple_validators`
|
||||
* Within the :meth:`.SessionEvents.before_flush` event.
|
||||
Mapper-level flush events only allow **very limited operations**,
|
||||
on attributes local to the row being operated upon only,
|
||||
as well as allowing any SQL to be emitted on the given
|
||||
:class:`.Connection`. **Please read fully** the notes
|
||||
at :ref:`session_persistence_mapper` for guidelines on using
|
||||
these methods; generally, the :meth:`.SessionEvents.before_flush`
|
||||
method should be preferred for general on-flush changes.
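For illustration, a listener that stays within these constraints,
assuming a hypothetical ``MyModel`` class with a ``created_at``
column::

    import datetime

    from sqlalchemy import event

    @event.listens_for(MyModel, "before_insert")
    def receive_before_insert(mapper, connection, target):
        # sets a plain column attribute local to the object at
        # hand, which is permitted in mapper-level flush events
        target.created_at = datetime.datetime.utcnow()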
|
||||
|
||||
:param mapper: the :class:`.Mapper` which is the target
|
||||
of this event.
|
||||
|
|
@@ -859,6 +847,10 @@ class MapperEvents(event.Events):
|
|||
object associated with the instance.
|
||||
:return: No return value is supported by this event.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`session_persistence_events`
|
||||
|
||||
"""
|
||||
|
||||
def after_insert(self, mapper, connection, target):
|
||||
|
|
@@ -880,30 +872,14 @@ class MapperEvents(event.Events):
|
|||
event->persist->event steps.
|
||||
|
||||
.. warning::
|
||||
Mapper-level flush events are designed to operate **on attributes
|
||||
local to the immediate object being handled
|
||||
and via SQL operations with the given**
|
||||
:class:`.Connection` **only.** Handlers here should **not** make
|
||||
alterations to the state of the :class:`.Session` overall, and in
|
||||
general should not affect any :func:`.relationship` -mapped
|
||||
attributes, as session cascade rules will not function properly,
|
||||
nor is it always known if the related class has already been
|
||||
handled. Operations that **are not supported in mapper
|
||||
events** include:
|
||||
|
||||
* :meth:`.Session.add`
|
||||
* :meth:`.Session.delete`
|
||||
* Mapped collection append, add, remove, delete, discard, etc.
|
||||
* Mapped relationship attribute set/del events,
|
||||
i.e. ``someobject.related = someotherobject``
|
||||
|
||||
Operations which manipulate the state of the object
|
||||
relative to other objects are better handled:
|
||||
|
||||
* In the ``__init__()`` method of the mapped object itself,
|
||||
or another method designed to establish some particular state.
|
||||
* In a ``@validates`` handler, see :ref:`simple_validators`
|
||||
* Within the :meth:`.SessionEvents.before_flush` event.
|
||||
Mapper-level flush events only allow **very limited operations**,
|
||||
on attributes local to the row being operated upon only,
|
||||
as well as allowing any SQL to be emitted on the given
|
||||
:class:`.Connection`. **Please read fully** the notes
|
||||
at :ref:`session_persistence_mapper` for guidelines on using
|
||||
these methods; generally, the :meth:`.SessionEvents.before_flush`
|
||||
method should be preferred for general on-flush changes.
|
||||
|
||||
:param mapper: the :class:`.Mapper` which is the target
|
||||
of this event.
|
||||
|
|
@@ -917,6 +893,10 @@ class MapperEvents(event.Events):
|
|||
object associated with the instance.
|
||||
:return: No return value is supported by this event.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`session_persistence_events`
|
||||
|
||||
"""
|
||||
|
||||
def before_update(self, mapper, connection, target):
|
||||
|
|
@@ -957,29 +937,14 @@ class MapperEvents(event.Events):
|
|||
steps.
|
||||
|
||||
.. warning::
|
||||
Mapper-level flush events are designed to operate **on attributes
|
||||
local to the immediate object being handled
|
||||
and via SQL operations with the given** :class:`.Connection`
|
||||
**only.** Handlers here should **not** make alterations to the
|
||||
state of the :class:`.Session` overall, and in general should not
|
||||
affect any :func:`.relationship` -mapped attributes, as
|
||||
session cascade rules will not function properly, nor is it
|
||||
always known if the related class has already been handled.
|
||||
Operations that **are not supported in mapper events** include:
|
||||
|
||||
* :meth:`.Session.add`
|
||||
* :meth:`.Session.delete`
|
||||
* Mapped collection append, add, remove, delete, discard, etc.
|
||||
* Mapped relationship attribute set/del events,
|
||||
i.e. ``someobject.related = someotherobject``
|
||||
|
||||
Operations which manipulate the state of the object
|
||||
relative to other objects are better handled:
|
||||
|
||||
* In the ``__init__()`` method of the mapped object itself,
|
||||
or another method designed to establish some particular state.
|
||||
* In a ``@validates`` handler, see :ref:`simple_validators`
|
||||
* Within the :meth:`.SessionEvents.before_flush` event.
|
||||
Mapper-level flush events only allow **very limited operations**,
|
||||
on attributes local to the row being operated upon only,
|
||||
as well as allowing any SQL to be emitted on the given
|
||||
:class:`.Connection`. **Please read fully** the notes
|
||||
at :ref:`session_persistence_mapper` for guidelines on using
|
||||
these methods; generally, the :meth:`.SessionEvents.before_flush`
|
||||
method should be preferred for general on-flush changes.
|
||||
|
||||
:param mapper: the :class:`.Mapper` which is the target
|
||||
of this event.
|
||||
|
|
@@ -992,6 +957,11 @@ class MapperEvents(event.Events):
|
|||
instead be the :class:`.InstanceState` state-management
|
||||
object associated with the instance.
|
||||
:return: No return value is supported by this event.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`session_persistence_events`
|
||||
|
||||
"""
|
||||
|
||||
def after_update(self, mapper, connection, target):
|
||||
|
|
@@ -1031,29 +1001,14 @@ class MapperEvents(event.Events):
|
|||
steps.
|
||||
|
||||
.. warning::
|
||||
Mapper-level flush events are designed to operate **on attributes
|
||||
local to the immediate object being handled
|
||||
and via SQL operations with the given** :class:`.Connection`
|
||||
**only.** Handlers here should **not** make alterations to the
|
||||
state of the :class:`.Session` overall, and in general should not
|
||||
affect any :func:`.relationship` -mapped attributes, as
|
||||
session cascade rules will not function properly, nor is it
|
||||
always known if the related class has already been handled.
|
||||
Operations that **are not supported in mapper events** include:
|
||||
|
||||
* :meth:`.Session.add`
|
||||
* :meth:`.Session.delete`
|
||||
* Mapped collection append, add, remove, delete, discard, etc.
|
||||
* Mapped relationship attribute set/del events,
|
||||
i.e. ``someobject.related = someotherobject``
|
||||
|
||||
Operations which manipulate the state of the object
|
||||
relative to other objects are better handled:
|
||||
|
||||
* In the ``__init__()`` method of the mapped object itself,
|
||||
or another method designed to establish some particular state.
|
||||
* In a ``@validates`` handler, see :ref:`simple_validators`
|
||||
* Within the :meth:`.SessionEvents.before_flush` event.
|
||||
Mapper-level flush events only allow **very limited operations**,
|
||||
on attributes local to the row being operated upon only,
|
||||
as well as allowing any SQL to be emitted on the given
|
||||
:class:`.Connection`. **Please read fully** the notes
|
||||
at :ref:`session_persistence_mapper` for guidelines on using
|
||||
these methods; generally, the :meth:`.SessionEvents.before_flush`
|
||||
method should be preferred for general on-flush changes.
|
||||
|
||||
:param mapper: the :class:`.Mapper` which is the target
|
||||
of this event.
|
||||
|
|
@@ -1067,6 +1022,10 @@ class MapperEvents(event.Events):
|
|||
object associated with the instance.
|
||||
:return: No return value is supported by this event.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`session_persistence_events`
|
||||
|
||||
"""
|
||||
|
||||
def before_delete(self, mapper, connection, target):
|
||||
|
|
@@ -1082,29 +1041,14 @@ class MapperEvents(event.Events):
|
|||
once in a later step.
|
||||
|
||||
.. warning::
|
||||
Mapper-level flush events are designed to operate **on attributes
|
||||
local to the immediate object being handled
|
||||
and via SQL operations with the given** :class:`.Connection`
|
||||
**only.** Handlers here should **not** make alterations to the
|
||||
state of the :class:`.Session` overall, and in general should not
|
||||
affect any :func:`.relationship` -mapped attributes, as
|
||||
session cascade rules will not function properly, nor is it
|
||||
always known if the related class has already been handled.
|
||||
Operations that **are not supported in mapper events** include:
|
||||
|
||||
* :meth:`.Session.add`
|
||||
* :meth:`.Session.delete`
|
||||
* Mapped collection append, add, remove, delete, discard, etc.
|
||||
* Mapped relationship attribute set/del events,
|
||||
i.e. ``someobject.related = someotherobject``
|
||||
|
||||
Operations which manipulate the state of the object
|
||||
relative to other objects are better handled:
|
||||
|
||||
* In the ``__init__()`` method of the mapped object itself,
|
||||
or another method designed to establish some particular state.
|
||||
* In a ``@validates`` handler, see :ref:`simple_validators`
|
||||
* Within the :meth:`.SessionEvents.before_flush` event.
|
||||
Mapper-level flush events only allow **very limited operations**,
|
||||
on attributes local to the row being operated upon only,
|
||||
as well as allowing any SQL to be emitted on the given
|
||||
:class:`.Connection`. **Please read fully** the notes
|
||||
at :ref:`session_persistence_mapper` for guidelines on using
|
||||
these methods; generally, the :meth:`.SessionEvents.before_flush`
|
||||
method should be preferred for general on-flush changes.
|
||||
|
||||
:param mapper: the :class:`.Mapper` which is the target
|
||||
of this event.
|
||||
|
|
@@ -1118,6 +1062,10 @@ class MapperEvents(event.Events):
|
|||
object associated with the instance.
|
||||
:return: No return value is supported by this event.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`session_persistence_events`
|
||||
|
||||
"""
|
||||
|
||||
def after_delete(self, mapper, connection, target):
|
||||
|
|
@@ -1133,29 +1081,14 @@ class MapperEvents(event.Events):
|
|||
once in a previous step.
|
||||
|
||||
.. warning::
|
||||
Mapper-level flush events are designed to operate **on attributes
|
||||
local to the immediate object being handled
|
||||
and via SQL operations with the given** :class:`.Connection`
|
||||
**only.** Handlers here should **not** make alterations to the
|
||||
state of the :class:`.Session` overall, and in general should not
|
||||
affect any :func:`.relationship` -mapped attributes, as
|
||||
session cascade rules will not function properly, nor is it
|
||||
always known if the related class has already been handled.
|
||||
Operations that **are not supported in mapper events** include:
|
||||
|
||||
* :meth:`.Session.add`
|
||||
* :meth:`.Session.delete`
|
||||
* Mapped collection append, add, remove, delete, discard, etc.
|
||||
* Mapped relationship attribute set/del events,
|
||||
i.e. ``someobject.related = someotherobject``
|
||||
|
||||
Operations which manipulate the state of the object
|
||||
relative to other objects are better handled:
|
||||
|
||||
* In the ``__init__()`` method of the mapped object itself,
|
||||
or another method designed to establish some particular state.
|
||||
* In a ``@validates`` handler, see :ref:`simple_validators`
|
||||
* Within the :meth:`.SessionEvents.before_flush` event.
|
||||
Mapper-level flush events only allow **very limited operations**,
|
||||
on attributes local to the row being operated upon only,
|
||||
as well as allowing any SQL to be emitted on the given
|
||||
:class:`.Connection`. **Please read fully** the notes
|
||||
at :ref:`session_persistence_mapper` for guidelines on using
|
||||
these methods; generally, the :meth:`.SessionEvents.before_flush`
|
||||
method should be preferred for general on-flush changes.
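As a sketch of the kind of operation that *is* appropriate here, SQL
emitted on the given :class:`.Connection`; the ``search_index`` table
and ``MyModel`` are hypothetical::

    from sqlalchemy import event
    from sqlalchemy.sql import column, table

    search_index = table("search_index", column("entity_id"))

    @event.listens_for(MyModel, "after_delete")
    def receive_after_delete(mapper, connection, target):
        # direct SQL on the flush connection is supported
        connection.execute(
            search_index.delete().where(
                search_index.c.entity_id == target.id))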
|
||||
|
||||
:param mapper: the :class:`.Mapper` which is the target
|
||||
of this event.
|
||||
|
|
@@ -1169,6 +1102,10 @@ class MapperEvents(event.Events):
|
|||
object associated with the instance.
|
||||
:return: No return value is supported by this event.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`session_persistence_events`
|
||||
|
||||
"""
|
||||
|
||||
|
||||
|
|
@@ -1409,6 +1346,8 @@ class SessionEvents(event.Events):
|
|||
|
||||
:meth:`~.SessionEvents.after_flush_postexec`
|
||||
|
||||
:ref:`session_persistence_events`
|
||||
|
||||
"""
|
||||
|
||||
def after_flush(self, session, flush_context):
|
||||
|
|
@@ -1429,6 +1368,8 @@ class SessionEvents(event.Events):
|
|||
|
||||
:meth:`~.SessionEvents.after_flush_postexec`
|
||||
|
||||
:ref:`session_persistence_events`
|
||||
|
||||
"""
|
||||
|
||||
def after_flush_postexec(self, session, flush_context):
|
||||
|
|
@@ -1451,6 +1392,8 @@ class SessionEvents(event.Events):
|
|||
|
||||
:meth:`~.SessionEvents.after_flush`
|
||||
|
||||
:ref:`session_persistence_events`
|
||||
|
||||
"""
|
||||
|
||||
def after_begin(self, session, transaction, connection):
|
||||
|
|
@@ -1488,6 +1431,8 @@ class SessionEvents(event.Events):
|
|||
|
||||
:meth:`~.SessionEvents.after_attach`
|
||||
|
||||
:ref:`session_lifecycle_events`
|
||||
|
||||
"""
|
||||
|
||||
def after_attach(self, session, instance):
|
||||
|
|
@@ -1510,6 +1455,8 @@ class SessionEvents(event.Events):
|
|||
|
||||
:meth:`~.SessionEvents.before_attach`
|
||||
|
||||
:ref:`session_lifecycle_events`
|
||||
|
||||
"""
|
||||
|
||||
@event._legacy_signature("0.9",
|
||||
|
|
@@ -1627,8 +1574,9 @@ class AttributeEvents(event.Events):
|
|||
|
||||
@staticmethod
|
||||
def _set_dispatch(cls, dispatch_cls):
|
||||
event.Events._set_dispatch(cls, dispatch_cls)
|
||||
dispatch = event.Events._set_dispatch(cls, dispatch_cls)
|
||||
dispatch_cls._active_history = False
|
||||
return dispatch
|
||||
|
||||
@classmethod
|
||||
def _accept_with(cls, target):
|
||||
|
|
@@ -1644,7 +1592,8 @@ class AttributeEvents(event.Events):
|
|||
propagate=False):
|
||||
|
||||
target, identifier, fn = \
|
||||
event_key.dispatch_target, event_key.identifier, event_key.fn
|
||||
event_key.dispatch_target, event_key.identifier, \
|
||||
event_key._listen_fn
|
||||
|
||||
if active_history:
|
||||
target.dispatch._active_history = True
|
||||
|
|
@@ -1744,3 +1693,109 @@ class AttributeEvents(event.Events):
|
|||
the given value, or a new effective value, should be returned.
|
||||
|
||||
"""
|
||||
|
||||
def init_collection(self, target, collection, collection_adapter):
|
||||
"""Receive a 'collection init' event.
|
||||
|
||||
This event is triggered for a collection-based attribute, when
|
||||
the initial "empty collection" is first generated for a blank
|
||||
attribute, as well as for when the collection is replaced with
|
||||
a new one, such as via a set event.
|
||||
|
||||
E.g., given that ``User.addresses`` is a relationship-based
|
||||
collection, the event is triggered here::
|
||||
|
||||
u1 = User()
|
||||
u1.addresses.append(a1) # <- new collection
|
||||
|
||||
and also during replace operations::
|
||||
|
||||
u1.addresses = [a2, a3] # <- new collection
|
||||
|
||||
:param target: the object instance receiving the event.
|
||||
If the listener is registered with ``raw=True``, this will
|
||||
be the :class:`.InstanceState` object.
|
||||
:param collection: the new collection. This will always be generated
|
||||
from what was specified as
|
||||
:paramref:`.RelationshipProperty.collection_class`, and will always
|
||||
be empty.
|
||||
:param collection_adapter: the :class:`.CollectionAdapter` that will
|
||||
mediate internal access to the collection.
|
||||
|
||||
.. versionadded:: 1.0.0 the :meth:`.AttributeEvents.init_collection`
|
||||
and :meth:`.AttributeEvents.dispose_collection` events supersede
|
||||
the :class:`.collection.linker` hook.
|
||||
|
||||
"""
|
||||
|
||||
def dispose_collection(self, target, collection, collection_adapter):
|
||||
"""Receive a 'collection dispose' event.
|
||||
|
||||
This event is triggered for a collection-based attribute when
|
||||
a collection is replaced, that is::
|
||||
|
||||
u1.addresses.append(a1)
|
||||
|
||||
u1.addresses = [a2, a3] # <- old collection is disposed
|
||||
|
||||
By the time this event is received, the given collection will
typically be empty, even if it stored objects while it was being replaced.
|
||||
|
||||
.. versionadded:: 1.0.0 the :meth:`.AttributeEvents.init_collection`
|
||||
and :meth:`.AttributeEvents.dispose_collection` events supersede
|
||||
the :class:`.collection.linker` hook.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
class QueryEvents(event.Events):
|
||||
"""Represent events within the construction of a :class:`.Query` object.
|
||||
|
||||
The events here are intended to be used with an as-yet-unreleased
|
||||
inspection system for :class:`.Query`. Some very basic operations
|
||||
are possible now; however, the inspection system is intended to allow
|
||||
complex query manipulations to be automated.
|
||||
|
||||
.. versionadded:: 1.0.0
|
||||
|
||||
"""
|
||||
|
||||
_target_class_doc = "SomeQuery"
|
||||
_dispatch_target = Query
|
||||
|
||||
def before_compile(self, query):
|
||||
"""Receive the :class:`.Query` object before it is composed into a
|
||||
core :class:`.Select` object.
|
||||
|
||||
This event is intended to allow changes to the query given::
|
||||
|
||||
@event.listens_for(Query, "before_compile", retval=True)
|
||||
def no_deleted(query):
|
||||
for desc in query.column_descriptions:
|
||||
if desc['type'] is User:
|
||||
entity = desc['entity']
|
||||
query = query.filter(entity.deleted == False)
|
||||
return query
|
||||
|
||||
The event should normally be listened with the ``retval=True``
|
||||
parameter set, so that the modified query may be returned.
|
||||
|
||||
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def _listen(
|
||||
cls, event_key, retval=False, **kw):
|
||||
fn = event_key._listen_fn
|
||||
|
||||
if not retval:
|
||||
def wrap(*arg, **kw):
|
||||
if not retval:
|
||||
query = arg[0]
|
||||
fn(*arg, **kw)
|
||||
return query
|
||||
else:
|
||||
return fn(*arg, **kw)
|
||||
event_key = event_key.with_wrapper(wrap)
|
||||
|
||||
event_key.base_listen(**kw)
|
||||
|
|
|
|||
|
|
@@ -1,5 +1,5 @@
|
|||
# orm/exc.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
|
|
|
|||
|
|
@@ -1,5 +1,5 @@
|
|||
# orm/identity.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
|
|
@@ -10,18 +10,26 @@ from . import attributes
|
|||
from .. import util
|
||||
|
||||
|
||||
class IdentityMap(dict):
|
||||
|
||||
class IdentityMap(object):
|
||||
def __init__(self):
|
||||
self._dict = {}
|
||||
self._modified = set()
|
||||
self._wr = weakref.ref(self)
|
||||
|
||||
def keys(self):
|
||||
return self._dict.keys()
|
||||
|
||||
def replace(self, state):
|
||||
raise NotImplementedError()
|
||||
|
||||
def add(self, state):
|
||||
raise NotImplementedError()
|
||||
|
||||
def _add_unpresent(self, state, key):
|
||||
"""optional inlined form of add() which can assume item isn't present
|
||||
in the map"""
|
||||
self.add(state)
|
||||
|
||||
def update(self, dict):
|
||||
raise NotImplementedError("IdentityMap uses add() to insert data")
|
||||
|
||||
|
|
@@ -36,7 +44,8 @@ class IdentityMap(dict):
|
|||
|
||||
def _manage_removed_state(self, state):
|
||||
del state._instance_dict
|
||||
self._modified.discard(state)
|
||||
if state.modified:
|
||||
self._modified.discard(state)
|
||||
|
||||
def _dirty_states(self):
|
||||
return self._modified
|
||||
|
|
@@ -60,6 +69,9 @@ class IdentityMap(dict):
|
|||
def setdefault(self, key, default=None):
|
||||
raise NotImplementedError("IdentityMap uses add() to insert data")
|
||||
|
||||
def __len__(self):
|
||||
return len(self._dict)
|
||||
|
||||
def copy(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
|
|
@@ -72,11 +84,8 @@ class IdentityMap(dict):
|
|||
|
||||
class WeakInstanceDict(IdentityMap):
|
||||
|
||||
def __init__(self):
|
||||
IdentityMap.__init__(self)
|
||||
|
||||
def __getitem__(self, key):
|
||||
state = dict.__getitem__(self, key)
|
||||
state = self._dict[key]
|
||||
o = state.obj()
|
||||
if o is None:
|
||||
raise KeyError(key)
|
||||
|
|
@@ -84,8 +93,8 @@ class WeakInstanceDict(IdentityMap):
|
|||
|
||||
def __contains__(self, key):
|
||||
try:
|
||||
if dict.__contains__(self, key):
|
||||
state = dict.__getitem__(self, key)
|
||||
if key in self._dict:
|
||||
state = self._dict[key]
|
||||
o = state.obj()
|
||||
else:
|
||||
return False
|
||||
|
|
@@ -95,25 +104,25 @@ class WeakInstanceDict(IdentityMap):
|
|||
return o is not None
|
||||
|
||||
def contains_state(self, state):
|
||||
return dict.get(self, state.key) is state
|
||||
return state.key in self._dict and self._dict[state.key] is state
|
||||
|
||||
def replace(self, state):
|
||||
if dict.__contains__(self, state.key):
|
||||
existing = dict.__getitem__(self, state.key)
|
||||
if state.key in self._dict:
|
||||
existing = self._dict[state.key]
|
||||
if existing is not state:
|
||||
self._manage_removed_state(existing)
|
||||
else:
|
||||
return
|
||||
|
||||
dict.__setitem__(self, state.key, state)
|
||||
self._dict[state.key] = state
|
||||
self._manage_incoming_state(state)
|
||||
|
||||
def add(self, state):
|
||||
key = state.key
|
||||
# inline of self.__contains__
|
||||
if dict.__contains__(self, key):
|
||||
if key in self._dict:
|
||||
try:
|
||||
existing_state = dict.__getitem__(self, key)
|
||||
existing_state = self._dict[key]
|
||||
if existing_state is not state:
|
||||
o = existing_state.obj()
|
||||
if o is not None:
|
||||
|
|
@@ -125,19 +134,24 @@ class WeakInstanceDict(IdentityMap):
|
|||
return
|
||||
except KeyError:
|
||||
pass
|
||||
dict.__setitem__(self, key, state)
|
||||
self._dict[key] = state
|
||||
self._manage_incoming_state(state)
|
||||
|
||||
def _add_unpresent(self, state, key):
|
||||
# inlined form of add() called by loading.py
|
||||
self._dict[key] = state
|
||||
state._instance_dict = self._wr
|
||||
|
||||
def get(self, key, default=None):
|
||||
state = dict.get(self, key, default)
|
||||
if state is default:
|
||||
if key not in self._dict:
|
||||
return default
|
||||
state = self._dict[key]
|
||||
o = state.obj()
|
||||
if o is None:
|
||||
return default
|
||||
return o
|
||||
|
||||
def _items(self):
|
||||
def items(self):
|
||||
values = self.all_states()
|
||||
result = []
|
||||
for state in values:
|
||||
|
|
@@ -146,7 +160,7 @@ class WeakInstanceDict(IdentityMap):
|
|||
result.append((state.key, value))
|
||||
return result
|
||||
|
||||
def _values(self):
|
||||
def values(self):
|
||||
values = self.all_states()
|
||||
result = []
|
||||
for state in values:
|
||||
|
|
@@ -156,39 +170,80 @@ class WeakInstanceDict(IdentityMap):
|
|||
|
||||
return result
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.keys())
|
||||
|
||||
if util.py2k:
|
||||
items = _items
|
||||
values = _values
|
||||
|
||||
def iteritems(self):
|
||||
return iter(self.items())
|
||||
|
||||
def itervalues(self):
|
||||
return iter(self.values())
|
||||
else:
|
||||
def items(self):
|
||||
return iter(self._items())
|
||||
|
||||
def values(self):
|
||||
return iter(self._values())
|
||||
|
||||
def all_states(self):
|
||||
if util.py2k:
|
||||
return dict.values(self)
|
||||
return self._dict.values()
|
||||
else:
|
||||
return list(dict.values(self))
|
||||
return list(self._dict.values())
|
||||
|
||||
def _fast_discard(self, state):
|
||||
self._dict.pop(state.key, None)
|
||||
|
||||
def discard(self, state):
|
||||
st = dict.get(self, state.key, None)
|
||||
if st is state:
|
||||
dict.pop(self, state.key, None)
|
||||
st = self._dict.pop(state.key, None)
|
||||
if st:
|
||||
assert st is state
|
||||
self._manage_removed_state(state)
|
||||
|
||||
def safe_discard(self, state):
|
||||
if state.key in self._dict:
|
||||
st = self._dict[state.key]
|
||||
if st is state:
|
||||
self._dict.pop(state.key, None)
|
||||
self._manage_removed_state(state)
|
||||
|
||||
def prune(self):
|
||||
return 0
|
||||
|
||||
|
||||
class StrongInstanceDict(IdentityMap):
|
||||
"""A 'strong-referencing' version of the identity map.
|
||||
|
||||
.. deprecated:: this object is present in order to fulfill
|
||||
the ``weak_identity_map=False`` option of the Session.
|
||||
This option is present to allow compatibility with older applications,
|
||||
but it is recommended that strong references to objects
|
||||
be maintained by the calling application
|
||||
externally to the :class:`.Session` itself, to the degree
|
||||
that is needed by the application.
|
||||
|
||||
"""
|
||||
|
||||
if util.py2k:
|
||||
def itervalues(self):
|
||||
return self._dict.itervalues()
|
||||
|
||||
def iteritems(self):
|
||||
return self._dict.iteritems()
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._dict)
|
||||
|
||||
def __getitem__(self, key):
|
||||
return self._dict[key]
|
||||
|
||||
def __contains__(self, key):
|
||||
return key in self._dict
|
||||
|
||||
def get(self, key, default=None):
|
||||
return self._dict.get(key, default)
|
||||
|
||||
def values(self):
|
||||
return self._dict.values()
|
||||
|
||||
def items(self):
|
||||
return self._dict.items()
|
||||
|
||||
def all_states(self):
|
||||
return [attributes.instance_state(o) for o in self.values()]
|
||||
|
|
@@ -199,36 +254,48 @@ class StrongInstanceDict(IdentityMap):
|
|||
attributes.instance_state(self[state.key]) is state)
|
||||
|
||||
def replace(self, state):
|
||||
if dict.__contains__(self, state.key):
|
||||
existing = dict.__getitem__(self, state.key)
|
||||
if state.key in self._dict:
|
||||
existing = self._dict[state.key]
|
||||
existing = attributes.instance_state(existing)
|
||||
if existing is not state:
|
||||
self._manage_removed_state(existing)
|
||||
else:
|
||||
return
|
||||
|
||||
dict.__setitem__(self, state.key, state.obj())
|
||||
self._dict[state.key] = state.obj()
|
||||
self._manage_incoming_state(state)
|
||||
|
||||
def add(self, state):
|
||||
if state.key in self:
|
||||
if attributes.instance_state(
|
||||
dict.__getitem__(
|
||||
self,
|
||||
state.key)) is not state:
|
||||
if attributes.instance_state(self._dict[state.key]) is not state:
|
||||
raise AssertionError('A conflicting state is already '
|
||||
'present in the identity map for key %r'
|
||||
% (state.key, ))
|
||||
else:
|
||||
dict.__setitem__(self, state.key, state.obj())
|
||||
self._dict[state.key] = state.obj()
|
||||
self._manage_incoming_state(state)
|
||||
|
||||
def _add_unpresent(self, state, key):
|
||||
# inlined form of add() called by loading.py
|
||||
self._dict[key] = state.obj()
|
||||
state._instance_dict = self._wr
|
||||
|
||||
def _fast_discard(self, state):
|
||||
self._dict.pop(state.key, None)
|
||||
|
||||
def discard(self, state):
|
||||
obj = dict.get(self, state.key, None)
|
||||
obj = self._dict.pop(state.key, None)
|
||||
if obj is not None:
|
||||
self._manage_removed_state(state)
|
||||
st = attributes.instance_state(obj)
|
||||
assert st is state
|
||||
|
||||
def safe_discard(self, state):
|
||||
if state.key in self._dict:
|
||||
obj = self._dict[state.key]
|
||||
st = attributes.instance_state(obj)
|
||||
if st is state:
|
||||
dict.pop(self, state.key, None)
|
||||
self._dict.pop(state.key, None)
|
||||
self._manage_removed_state(state)
|
||||
|
||||
def prune(self):
|
||||
|
|
@@ -241,7 +308,7 @@ class StrongInstanceDict(IdentityMap):
|
|||
keepers = weakref.WeakValueDictionary()
|
||||
keepers.update(self)
|
||||
|
||||
dict.clear(self)
|
||||
dict.update(self, keepers)
|
||||
self._dict.clear()
|
||||
self._dict.update(keepers)
|
||||
self.modified = bool(dirty)
|
||||
return ref_count - len(self)
|
||||
|
|
|
|||
|
|
@@ -1,5 +1,5 @@
|
|||
# orm/instrumentation.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
|
|
@@ -35,13 +35,17 @@ from .. import util
|
|||
from . import base
|
||||
|
||||
|
||||
class ClassManager(dict):
|
||||
_memoized_key_collection = util.group_expirable_memoized_property()
|
||||
|
||||
|
||||
class ClassManager(dict):
|
||||
"""tracks state information at the class level."""
|
||||
|
||||
MANAGER_ATTR = base.DEFAULT_MANAGER_ATTR
|
||||
STATE_ATTR = base.DEFAULT_STATE_ATTR
|
||||
|
||||
_state_setter = staticmethod(util.attrsetter(STATE_ATTR))
|
||||
|
||||
deferred_scalar_loader = None
|
||||
|
||||
original_init = object.__init__
|
||||
|
|
@@ -91,6 +95,21 @@ class ClassManager(dict):
|
|||
def is_mapped(self):
|
||||
return 'mapper' in self.__dict__
|
||||
|
||||
@_memoized_key_collection
|
||||
def _all_key_set(self):
|
||||
return frozenset(self)
|
||||
|
||||
@_memoized_key_collection
|
||||
def _collection_impl_keys(self):
|
||||
return frozenset([
|
||||
attr.key for attr in self.values() if attr.impl.collection])
|
||||
|
||||
@_memoized_key_collection
|
||||
def _scalar_loader_impls(self):
|
||||
return frozenset([
|
||||
attr.impl for attr in
|
||||
self.values() if attr.impl.accepts_scalar_loader])
|
||||
|
||||
@util.memoized_property
|
||||
def mapper(self):
|
||||
# raises unless self.mapper has been assigned
|
||||
|
|
@@ -98,7 +117,7 @@ class ClassManager(dict):
|
|||
|
||||
def _all_sqla_attributes(self, exclude=None):
|
||||
"""return an iterator of all classbound attributes that are
|
||||
implement :class:`._InspectionAttr`.
|
||||
implement :class:`.InspectionAttr`.
|
||||
|
||||
This includes :class:`.QueryableAttribute` as well as extension
|
||||
types such as :class:`.hybrid_property` and
|
||||
|
|
@@ -111,7 +130,7 @@ class ClassManager(dict):
|
|||
for key in set(supercls.__dict__).difference(exclude):
|
||||
exclude.add(key)
|
||||
val = supercls.__dict__[key]
|
||||
if isinstance(val, interfaces._InspectionAttr):
|
||||
if isinstance(val, interfaces.InspectionAttr):
|
||||
yield key, val
|
||||
|
||||
def _attr_has_impl(self, key):
|
||||
|
|
@@ -194,6 +213,7 @@ class ClassManager(dict):
|
|||
else:
|
||||
self.local_attrs[key] = inst
|
||||
self.install_descriptor(key, inst)
|
||||
_memoized_key_collection.expire_instance(self)
|
||||
self[key] = inst
|
||||
|
||||
for cls in self.class_.__subclasses__():
|
||||
|
|
@@ -222,6 +242,7 @@ class ClassManager(dict):
|
|||
else:
|
||||
del self.local_attrs[key]
|
||||
self.uninstall_descriptor(key)
|
||||
_memoized_key_collection.expire_instance(self)
|
||||
del self[key]
|
||||
for cls in self.class_.__subclasses__():
|
||||
manager = manager_of_class(cls)
|
||||
|
|
@@ -289,13 +310,15 @@ class ClassManager(dict):
|
|||
|
||||
def new_instance(self, state=None):
|
||||
instance = self.class_.__new__(self.class_)
|
||||
setattr(instance, self.STATE_ATTR,
|
||||
state or self._state_constructor(instance, self))
|
||||
if state is None:
|
||||
state = self._state_constructor(instance, self)
|
||||
self._state_setter(instance, state)
|
||||
return instance
|
||||
|
||||
def setup_instance(self, instance, state=None):
|
||||
setattr(instance, self.STATE_ATTR,
|
||||
state or self._state_constructor(instance, self))
|
||||
if state is None:
|
||||
state = self._state_constructor(instance, self)
|
||||
self._state_setter(instance, state)
|
||||
|
||||
def teardown_instance(self, instance):
|
||||
delattr(instance, self.STATE_ATTR)
|
||||
|
|
@@ -322,7 +345,7 @@ class ClassManager(dict):
|
|||
_new_state_if_none(instance)
|
||||
else:
|
||||
state = self._state_constructor(instance, self)
|
||||
setattr(instance, self.STATE_ATTR, state)
|
||||
self._state_setter(instance, state)
|
||||
return state
|
||||
|
||||
def has_state(self, instance):
|
||||
|
|
@@ -344,7 +367,6 @@ class ClassManager(dict):
|
|||
|
||||
|
||||
class _SerializeManager(object):
|
||||
|
||||
"""Provide serialization of a :class:`.ClassManager`.
|
||||
|
||||
The :class:`.InstanceState` uses ``__init__()`` on serialize
|
||||
|
|
@@ -379,7 +401,6 @@ class _SerializeManager(object):
|
|||
|
||||
|
||||
class InstrumentationFactory(object):
|
||||
|
||||
"""Factory for new ClassManager instances."""
|
||||
|
||||
def create_manager_for_cls(self, class_):
|
||||
|
|
|
|||
|
|
@@ -1,5 +1,5 @@
|
|||
# orm/interfaces.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
|
|
@@ -9,25 +9,28 @@
|
|||
|
||||
Contains various base classes used throughout the ORM.
|
||||
|
||||
Defines the now deprecated ORM extension classes as well
|
||||
as ORM internals.
|
||||
Defines some key base classes prominent within the internals,
|
||||
as well as the now-deprecated ORM extension classes.
|
||||
|
||||
Other than the deprecated extensions, this module and the
|
||||
classes within should be considered mostly private.
|
||||
classes within are mostly private, though some attributes
|
||||
are exposed when inspecting mappings.
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from .. import exc as sa_exc, util, inspect
|
||||
from .. import util
|
||||
from ..sql import operators
|
||||
from collections import deque
|
||||
from .base import (ONETOMANY, MANYTOONE, MANYTOMANY,
|
||||
EXT_CONTINUE, EXT_STOP, NOT_EXTENSION)
|
||||
from .base import _InspectionAttr, _MappedAttribute
|
||||
from .path_registry import PathRegistry
|
||||
from .base import (InspectionAttr,
|
||||
InspectionAttrInfo, _MappedAttribute)
|
||||
import collections
|
||||
from .. import inspect
|
||||
|
||||
# imported later
|
||||
MapperExtension = SessionExtension = AttributeExtension = None
|
||||
|
||||
__all__ = (
|
||||
'AttributeExtension',
|
||||
|
|
@@ -47,11 +50,8 @@ __all__ = (
|
|||
)
|
||||
|
||||
|
||||
class MapperProperty(_MappedAttribute, _InspectionAttr):
|
||||
"""Manage the relationship of a ``Mapper`` to a single class
|
||||
attribute, as well as that attribute as it appears on individual
|
||||
instances of the class, including attribute instrumentation,
|
||||
attribute access, loading behavior, and dependency calculations.
|
||||
class MapperProperty(_MappedAttribute, InspectionAttr, util.MemoizedSlots):
|
||||
"""Represent a particular class attribute mapped by :class:`.Mapper`.
|
||||
|
||||
The most common occurrences of :class:`.MapperProperty` are the
|
||||
mapped :class:`.Column`, which is represented in a mapping as
|
||||
|
|
@@ -62,14 +62,51 @@ class MapperProperty(_MappedAttribute, _InspectionAttr):
|
|||
|
||||
"""
|
||||
|
||||
__slots__ = (
|
||||
'_configure_started', '_configure_finished', 'parent', 'key',
|
||||
'info'
|
||||
)
|
||||
|
||||
cascade = frozenset()
|
||||
"""The set of 'cascade' attribute names.
|
||||
|
||||
This collection is checked before the 'cascade_iterator' method is called.
|
||||
|
||||
The collection typically only applies to a RelationshipProperty.
|
||||
|
||||
"""
|
||||
|
||||
is_property = True
|
||||
"""Part of the InspectionAttr interface; states this object is a
|
||||
mapper property.
|
||||
|
||||
"""
|
||||
|
||||
def _memoized_attr_info(self):
|
||||
"""Info dictionary associated with the object, allowing user-defined
|
||||
data to be associated with this :class:`.InspectionAttr`.
|
||||
|
||||
The dictionary is generated when first accessed. Alternatively,
|
||||
it can be specified as a constructor argument to the
|
||||
:func:`.column_property`, :func:`.relationship`, or :func:`.composite`
|
||||
functions.
|
||||
|
||||
.. versionadded:: 0.8 Added support for .info to all
|
||||
:class:`.MapperProperty` subclasses.
|
||||
|
||||
.. versionchanged:: 1.0.0 :attr:`.MapperProperty.info` is also
|
||||
available on extension types via the
|
||||
:attr:`.InspectionAttrInfo.info` attribute, so that it can apply
|
||||
to a wider variety of ORM and extension constructs.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:attr:`.QueryableAttribute.info`
|
||||
|
||||
:attr:`.SchemaItem.info`
|
||||
|
||||
"""
|
||||
return {}
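A sketch of the constructor-argument form described above, assuming a
declarative ``Base`` and a ``Child`` mapping::

    from sqlalchemy import Column, Integer, inspect
    from sqlalchemy.orm import relationship

    class Parent(Base):
        __tablename__ = 'parent'
        id = Column(Integer, primary_key=True)
        children = relationship("Child", info={"audit": True})

    # the dictionary is then available from the inspection system
    assert inspect(Parent).relationships.children.info["audit"] is True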
|
||||
|
||||
def setup(self, context, entity, path, adapter, **kwargs):
|
||||
"""Called by Query for the purposes of constructing a SQL statement.
|
||||
|
|
@@ -77,16 +114,15 @@ class MapperProperty(_MappedAttribute, _InspectionAttr):
|
|||
Each MapperProperty associated with the target mapper processes the
|
||||
statement referenced by the query context, adding columns and/or
|
||||
criterion as appropriate.
|
||||
"""
|
||||
|
||||
pass
|
||||
"""
|
||||
|
||||
def create_row_processor(self, context, path,
|
||||
mapper, row, adapter):
|
||||
"""Return a 3-tuple consisting of three row processing functions.
|
||||
mapper, result, adapter, populators):
|
||||
"""Produce row processing functions and append to the given
|
||||
set of populators lists.
|
||||
|
||||
"""
|
||||
return None, None, None
|
||||
|
||||
def cascade_iterator(self, type_, state, visited_instances=None,
|
||||
halt_on=None):
|
||||
|
|
@@ -98,41 +134,44 @@ class MapperProperty(_MappedAttribute, _InspectionAttr):
|
|||
Note that the 'cascade' collection on this MapperProperty is
|
||||
checked first for the given type before cascade_iterator is called.
|
||||
|
||||
See PropertyLoader for the related instance implementation.
|
||||
This method typically only applies to RelationshipProperty.
|
||||
|
||||
"""
|
||||
|
||||
return iter(())
|
||||
|
||||
def set_parent(self, parent, init):
|
||||
self.parent = parent
|
||||
"""Set the parent mapper that references this MapperProperty.
|
||||
|
||||
def instrument_class(self, mapper): # pragma: no-coverage
|
||||
raise NotImplementedError()
|
||||
|
||||
@util.memoized_property
|
||||
def info(self):
|
||||
"""Info dictionary associated with the object, allowing user-defined
|
||||
data to be associated with this :class:`.MapperProperty`.
|
||||
|
||||
The dictionary is generated when first accessed. Alternatively,
|
||||
it can be specified as a constructor argument to the
|
||||
:func:`.column_property`, :func:`.relationship`, or :func:`.composite`
|
||||
functions.
|
||||
|
||||
.. versionadded:: 0.8 Added support for .info to all
|
||||
:class:`.MapperProperty` subclasses.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:attr:`.QueryableAttribute.info`
|
||||
|
||||
:attr:`.SchemaItem.info`
|
||||
This method is overridden by some subclasses to perform extra
|
||||
setup when the mapper is first known.
|
||||
|
||||
"""
|
||||
return {}
|
||||
self.parent = parent
|
||||
|
||||
_configure_started = False
|
||||
_configure_finished = False
|
||||
def instrument_class(self, mapper):
|
||||
"""Hook called by the Mapper to the property to initiate
|
||||
instrumentation of the class attribute managed by this
|
||||
MapperProperty.
|
||||
|
||||
The MapperProperty here will typically call out to the
|
||||
attributes module to set up an InstrumentedAttribute.
|
||||
|
||||
This step is the first of two steps to set up an InstrumentedAttribute,
|
||||
and is called early in the mapper setup process.
|
||||
|
||||
The second step is typically the init_class_attribute step,
|
||||
called from StrategizedProperty via the post_instrument_class()
|
||||
hook. This step assigns additional state to the InstrumentedAttribute
|
||||
(specifically the "impl") which has been determined after the
|
||||
MapperProperty has determined what kind of persistence
|
||||
management it needs to do (e.g. scalar, object, collection, etc).
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self._configure_started = False
|
||||
self._configure_finished = False
|
||||
|
||||
def init(self):
|
||||
"""Called after all mappers are created to assemble
|
||||
|
|
@@ -179,45 +218,28 @@ class MapperProperty(_MappedAttribute, _InspectionAttr):
|
|||
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
def post_instrument_class(self, mapper):
|
||||
"""Perform instrumentation adjustments that need to occur
|
||||
after init() has completed.
|
||||
|
||||
The given Mapper is the Mapper invoking the operation, which
|
||||
may not be the same Mapper as self.parent in an inheritance
|
||||
scenario; however, Mapper will always at least be a sub-mapper of
|
||||
self.parent.
|
||||
|
||||
This method is typically used by StrategizedProperty, which delegates
|
||||
it to LoaderStrategy.init_class_attribute() to perform final setup
|
||||
on the class-bound InstrumentedAttribute.
|
||||
|
||||
"""
|
||||
pass
|
||||
|
||||
def is_primary(self):
|
||||
"""Return True if this ``MapperProperty``'s mapper is the
|
||||
primary mapper for its class.
|
||||
|
||||
This flag is used to indicate that the ``MapperProperty`` can
|
||||
define attribute instrumentation for the class at the class
|
||||
level (as opposed to the individual instance level).
|
||||
"""
|
||||
|
||||
return not self.parent.non_primary
|
||||
|
||||
def merge(self, session, source_state, source_dict, dest_state,
|
||||
dest_dict, load, _recursive):
|
||||
"""Merge the attribute represented by this ``MapperProperty``
|
||||
from source to destination object"""
|
||||
from source to destination object.
|
||||
|
||||
pass
|
||||
|
||||
def compare(self, operator, value, **kw):
|
||||
"""Return a compare operation for the columns represented by
|
||||
this ``MapperProperty`` to the given value, which may be a
|
||||
column value or an instance. 'operator' is an operator from
|
||||
the operators module, or from sql.Comparator.
|
||||
|
||||
By default uses the PropComparator attached to this MapperProperty
|
||||
under the attribute name "comparator".
|
||||
"""
|
||||
|
||||
return operator(self.comparator, value)
|
||||
|
||||
def __repr__(self):
|
||||
return '<%s at 0x%x; %s>' % (
|
||||
self.__class__.__name__,
|
||||
|
|
@@ -225,8 +247,7 @@ class MapperProperty(_MappedAttribute, _InspectionAttr):
|
|||
|
||||
|
||||
class PropComparator(operators.ColumnOperators):
|
||||
"""Defines boolean, comparison, and other operators for
|
||||
:class:`.MapperProperty` objects.
|
||||
"""Defines SQL operators for :class:`.MapperProperty` objects.
|
||||
|
||||
SQLAlchemy allows for operators to
|
||||
be redefined at both the Core and ORM level. :class:`.PropComparator`
|
||||
|
|
@@ -313,9 +334,11 @@ class PropComparator(operators.ColumnOperators):
|
|||
|
||||
"""
|
||||
|
||||
__slots__ = 'prop', 'property', '_parententity', '_adapt_to_entity'
|
||||
|
||||
def __init__(self, prop, parentmapper, adapt_to_entity=None):
|
||||
self.prop = self.property = prop
|
||||
self._parentmapper = parentmapper
|
||||
self._parententity = adapt_to_entity or parentmapper
|
||||
self._adapt_to_entity = adapt_to_entity
|
||||
|
||||
def __clause_element__(self):
|
||||
|
|
@@ -328,7 +351,13 @@ class PropComparator(operators.ColumnOperators):
|
|||
"""Return a copy of this PropComparator which will use the given
|
||||
:class:`.AliasedInsp` to produce corresponding expressions.
|
||||
"""
|
||||
return self.__class__(self.prop, self._parentmapper, adapt_to_entity)
|
||||
return self.__class__(self.prop, self._parententity, adapt_to_entity)
|
||||
|
||||
@property
|
||||
def _parentmapper(self):
|
||||
"""legacy; this is renamed to _parententity to be
|
||||
compatible with QueryableAttribute."""
|
||||
return inspect(self._parententity).mapper
|
||||
|
||||
@property
|
||||
def adapter(self):
|
||||
|
|
@@ -341,7 +370,7 @@ class PropComparator(operators.ColumnOperators):
|
|||
else:
|
||||
return self._adapt_to_entity._adapt_element
|
||||
|
||||
@util.memoized_property
|
||||
@property
|
||||
def info(self):
|
||||
return self.property.info
|
||||
|
||||
|
|
@@ -421,8 +450,17 @@ class StrategizedProperty(MapperProperty):
|
|||
strategies can be selected at Query time through the usage of
|
||||
``StrategizedOption`` objects via the Query.options() method.
|
||||
|
||||
The mechanics of StrategizedProperty are used for every Query
|
||||
invocation for every mapped attribute participating in that Query,
|
||||
to determine first how the attribute will be rendered in SQL
|
||||
and secondly how the attribute will retrieve a value from a result
|
||||
row and apply it to a mapped object. The routines here are very
|
||||
performance-critical.
|
||||
|
||||
"""
|
||||
|
||||
__slots__ = '_strategies', 'strategy'
|
||||
|
||||
strategy_wildcard_key = None
|
||||
|
||||
def _get_context_loader(self, context, path):
|
||||
|
|
@@ -457,7 +495,8 @@ class StrategizedProperty(MapperProperty):
|
|||
def _get_strategy_by_cls(self, cls):
|
||||
return self._get_strategy(cls._strategy_keys[0])
|
||||
|
||||
def setup(self, context, entity, path, adapter, **kwargs):
|
||||
def setup(
|
||||
self, context, entity, path, adapter, **kwargs):
|
||||
loader = self._get_context_loader(context, path)
|
||||
if loader and loader.strategy:
|
||||
strat = self._get_strategy(loader.strategy)
|
||||
|
|
@@ -465,32 +504,38 @@ class StrategizedProperty(MapperProperty):
|
|||
strat = self.strategy
|
||||
strat.setup_query(context, entity, path, loader, adapter, **kwargs)
|
||||
|
||||
def create_row_processor(self, context, path, mapper, row, adapter):
|
||||
def create_row_processor(
|
||||
self, context, path, mapper,
|
||||
result, adapter, populators):
|
||||
loader = self._get_context_loader(context, path)
|
||||
if loader and loader.strategy:
|
||||
strat = self._get_strategy(loader.strategy)
|
||||
else:
|
||||
strat = self.strategy
|
||||
return strat.create_row_processor(context, path, loader,
|
||||
mapper, row, adapter)
|
||||
strat.create_row_processor(
|
||||
context, path, loader,
|
||||
mapper, result, adapter, populators)
|
||||
|
||||
def do_init(self):
|
||||
self._strategies = {}
|
||||
self.strategy = self._get_strategy_by_cls(self.strategy_class)
|
||||
|
||||
def post_instrument_class(self, mapper):
|
||||
if self.is_primary() and \
|
||||
if not self.parent.non_primary and \
|
||||
not mapper.class_manager._attr_has_impl(self.key):
|
||||
self.strategy.init_class_attribute(mapper)
|
||||
|
||||
_strategies = collections.defaultdict(dict)
|
||||
_all_strategies = collections.defaultdict(dict)
|
||||
|
||||
@classmethod
|
||||
def strategy_for(cls, **kw):
|
||||
def decorate(dec_cls):
|
||||
dec_cls._strategy_keys = []
|
||||
# ensure each subclass of the strategy has its
|
||||
# own _strategy_keys collection
|
||||
if '_strategy_keys' not in dec_cls.__dict__:
|
||||
dec_cls._strategy_keys = []
|
||||
key = tuple(sorted(kw.items()))
|
||||
cls._strategies[cls][key] = dec_cls
|
||||
cls._all_strategies[cls][key] = dec_cls
|
||||
dec_cls._strategy_keys.append(key)
|
||||
return dec_cls
|
||||
return decorate
|
||||
|
|
@@ -498,8 +543,8 @@ class StrategizedProperty(MapperProperty):
|
|||
@classmethod
|
||||
def _strategy_lookup(cls, *key):
|
||||
for prop_cls in cls.__mro__:
|
||||
if prop_cls in cls._strategies:
|
||||
strategies = cls._strategies[prop_cls]
|
||||
if prop_cls in cls._all_strategies:
|
||||
strategies = cls._all_strategies[prop_cls]
|
||||
try:
|
||||
return strategies[key]
|
||||
except KeyError:
|
||||
|
|
@@ -512,18 +557,24 @@ class MapperOption(object):
|
|||
|
||||
propagate_to_loaders = False
|
||||
"""if True, indicate this option should be carried along
|
||||
Query object generated by scalar or object lazy loaders.
|
||||
to "secondary" Query objects produced during lazy loads
|
||||
or refresh operations.
|
||||
|
||||
"""
|
||||
|
||||
def process_query(self, query):
|
||||
pass
|
||||
"""Apply a modification to the given :class:`.Query`."""
|
||||
|
||||
def process_query_conditionally(self, query):
|
||||
"""same as process_query(), except that this option may not
|
||||
apply to the given query.
|
||||
|
||||
Used when secondary loaders resend existing options to a new
|
||||
Query."""
|
||||
This is typically used during a lazy load or scalar refresh
|
||||
operation to propagate options stated in the original Query to the
|
||||
new Query being used for the load. It occurs for those options that
|
||||
specify propagate_to_loaders=True.
|
||||
|
||||
"""
|
||||
|
||||
self.process_query(query)
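A minimal sketch of a custom option; note that ``Query._attributes``
is an internal immutable dictionary, so implementations replace it
rather than mutating it in place::

    from sqlalchemy.orm.interfaces import MapperOption

    class TaggedQuery(MapperOption):
        # carry this option onto lazy-load and refresh queries too
        propagate_to_loaders = True

        def process_query(self, query):
            query._attributes = query._attributes.union(
                {"tagged": True})

    # hypothetical usage:
    # session.query(User).options(TaggedQuery())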
|
||||
|
||||
|
|
@@ -542,9 +593,9 @@ class LoaderStrategy(object):
|
|||
|
||||
* it processes the ``QueryContext`` at statement construction time,
|
||||
where it can modify the SQL statement that is being produced.
|
||||
Simple column attributes may add their represented column to the
|
||||
list of selected columns, *eager loading* properties may add
|
||||
``LEFT OUTER JOIN`` clauses to the statement.
|
||||
For example, simple column attributes will add their represented
|
||||
column to the list of selected columns, a joined eager loader
|
||||
may establish join clauses to add to the statement.
|
||||
|
||||
* It produces "row processor" functions at result fetching time.
|
||||
These "row processor" functions populate a particular attribute
|
||||
|
|
@@ -552,6 +603,8 @@ class LoaderStrategy(object):
|
|||
|
||||
"""
|
||||
|
||||
__slots__ = 'parent_property', 'is_class_level', 'parent', 'key'
|
||||
|
||||
def __init__(self, parent):
|
||||
self.parent_property = parent
|
||||
self.is_class_level = False
|
||||
|
|
@ -562,17 +615,26 @@ class LoaderStrategy(object):
|
|||
pass
|
||||
|
||||
def setup_query(self, context, entity, path, loadopt, adapter, **kwargs):
|
||||
pass
|
||||
"""Establish column and other state for a given QueryContext.
|
||||
|
||||
This method fulfills the contract specified by MapperProperty.setup().
|
||||
|
||||
StrategizedProperty delegates its setup() method
|
||||
directly to this method.
|
||||
|
||||
"""
|
||||
|
||||
def create_row_processor(self, context, path, loadopt, mapper,
|
||||
row, adapter):
|
||||
"""Return row processing functions which fulfill the contract
|
||||
specified by MapperProperty.create_row_processor.
|
||||
result, adapter, populators):
|
||||
"""Establish row processing functions for a given QueryContext.
|
||||
|
||||
StrategizedProperty delegates its create_row_processor method
|
||||
directly to this method. """
|
||||
This method fulfills the contract specified by
|
||||
MapperProperty.create_row_processor().
|
||||
|
||||
return None, None, None
|
||||
StrategizedProperty delegates its create_row_processor() method
|
||||
directly to this method.
|
||||
|
||||
"""
|
||||
|
||||
def __str__(self):
|
||||
return str(self.parent_property)
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
# orm/loading.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
|
|
@ -12,26 +12,27 @@ the functions here are called primarily by Query, Mapper,
|
|||
as well as some of the attribute loading strategies.
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from .. import util
|
||||
from . import attributes, exc as orm_exc, state as statelib
|
||||
from .interfaces import EXT_CONTINUE
|
||||
from . import attributes, exc as orm_exc
|
||||
from ..sql import util as sql_util
|
||||
from . import strategy_options
|
||||
|
||||
from .util import _none_set, state_str
|
||||
from .base import _SET_DEFERRED_EXPIRED, _DEFER_FOR_STATE
|
||||
from .. import exc as sa_exc
|
||||
import collections
|
||||
|
||||
_new_runid = util.counter()
|
||||
|
||||
|
||||
def instances(query, cursor, context):
|
||||
"""Return an ORM result as an iterator."""
|
||||
session = query.session
|
||||
|
||||
context.runid = _new_runid()
|
||||
|
||||
filter_fns = [ent.filter_fn
|
||||
for ent in query._entities]
|
||||
filter_fns = [ent.filter_fn for ent in query._entities]
|
||||
filtered = id in filter_fns
|
||||
|
||||
single_entity = len(query._entities) == 1 and \
|
||||
|
|
@ -44,59 +45,45 @@ def instances(query, cursor, context):
|
|||
def filter_fn(row):
|
||||
return tuple(fn(x) for x, fn in zip(row, filter_fns))
|
||||
|
||||
custom_rows = single_entity and \
|
||||
query._entities[0].custom_rows
|
||||
try:
|
||||
(process, labels) = \
|
||||
list(zip(*[
|
||||
query_entity.row_processor(query,
|
||||
context, cursor)
|
||||
for query_entity in query._entities
|
||||
]))
|
||||
|
||||
(process, labels) = \
|
||||
list(zip(*[
|
||||
query_entity.row_processor(query,
|
||||
context, custom_rows)
|
||||
for query_entity in query._entities
|
||||
]))
|
||||
if not single_entity:
|
||||
keyed_tuple = util.lightweight_named_tuple('result', labels)
|
||||
|
||||
while True:
|
||||
context.progress = {}
|
||||
context.partials = {}
|
||||
while True:
|
||||
context.partials = {}
|
||||
|
||||
if query._yield_per:
|
||||
fetch = cursor.fetchmany(query._yield_per)
|
||||
if not fetch:
|
||||
if query._yield_per:
|
||||
fetch = cursor.fetchmany(query._yield_per)
|
||||
if not fetch:
|
||||
break
|
||||
else:
|
||||
fetch = cursor.fetchall()
|
||||
|
||||
if single_entity:
|
||||
proc = process[0]
|
||||
rows = [proc(row) for row in fetch]
|
||||
else:
|
||||
rows = [keyed_tuple([proc(row) for proc in process])
|
||||
for row in fetch]
|
||||
|
||||
if filtered:
|
||||
rows = util.unique_list(rows, filter_fn)
|
||||
|
||||
for row in rows:
|
||||
yield row
|
||||
|
||||
if not query._yield_per:
|
||||
break
|
||||
else:
|
||||
fetch = cursor.fetchall()
|
||||
|
||||
if custom_rows:
|
||||
rows = []
|
||||
for row in fetch:
|
||||
process[0](row, rows)
|
||||
elif single_entity:
|
||||
rows = [process[0](row, None) for row in fetch]
|
||||
else:
|
||||
rows = [util.KeyedTuple([proc(row, None) for proc in process],
|
||||
labels) for row in fetch]
|
||||
|
||||
if filtered:
|
||||
rows = util.unique_list(rows, filter_fn)
|
||||
|
||||
if context.refresh_state and query._only_load_props \
|
||||
and context.refresh_state in context.progress:
|
||||
context.refresh_state._commit(
|
||||
context.refresh_state.dict, query._only_load_props)
|
||||
context.progress.pop(context.refresh_state)
|
||||
|
||||
statelib.InstanceState._commit_all_states(
|
||||
list(context.progress.items()),
|
||||
session.identity_map
|
||||
)
|
||||
|
||||
for state, (dict_, attrs) in context.partials.items():
|
||||
state._commit(dict_, attrs)
|
||||
|
||||
for row in rows:
|
||||
yield row
|
||||
|
||||
if not query._yield_per:
|
||||
break
|
||||
except Exception as err:
|
||||
cursor.close()
|
||||
util.raise_from_cause(err)
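
The fetchmany loop above is what backs batched iteration on the public side; a usage sketch, assuming a mapped ``User`` class, a ``session``, and an illustrative ``handle`` callback:

    for user in session.query(User).yield_per(100):
        # instances() then fetches from the cursor 100 rows at a time
        handle(user)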

@util.dependencies("sqlalchemy.orm.query")

@ -126,6 +113,7 @@ def merge_result(querylib, query, iterator, load=True):
            if isinstance(e, querylib._MapperEntity)]
        result = []
        keys = [ent._label_name for ent in query._entities]
        keyed_tuple = util.lightweight_named_tuple('result', keys)
        for row in iterator:
            newrow = list(row)
            for i in mapped_entities:

@ -134,7 +122,7 @@ def merge_result(querylib, query, iterator, load=True):
                        attributes.instance_state(newrow[i]),
                        attributes.instance_dict(newrow[i]),
                        load=load, _recursive={})
            result.append(util.KeyedTuple(newrow, keys))
            result.append(keyed_tuple(newrow))

        return iter(result)
    finally:

@ -161,7 +149,7 @@ def get_from_identity(session, key, passive):
            # expired state will be checked soon enough, if necessary
            return instance
        try:
            state(state, passive)
            state._load_expired(state, passive)
        except orm_exc.ObjectDeletedError:
            session._remove_newly_deleted([state])
            return None

@ -233,11 +221,56 @@ def load_on_ident(query, key,
        return None


def instance_processor(mapper, context, path, adapter,
                       polymorphic_from=None,
                       only_load_props=None,
                       refresh_state=None,
                       polymorphic_discriminator=None):
def _setup_entity_query(
        context, mapper, query_entity,
        path, adapter, column_collection,
        with_polymorphic=None, only_load_props=None,
        polymorphic_discriminator=None, **kw):

    if with_polymorphic:
        poly_properties = mapper._iterate_polymorphic_properties(
            with_polymorphic)
    else:
        poly_properties = mapper._polymorphic_properties

    quick_populators = {}

    path.set(
        context.attributes,
        "memoized_setups",
        quick_populators)

    for value in poly_properties:
        if only_load_props and \
                value.key not in only_load_props:
            continue
        value.setup(
            context,
            query_entity,
            path,
            adapter,
            only_load_props=only_load_props,
            column_collection=column_collection,
            memoized_populators=quick_populators,
            **kw
        )

    if polymorphic_discriminator is not None and \
            polymorphic_discriminator \
            is not mapper.polymorphic_on:

        if adapter:
            pd = adapter.columns[polymorphic_discriminator]
        else:
            pd = polymorphic_discriminator
        column_collection.append(pd)


def _instance_processor(
        mapper, context, result, path, adapter,
        only_load_props=None, refresh_state=None,
        polymorphic_discriminator=None,
        _polymorphic_from=None):
    """Produce a mapper level row processor callable
    which processes rows into mapped instances."""

@ -249,288 +282,292 @@ def instance_processor(mapper, context, path, adapter,

    pk_cols = mapper.primary_key

    if polymorphic_from or refresh_state:
        polymorphic_on = None
    else:
        if polymorphic_discriminator is not None:
            polymorphic_on = polymorphic_discriminator
        else:
            polymorphic_on = mapper.polymorphic_on
        polymorphic_instances = util.PopulateDict(
            _configure_subclass_mapper(
                mapper,
                context, path, adapter)
        )

    version_id_col = mapper.version_id_col

    if adapter:
        pk_cols = [adapter.columns[c] for c in pk_cols]
        if polymorphic_on is not None:
            polymorphic_on = adapter.columns[polymorphic_on]
        if version_id_col is not None:
            version_id_col = adapter.columns[version_id_col]

    identity_class = mapper._identity_class

    new_populators = []
    existing_populators = []
    eager_populators = []
    populators = collections.defaultdict(list)

    load_path = context.query._current_path + path \
        if context.query._current_path.path \
        else path
    props = mapper._prop_set
    if only_load_props is not None:
        props = props.intersection(
            mapper._props[k] for k in only_load_props)

    def populate_state(state, dict_, row, isnew, only_load_props):
        if isnew:
            if context.propagate_options:
                state.load_options = context.propagate_options
            if state.load_options:
                state.load_path = load_path
    quick_populators = path.get(
        context.attributes, "memoized_setups", _none_set)

        if not new_populators:
            _populators(mapper, context, path, row, adapter,
                        new_populators,
                        existing_populators,
                        eager_populators
                        )

        if isnew:
            populators = new_populators
    for prop in props:
        if prop in quick_populators:
            # this is an inlined path just for column-based attributes.
            col = quick_populators[prop]
            if col is _DEFER_FOR_STATE:
                populators["new"].append(
                    (prop.key, prop._deferred_column_loader))
            elif col is _SET_DEFERRED_EXPIRED:
                # note that in this path, we are no longer
                # searching in the result to see if the column might
                # be present in some unexpected way.
                populators["expire"].append((prop.key, False))
            else:
                if adapter:
                    col = adapter.columns[col]
                getter = result._getter(col)
                if getter:
                    populators["quick"].append((prop.key, getter))
                else:
                    # fall back to the ColumnProperty itself, which
                    # will iterate through all of its columns
                    # to see if one fits
                    prop.create_row_processor(
                        context, path, mapper, result, adapter, populators)
        else:
            populators = existing_populators
            prop.create_row_processor(
                context, path, mapper, result, adapter, populators)

        if only_load_props is None:
            for key, populator in populators:
                populator(state, dict_, row)
        elif only_load_props:
            for key, populator in populators:
                if key in only_load_props:
                    populator(state, dict_, row)
    propagate_options = context.propagate_options
    if propagate_options:
        load_path = context.query._current_path + path \
            if context.query._current_path.path else path

    session_identity_map = context.session.identity_map

    listeners = mapper.dispatch

    # legacy events - I'd very much like to yank these totally
    translate_row = listeners.translate_row or None
    create_instance = listeners.create_instance or None
    populate_instance = listeners.populate_instance or None
    append_result = listeners.append_result or None
    ####

    populate_existing = context.populate_existing or mapper.always_refresh
    invoke_all_eagers = context.invoke_all_eagers
    load_evt = bool(mapper.class_manager.dispatch.load)
    refresh_evt = bool(mapper.class_manager.dispatch.refresh)
    instance_state = attributes.instance_state
    instance_dict = attributes.instance_dict
    session_id = context.session.hash_key
    version_check = context.version_check
    runid = context.runid

    if refresh_state:
        refresh_identity_key = refresh_state.key
        if refresh_identity_key is None:
            # super-rare condition; a refresh is being called
            # on a non-instance-key instance; this is meant to only
            # occur within a flush()
            refresh_identity_key = \
                mapper._identity_key_from_state(refresh_state)
    else:
        refresh_identity_key = None

    if mapper.allow_partial_pks:
        is_not_primary_key = _none_set.issuperset
    else:
        is_not_primary_key = _none_set.issubset
        is_not_primary_key = _none_set.intersection

    def _instance(row, result):
        if not new_populators and invoke_all_eagers:
            _populators(mapper, context, path, row, adapter,
                        new_populators,
                        existing_populators,
                        eager_populators
                        )
    def _instance(row):

        if translate_row:
            for fn in translate_row:
                ret = fn(mapper, context, row)
                if ret is not EXT_CONTINUE:
                    row = ret
                    break

        if polymorphic_on is not None:
            discriminator = row[polymorphic_on]
            if discriminator is not None:
                _instance = polymorphic_instances[discriminator]
                if _instance:
                    return _instance(row, result)

        # determine identity key
        if refresh_state:
            identitykey = refresh_state.key
            if identitykey is None:
                # super-rare condition; a refresh is being called
                # on a non-instance-key instance; this is meant to only
                # occur within a flush()
                identitykey = mapper._identity_key_from_state(refresh_state)
        # determine the state that we'll be populating
        if refresh_identity_key:
            # fixed state that we're refreshing
            state = refresh_state
            instance = state.obj()
            dict_ = instance_dict(instance)
            isnew = state.runid != runid
            currentload = True
            loaded_instance = False
        else:
            # look at the row, see if that identity is in the
            # session, or we have to create a new one
            identitykey = (
                identity_class,
                tuple([row[column] for column in pk_cols])
            )

        instance = session_identity_map.get(identitykey)
            instance = session_identity_map.get(identitykey)

        if instance is not None:
            state = attributes.instance_state(instance)
            dict_ = attributes.instance_dict(instance)
            if instance is not None:
                # existing instance
                state = instance_state(instance)
                dict_ = instance_dict(instance)

            isnew = state.runid != context.runid
            currentload = not isnew
            loaded_instance = False
                isnew = state.runid != runid
                currentload = not isnew
                loaded_instance = False

            if not currentload and \
                    version_id_col is not None and \
                    context.version_check and \
                    mapper._get_state_attr_by_column(
                        state,
                        dict_,
                        mapper.version_id_col) != \
                    row[version_id_col]:
                if version_check and not currentload:
                    _validate_version_id(mapper, state, dict_, row, adapter)

                raise orm_exc.StaleDataError(
                    "Instance '%s' has version id '%s' which "
                    "does not match database-loaded version id '%s'."
                    % (state_str(state),
                       mapper._get_state_attr_by_column(
                           state, dict_,
                           mapper.version_id_col),
                       row[version_id_col]))
        elif refresh_state:
            # out of band refresh_state detected (i.e. its not in the
            # session.identity_map) honor it anyway. this can happen
            # if a _get() occurs within save_obj(), such as
            # when eager_defaults is True.
            state = refresh_state
            instance = state.obj()
            dict_ = attributes.instance_dict(instance)
            isnew = state.runid != context.runid
            currentload = True
            loaded_instance = False
        else:
            # check for non-NULL values in the primary key columns,
            # else no entity is returned for the row
            if is_not_primary_key(identitykey[1]):
                return None

            isnew = True
            currentload = True
            loaded_instance = True

            if create_instance:
                for fn in create_instance:
                    instance = fn(mapper, context,
                                  row, mapper.class_)
                    if instance is not EXT_CONTINUE:
                        manager = attributes.manager_of_class(
                            instance.__class__)
                        # TODO: if manager is None, raise a friendly error
                        # about returning instances of unmapped types
                        manager.setup_instance(instance)
                        break
                else:
                    instance = mapper.class_manager.new_instance()
            else:
                # create a new instance

                # check for non-NULL values in the primary key columns,
                # else no entity is returned for the row
                if is_not_primary_key(identitykey[1]):
                    return None

                isnew = True
                currentload = True
                loaded_instance = True

                instance = mapper.class_manager.new_instance()

            dict_ = attributes.instance_dict(instance)
            state = attributes.instance_state(instance)
            state.key = identitykey
                dict_ = instance_dict(instance)
                state = instance_state(instance)
                state.key = identitykey

            # attach instance to session.
            state.session_id = context.session.hash_key
            session_identity_map.add(state)
                # attach instance to session.
                state.session_id = session_id
                session_identity_map._add_unpresent(state, identitykey)

        # populate.  this looks at whether this state is new
        # for this load or was existing, and whether or not this
        # row is the first row with this identity.
        if currentload or populate_existing:
            # state is being fully loaded, so populate.
            # add to the "context.progress" collection.
            if isnew:
                state.runid = context.runid
                context.progress[state] = dict_
            # full population routines.  Objects here are either
            # just created, or we are doing a populate_existing

            if populate_instance:
                for fn in populate_instance:
                    ret = fn(mapper, context, row, state,
                             only_load_props=only_load_props,
                             instancekey=identitykey, isnew=isnew)
                    if ret is not EXT_CONTINUE:
                        break
                else:
                    populate_state(state, dict_, row, isnew, only_load_props)
            else:
                populate_state(state, dict_, row, isnew, only_load_props)
            if isnew and propagate_options:
                state.load_options = propagate_options
                state.load_path = load_path

            if loaded_instance:
                state.manager.dispatch.load(state, context)
            elif isnew:
                state.manager.dispatch.refresh(state, context, only_load_props)

        elif state in context.partials or state.unloaded or eager_populators:
            # state is having a partial set of its attributes
            # refreshed.  Populate those attributes,
            # and add to the "context.partials" collection.
            if state in context.partials:
                isnew = False
                (d_, attrs) = context.partials[state]
            else:
                isnew = True
                attrs = state.unloaded
                context.partials[state] = (dict_, attrs)

            if populate_instance:
                for fn in populate_instance:
                    ret = fn(mapper, context, row, state,
                             only_load_props=attrs,
                             instancekey=identitykey, isnew=isnew)
                    if ret is not EXT_CONTINUE:
                        break
                else:
                    populate_state(state, dict_, row, isnew, attrs)
            else:
                populate_state(state, dict_, row, isnew, attrs)

            for key, pop in eager_populators:
                if key not in state.unloaded:
                    pop(state, dict_, row)
            _populate_full(
                context, row, state, dict_, isnew,
                loaded_instance, populate_existing, populators)

            if isnew:
                state.manager.dispatch.refresh(state, context, attrs)
                if loaded_instance and load_evt:
                    state.manager.dispatch.load(state, context)
                elif refresh_evt:
                    state.manager.dispatch.refresh(
                        state, context, only_load_props)

        if result is not None:
            if append_result:
                for fn in append_result:
                    if fn(mapper, context, row, state,
                          result, instancekey=identitykey,
                          isnew=isnew) is not EXT_CONTINUE:
                        break
                else:
                    result.append(instance)
            else:
                result.append(instance)
                if populate_existing or state.modified:
                    if refresh_state and only_load_props:
                        state._commit(dict_, only_load_props)
                    else:
                        state._commit_all(dict_, session_identity_map)

        else:
            # partial population routines, for objects that were already
            # in the Session, but a row matches them; apply eager loaders
            # on existing objects, etc.
            unloaded = state.unloaded
            isnew = state not in context.partials

            if not isnew or unloaded or populators["eager"]:
                # state is having a partial set of its attributes
                # refreshed.  Populate those attributes,
                # and add to the "context.partials" collection.

                to_load = _populate_partial(
                    context, row, state, dict_, isnew,
                    unloaded, populators)

                if isnew:
                    if refresh_evt:
                        state.manager.dispatch.refresh(
                            state, context, to_load)

                    state._commit(dict_, to_load)

        return instance

    if mapper.polymorphic_map and not _polymorphic_from and not refresh_state:
        # if we are doing polymorphic, dispatch to a different _instance()
        # method specific to the subclass mapper
        _instance = _decorate_polymorphic_switch(
            _instance, context, mapper, result, path,
            polymorphic_discriminator, adapter)

    return _instance


def _populators(mapper, context, path, row, adapter,
                new_populators, existing_populators, eager_populators):
    """Produce a collection of attribute level row processor
    callables."""
def _populate_full(
        context, row, state, dict_, isnew,
        loaded_instance, populate_existing, populators):
    if isnew:
        # first time we are seeing a row with this identity.
        state.runid = context.runid

        delayed_populators = []
        pops = (new_populators, existing_populators, delayed_populators,
                eager_populators)

        for prop in mapper._props.values():

            for i, pop in enumerate(prop.create_row_processor(
                    context,
                    path,
                    mapper, row, adapter)):
                if pop is not None:
                    pops[i].append((prop.key, pop))

        if delayed_populators:
            new_populators.extend(delayed_populators)
        for key, getter in populators["quick"]:
            dict_[key] = getter(row)
        if populate_existing:
            for key, set_callable in populators["expire"]:
                dict_.pop(key, None)
                if set_callable:
                    state.expired_attributes.add(key)
        else:
            for key, set_callable in populators["expire"]:
                if set_callable:
                    state.expired_attributes.add(key)
        for key, populator in populators["new"]:
            populator(state, dict_, row)
        for key, populator in populators["delayed"]:
            populator(state, dict_, row)
    else:
        # have already seen rows with this identity.
        for key, populator in populators["existing"]:
            populator(state, dict_, row)


def _configure_subclass_mapper(mapper, context, path, adapter):
    """Produce a mapper level row processor callable factory for mappers
    inheriting this one."""
def _populate_partial(
        context, row, state, dict_, isnew,
        unloaded, populators):
    if not isnew:
        to_load = context.partials[state]
        for key, populator in populators["existing"]:
            if key in to_load:
                populator(state, dict_, row)
    else:
        to_load = unloaded
        context.partials[state] = to_load

        for key, getter in populators["quick"]:
            if key in to_load:
                dict_[key] = getter(row)
        for key, set_callable in populators["expire"]:
            if key in to_load:
                dict_.pop(key, None)
                if set_callable:
                    state.expired_attributes.add(key)
        for key, populator in populators["new"]:
            if key in to_load:
                populator(state, dict_, row)
        for key, populator in populators["delayed"]:
            if key in to_load:
                populator(state, dict_, row)
    for key, populator in populators["eager"]:
        if key not in unloaded:
            populator(state, dict_, row)

    return to_load


def _validate_version_id(mapper, state, dict_, row, adapter):

    version_id_col = mapper.version_id_col

    if version_id_col is None:
        return

    if adapter:
        version_id_col = adapter.columns[version_id_col]

    if mapper._get_state_attr_by_column(
            state, dict_, mapper.version_id_col) != row[version_id_col]:
        raise orm_exc.StaleDataError(
            "Instance '%s' has version id '%s' which "
            "does not match database-loaded version id '%s'."
            % (state_str(state), mapper._get_state_attr_by_column(
                state, dict_, mapper.version_id_col),
                row[version_id_col]))


def _decorate_polymorphic_switch(
        instance_fn, context, mapper, result, path,
        polymorphic_discriminator, adapter):
    if polymorphic_discriminator is not None:
        polymorphic_on = polymorphic_discriminator
    else:
        polymorphic_on = mapper.polymorphic_on
    if polymorphic_on is None:
        return instance_fn

    if adapter:
        polymorphic_on = adapter.columns[polymorphic_on]

    def configure_subclass_mapper(discriminator):
        try:

@ -539,16 +576,26 @@ def _configure_subclass_mapper(mapper, context, path, adapter):
            raise AssertionError(
                "No such polymorphic_identity %r is defined" %
                discriminator)
        if sub_mapper is mapper:
            return None
        else:
            if sub_mapper is mapper:
                return None

            return instance_processor(
                sub_mapper,
                context,
                path,
                adapter,
                polymorphic_from=mapper)
    return configure_subclass_mapper
        return _instance_processor(
            sub_mapper, context, result,
            path, adapter, _polymorphic_from=mapper)

    polymorphic_instances = util.PopulateDict(
        configure_subclass_mapper
    )

    def polymorphic_instance(row):
        discriminator = row[polymorphic_on]
        if discriminator is not None:
            _instance = polymorphic_instances[discriminator]
            if _instance:
                return _instance(row)
        return instance_fn(row)
    return polymorphic_instance


def load_scalar_attributes(mapper, state, attribute_names):

@ -567,10 +614,17 @@ def load_scalar_attributes(mapper, state, attribute_names):
    result = False

    if mapper.inherits and not mapper.concrete:
        # because we are using Core to produce a select() that we
        # pass to the Query, we aren't calling setup() for mapped
        # attributes; in 1.0 this means deferred attrs won't get loaded
        # by default
        statement = mapper._optimized_get_statement(state, attribute_names)
        if statement is not None:
            result = load_on_ident(
                session.query(mapper).from_statement(statement),
                session.query(mapper).
                options(
                    strategy_options.Load(mapper).undefer("*")
                ).from_statement(statement),
                None,
                only_load_props=attribute_names,
                refresh_state=state

@ -596,10 +650,11 @@ def load_scalar_attributes(mapper, state, attribute_names):
        if (_none_set.issubset(identity_key) and
                not mapper.allow_partial_pks) or \
                _none_set.issuperset(identity_key):
            util.warn("Instance %s to be refreshed doesn't "
                      "contain a full primary key - can't be refreshed "
                      "(and shouldn't be expired, either)."
                      % state_str(state))
            util.warn_limited(
                "Instance %s to be refreshed doesn't "
                "contain a full primary key - can't be refreshed "
                "(and shouldn't be expired, either).",
                state_str(state))
            return

        result = load_on_ident(

@ -1,5 +1,5 @@
# orm/mapper.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@ -25,7 +25,8 @@ from .. import sql, util, log, exc as sa_exc, event, schema, inspection
from ..sql import expression, visitors, operators, util as sql_util
from . import instrumentation, attributes, exc as orm_exc, loading
from . import properties
from .interfaces import MapperProperty, _InspectionAttr, _MappedAttribute
from . import util as orm_util
from .interfaces import MapperProperty, InspectionAttr, _MappedAttribute

from .base import _class_to_mapper, _state_mapper, class_mapper, \
    state_str, _INSTRUMENTOR

@ -51,8 +52,7 @@ _CONFIGURE_MUTEX = util.threading.RLock()

@inspection._self_inspects
@log.class_logger
class Mapper(_InspectionAttr):

class Mapper(InspectionAttr):
    """Define the correlation of class attributes to database table
    columns.

@ -426,6 +426,12 @@ class Mapper(_InspectionAttr):
          thus persisting the value to the ``discriminator`` column
          in the database.

          .. warning::

              Currently, **only one discriminator column may be set**, typically
              on the base-most class in the hierarchy. "Cascading" polymorphic
              columns are not yet supported.

          .. seealso::

              :ref:`inheritance_toplevel`
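
For context, the standard single-discriminator configuration looks like this (a declarative sketch; class and column names are illustrative):

    from sqlalchemy import Column, Integer, String
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Employee(Base):
        __tablename__ = 'employee'
        id = Column(Integer, primary_key=True)
        type = Column(String(50))
        __mapper_args__ = {
            'polymorphic_on': type,
            'polymorphic_identity': 'employee',
        }

    class Engineer(Employee):
        __mapper_args__ = {'polymorphic_identity': 'engineer'}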

@ -968,6 +974,15 @@ class Mapper(_InspectionAttr):
            self._all_tables = self.inherits._all_tables

            if self.polymorphic_identity is not None:
                if self.polymorphic_identity in self.polymorphic_map:
                    util.warn(
                        "Reassigning polymorphic association for identity %r "
                        "from %r to %r: Check for duplicate use of %r as "
                        "value for polymorphic_identity." %
                        (self.polymorphic_identity,
                         self.polymorphic_map[self.polymorphic_identity],
                         self, self.polymorphic_identity)
                    )
                self.polymorphic_map[self.polymorphic_identity] = self

        else:

@ -1080,6 +1095,7 @@ class Mapper(_InspectionAttr):
        auto-session attachment logic.

        """

        manager = attributes.manager_of_class(self.class_)

        if self.non_primary:

@ -1109,6 +1125,8 @@ class Mapper(_InspectionAttr):

        _mapper_registry[self] = True

        # note: this *must be called before instrumentation.register_class*
        # to maintain the documented behavior of instrument_class
        self.dispatch.instrument_class(self, self.class_)

        if manager is None:

@ -1127,7 +1145,6 @@ class Mapper(_InspectionAttr):

        event.listen(manager, 'first_init', _event_on_first_init, raw=True)
        event.listen(manager, 'init', _event_on_init, raw=True)
        event.listen(manager, 'resurrect', _event_on_resurrect, raw=True)

        for key, method in util.iterate_attributes(self.class_):
            if isinstance(method, types.FunctionType):

@ -1189,14 +1206,6 @@ class Mapper(_InspectionAttr):
                util.ordered_column_set(t.c).\
                intersection(all_cols)

        # determine cols that aren't expressed within our tables; mark these
        # as "read only" properties which are refreshed upon INSERT/UPDATE
        self._readonly_props = set(
            self._columntoproperty[col]
            for col in self._columntoproperty
            if not hasattr(col, 'table') or
            col.table not in self._cols_by_table)

        # if explicit PK argument sent, add those columns to the
        # primary key mappings
        if self._primary_key_argument:

@ -1247,6 +1256,15 @@ class Mapper(_InspectionAttr):
        self.primary_key = tuple(primary_key)
        self._log("Identified primary key columns: %s", primary_key)

        # determine cols that aren't expressed within our tables; mark these
        # as "read only" properties which are refreshed upon INSERT/UPDATE
        self._readonly_props = set(
            self._columntoproperty[col]
            for col in self._columntoproperty
            if self._columntoproperty[col] not in self._identity_key_props and
            (not hasattr(col, 'table') or
             col.table not in self._cols_by_table))

    def _configure_properties(self):

        # Column and other ClauseElement objects which are mapped

@ -1452,13 +1470,11 @@ class Mapper(_InspectionAttr):
            if polymorphic_key in dict_ and \
                    dict_[polymorphic_key] not in \
                    mapper._acceptable_polymorphic_identities:
                util.warn(
                util.warn_limited(
                    "Flushing object %s with "
                    "incompatible polymorphic identity %r; the "
                    "object may not refresh and/or load correctly" % (
                        state_str(state),
                        dict_[polymorphic_key]
                    )
                    "object may not refresh and/or load correctly",
                    (state_str(state), dict_[polymorphic_key])
                )

        self._set_polymorphic_identity = _set_polymorphic_identity

@ -1489,6 +1505,10 @@ class Mapper(_InspectionAttr):

        return identities

    @_memoized_configured_property
    def _prop_set(self):
        return frozenset(self._props.values())

    def _adapt_inherited_property(self, key, prop, init):
        if not self.concrete:
            self._configure_property(key, prop, init=False, setparent=False)

@ -1578,6 +1598,8 @@ class Mapper(_InspectionAttr):
                    self,
                    prop,
                ))
            oldprop = self._props[key]
            self._path_registry.pop(oldprop, None)

        self._props[key] = prop

@ -1892,6 +1914,66 @@ class Mapper(_InspectionAttr):

    """

    @_memoized_configured_property
    def _insert_cols_as_none(self):
        return dict(
            (
                table,
                frozenset(
                    col.key for col in columns
                    if not col.primary_key and
                    not col.server_default and not col.default)
            )
            for table, columns in self._cols_by_table.items()
        )

    @_memoized_configured_property
    def _propkey_to_col(self):
        return dict(
            (
                table,
                dict(
                    (self._columntoproperty[col].key, col)
                    for col in columns
                )
            )
            for table, columns in self._cols_by_table.items()
        )

    @_memoized_configured_property
    def _pk_keys_by_table(self):
        return dict(
            (
                table,
                frozenset([col.key for col in pks])
            )
            for table, pks in self._pks_by_table.items()
        )

    @_memoized_configured_property
    def _server_default_cols(self):
        return dict(
            (
                table,
                frozenset([
                    col.key for col in columns
                    if col.server_default is not None])
            )
            for table, columns in self._cols_by_table.items()
        )

    @_memoized_configured_property
    def _server_onupdate_default_cols(self):
        return dict(
            (
                table,
                frozenset([
                    col.key for col in columns
                    if col.server_onupdate is not None])
            )
            for table, columns in self._cols_by_table.items()
        )

    @property
    def selectable(self):
        """The :func:`.select` construct this :class:`.Mapper` selects from

@ -1968,6 +2050,17 @@ class Mapper(_InspectionAttr):
        returned, including :attr:`.synonyms`, :attr:`.column_attrs`,
        :attr:`.relationships`, and :attr:`.composites`.

        .. warning::

            the :attr:`.Mapper.attrs` accessor namespace is an
            instance of :class:`.OrderedProperties`.  This is
            a dictionary-like object which includes a small number of
            named methods such as :meth:`.OrderedProperties.items`
            and :meth:`.OrderedProperties.values`.  When
            accessing attributes dynamically, favor using the dict-access
            scheme, e.g. ``mapper.attrs[somename]`` over
            ``getattr(mapper.attrs, somename)`` to avoid name collisions.

        .. seealso::

            :attr:`.Mapper.all_orm_descriptors`
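
In practice the dict-access pattern looks like this (assuming a mapped ``User`` class):

    from sqlalchemy import inspect

    mapper = inspect(User)
    name_prop = mapper.attrs['name']    # dict-style access, collision-safe
    for key, prop in mapper.attrs.items():
        print(key, prop)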

@ -1979,7 +2072,7 @@ class Mapper(_InspectionAttr):

    @util.memoized_property
    def all_orm_descriptors(self):
        """A namespace of all :class:`._InspectionAttr` attributes associated
        """A namespace of all :class:`.InspectionAttr` attributes associated
        with the mapped class.

        These attributes are in all cases Python :term:`descriptors`

@ -1988,13 +2081,13 @@ class Mapper(_InspectionAttr):
        This namespace includes attributes that are mapped to the class
        as well as attributes declared by extension modules.
        It includes any Python descriptor type that inherits from
        :class:`._InspectionAttr`.  This includes
        :class:`.InspectionAttr`.  This includes
        :class:`.QueryableAttribute`, as well as extension types such as
        :class:`.hybrid_property`, :class:`.hybrid_method` and
        :class:`.AssociationProxy`.

        To distinguish between mapped attributes and extension attributes,
        the attribute :attr:`._InspectionAttr.extension_type` will refer
        the attribute :attr:`.InspectionAttr.extension_type` will refer
        to a constant that distinguishes between different extension types.

        When dealing with a :class:`.QueryableAttribute`, the

@ -2003,6 +2096,17 @@ class Mapper(_InspectionAttr):
        referring to the collection of mapped properties via
        :attr:`.Mapper.attrs`.

        .. warning::

            the :attr:`.Mapper.all_orm_descriptors` accessor namespace is an
            instance of :class:`.OrderedProperties`.  This is
            a dictionary-like object which includes a small number of
            named methods such as :meth:`.OrderedProperties.items`
            and :meth:`.OrderedProperties.values`.  When
            accessing attributes dynamically, favor using the dict-access
            scheme, e.g. ``mapper.all_orm_descriptors[somename]`` over
            ``getattr(mapper.all_orm_descriptors, somename)`` to avoid name
            collisions.

        .. versionadded:: 0.8.0

        .. seealso::

@ -2044,6 +2148,17 @@ class Mapper(_InspectionAttr):
        """Return a namespace of all :class:`.RelationshipProperty`
        properties maintained by this :class:`.Mapper`.

        .. warning::

            the :attr:`.Mapper.relationships` accessor namespace is an
            instance of :class:`.OrderedProperties`.  This is
            a dictionary-like object which includes a small number of
            named methods such as :meth:`.OrderedProperties.items`
            and :meth:`.OrderedProperties.values`.  When
            accessing attributes dynamically, favor using the dict-access
            scheme, e.g. ``mapper.relationships[somename]`` over
            ``getattr(mapper.relationships, somename)`` to avoid name
            collisions.

        .. seealso::

            :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`

@ -2238,6 +2353,16 @@ class Mapper(_InspectionAttr):
    def primary_base_mapper(self):
        return self.class_manager.mapper.base_mapper

    def _result_has_identity_key(self, result, adapter=None):
        pk_cols = self.primary_key
        if adapter:
            pk_cols = [adapter.columns[c] for c in pk_cols]
        for col in pk_cols:
            if not result._has_key(col):
                return False
        else:
            return True

    def identity_key_from_row(self, row, adapter=None):
        """Return an identity-map key for use in storing/retrieving an
        item from the identity map.

@ -2286,7 +2411,7 @@ class Mapper(_InspectionAttr):
        manager = state.manager
        return self._identity_class, tuple([
            manager[self._columntoproperty[col].key].
            impl.get(state, dict_, attributes.PASSIVE_OFF)
            impl.get(state, dict_, attributes.PASSIVE_RETURN_NEVER_SET)
            for col in self.primary_key
        ])

@ -2301,22 +2426,50 @@ class Mapper(_InspectionAttr):

        """
        state = attributes.instance_state(instance)
        return self._primary_key_from_state(state)
        return self._primary_key_from_state(state, attributes.PASSIVE_OFF)

    def _primary_key_from_state(self, state):
    def _primary_key_from_state(
            self, state, passive=attributes.PASSIVE_RETURN_NEVER_SET):
        dict_ = state.dict
        manager = state.manager
        return [
            manager[self._columntoproperty[col].key].
            impl.get(state, dict_, attributes.PASSIVE_OFF)
            for col in self.primary_key
            manager[prop.key].
            impl.get(state, dict_, passive)
            for prop in self._identity_key_props
        ]

    def _get_state_attr_by_column(self, state, dict_, column,
                                  passive=attributes.PASSIVE_OFF):
    @_memoized_configured_property
    def _identity_key_props(self):
        return [self._columntoproperty[col] for col in self.primary_key]

    @_memoized_configured_property
    def _all_pk_props(self):
        collection = set()
        for table in self.tables:
            collection.update(self._pks_by_table[table])
        return collection

    @_memoized_configured_property
    def _should_undefer_in_wildcard(self):
        cols = set(self.primary_key)
        if self.polymorphic_on is not None:
            cols.add(self.polymorphic_on)
        return cols

    @_memoized_configured_property
    def _primary_key_propkeys(self):
        return set([prop.key for prop in self._all_pk_props])

    def _get_state_attr_by_column(
            self, state, dict_, column,
            passive=attributes.PASSIVE_RETURN_NEVER_SET):
        prop = self._columntoproperty[column]
        return state.manager[prop.key].impl.get(state, dict_, passive=passive)

    def _set_committed_state_attr_by_column(self, state, dict_, column, value):
        prop = self._columntoproperty[column]
        state.manager[prop.key].impl.set_committed_value(state, dict_, value)

    def _set_state_attr_by_column(self, state, dict_, column, value):
        prop = self._columntoproperty[column]
        state.manager[prop.key].impl.set(state, dict_, value, None)

@ -2324,14 +2477,12 @@ class Mapper(_InspectionAttr):
    def _get_committed_attr_by_column(self, obj, column):
        state = attributes.instance_state(obj)
        dict_ = attributes.instance_dict(obj)
        return self._get_committed_state_attr_by_column(state, dict_, column)
        return self._get_committed_state_attr_by_column(
            state, dict_, column, passive=attributes.PASSIVE_OFF)

    def _get_committed_state_attr_by_column(
            self,
            state,
            dict_,
            column,
            passive=attributes.PASSIVE_OFF):
            self, state, dict_, column,
            passive=attributes.PASSIVE_RETURN_NEVER_SET):

        prop = self._columntoproperty[column]
        return state.manager[prop.key].impl.\

@ -2372,7 +2523,7 @@ class Mapper(_InspectionAttr):
                    state, state.dict,
                    leftcol,
                    passive=attributes.PASSIVE_NO_INITIALIZE)
                if leftval is attributes.PASSIVE_NO_RESULT or leftval is None:
                if leftval in orm_util._none_set:
                    raise ColumnsNotAvailable()
                binary.left = sql.bindparam(None, leftval,
                                            type_=binary.right.type)

@ -2381,8 +2532,7 @@ class Mapper(_InspectionAttr):
                    state, state.dict,
                    rightcol,
                    passive=attributes.PASSIVE_NO_INITIALIZE)
                if rightval is attributes.PASSIVE_NO_RESULT or \
                        rightval is None:
                if rightval in orm_util._none_set:
                    raise ColumnsNotAvailable()
                binary.right = sql.bindparam(None, rightval,
                                             type_=binary.right.type)

@ -2419,15 +2569,24 @@ class Mapper(_InspectionAttr):
        for all relationships that meet the given cascade rule.

        :param type_:
          The name of the cascade rule (i.e. save-update, delete,
          etc.)
          The name of the cascade rule (i.e. ``"save-update"``, ``"delete"``,
          etc.).

          .. note::  the ``"all"`` cascade is not accepted here.  For a generic
             object traversal function, see :ref:`faq_walk_objects`.

        :param state:
          The lead InstanceState.  child items will be processed per
          the relationships defined for this object's mapper.

        the return value are object instances; this provides a strong
        reference so that they don't fall out of scope immediately.
        :return: the method yields individual object instances.

        .. seealso::

            :ref:`unitofwork_cascades`

            :ref:`faq_walk_objects` - illustrates a generic function to
            traverse all objects without relying on cascades.

        """
        visited_states = set()
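
A usage sketch (``User`` and ``some_user`` are assumed; the 4-tuple shape matches how the Session consumes this method):

    from sqlalchemy import inspect

    mapper = inspect(User)
    state = inspect(some_user)   # InstanceState of a loaded instance
    for obj, m, st, dct in mapper.cascade_iterator('save-update', state):
        print(obj)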

@ -2544,7 +2703,33 @@ def configure_mappers():
    have been constructed thus far.

    This function can be called any number of times, but in
    most cases is handled internally.
    most cases is invoked automatically, the first time mappings are used,
    as well as whenever mappings are used and additional not-yet-configured
    mappers have been constructed.

    Points at which this occurs include when a mapped class is instantiated
    into an instance, as well as when the :meth:`.Session.query` method
    is used.

    The :func:`.configure_mappers` function provides several event hooks
    that can be used to augment its functionality.  These methods include:

    * :meth:`.MapperEvents.before_configured` - called once before
      :func:`.configure_mappers` does any work; this can be used to establish
      additional options, properties, or related mappings before the operation
      proceeds.

    * :meth:`.MapperEvents.mapper_configured` - called as each individual
      :class:`.Mapper` is configured within the process; will include all
      mapper state except for backrefs set up by other mappers that are still
      to be configured.

    * :meth:`.MapperEvents.after_configured` - called once after
      :func:`.configure_mappers` is complete; at this stage, all
      :class:`.Mapper` objects that are known to SQLAlchemy will be fully
      configured.  Note that the calling application may still have other
      mappings that haven't been produced yet, such as if they are in modules
      as yet unimported.

    """
|
||||
|
||||
|
|
@ -2563,7 +2748,7 @@ def configure_mappers():
|
|||
if not Mapper._new_mappers:
|
||||
return
|
||||
|
||||
Mapper.dispatch(Mapper).before_configured()
|
||||
Mapper.dispatch._for_class(Mapper).before_configured()
|
||||
# initialize properties on all mappers
|
||||
# note that _mapper_registry is unordered, which
|
||||
# may randomly conceal/reveal issues related to
|
||||
|
|
@ -2584,7 +2769,7 @@ def configure_mappers():
|
|||
mapper._expire_memoizations()
|
||||
mapper.dispatch.mapper_configured(
|
||||
mapper, mapper.class_)
|
||||
except:
|
||||
except Exception:
|
||||
exc = sys.exc_info()[1]
|
||||
if not hasattr(exc, '_configure_failed'):
|
||||
mapper._configure_failed = exc
|
||||
|
|
@ -2595,7 +2780,7 @@ def configure_mappers():
|
|||
_already_compiling = False
|
||||
finally:
|
||||
_CONFIGURE_MUTEX.release()
|
||||
Mapper.dispatch(Mapper).after_configured()
|
||||
Mapper.dispatch._for_class(Mapper).after_configured()
|
||||
|
||||
|
||||
def reconstructor(fn):
|
||||
|
|
@ -2704,20 +2889,11 @@ def _event_on_init(state, args, kwargs):
|
|||
instrumenting_mapper._set_polymorphic_identity(state)
|
||||
|
||||
|
||||
def _event_on_resurrect(state):
|
||||
# re-populate the primary key elements
|
||||
# of the dict based on the mapping.
|
||||
instrumenting_mapper = state.manager.info.get(_INSTRUMENTOR)
|
||||
if instrumenting_mapper:
|
||||
for col, val in zip(instrumenting_mapper.primary_key, state.key[1]):
|
||||
instrumenting_mapper._set_state_attr_by_column(
|
||||
state, state.dict, col, val)
|
||||
|
||||
|
||||
class _ColumnMapping(dict):
|
||||
|
||||
"""Error reporting helper for mapper._columntoproperty."""
|
||||
|
||||
__slots__ = 'mapper',
|
||||
|
||||
def __init__(self, mapper):
|
||||
self.mapper = mapper
|
||||
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
# orm/path_registry.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
|
|
@ -13,6 +13,9 @@ from .. import util
|
|||
from .. import exc
|
||||
from itertools import chain
|
||||
from .base import class_mapper
|
||||
import logging
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _unreduce_path(path):
|
||||
|
|
@ -49,14 +52,19 @@ class PathRegistry(object):
|
|||
|
||||
"""
|
||||
|
||||
is_token = False
|
||||
is_root = False
|
||||
|
||||
def __eq__(self, other):
|
||||
return other is not None and \
|
||||
self.path == other.path
|
||||
|
||||
def set(self, attributes, key, value):
|
||||
log.debug("set '%s' on path '%s' to '%s'", key, self, value)
|
||||
attributes[(key, self.path)] = value
|
||||
|
||||
def setdefault(self, attributes, key, value):
|
||||
log.debug("setdefault '%s' on path '%s' to '%s'", key, self, value)
|
||||
attributes.setdefault((key, self.path), value)
|
||||
|
||||
def get(self, attributes, key, value=None):
|
||||
|
|
@ -148,6 +156,8 @@ class RootRegistry(PathRegistry):
|
|||
"""
|
||||
path = ()
|
||||
has_entity = False
|
||||
is_aliased_class = False
|
||||
is_root = True
|
||||
|
||||
def __getitem__(self, entity):
|
||||
return entity._path_registry
|
||||
|
|
@ -163,6 +173,15 @@ class TokenRegistry(PathRegistry):
|
|||
|
||||
has_entity = False
|
||||
|
||||
is_token = True
|
||||
|
||||
def generate_for_superclasses(self):
|
||||
if not self.parent.is_aliased_class and not self.parent.is_root:
|
||||
for ent in self.parent.mapper.iterate_to_root():
|
||||
yield TokenRegistry(self.parent.parent[ent], self.token)
|
||||
else:
|
||||
yield self
|
||||
|
||||
def __getitem__(self, entity):
|
||||
raise NotImplementedError()
|
||||
|
||||
|
|
@ -184,6 +203,11 @@ class PropRegistry(PathRegistry):
|
|||
self.parent = parent
|
||||
self.path = parent.path + (prop,)
|
||||
|
||||
def __str__(self):
|
||||
return " -> ".join(
|
||||
str(elem) for elem in self.path
|
||||
)
|
||||
|
||||
@util.memoized_property
|
||||
def has_entity(self):
|
||||
return hasattr(self.prop, "mapper")
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
|
|
@ -1,5 +1,5 @@
|
|||
# orm/properties.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
|
|
@ -34,6 +34,13 @@ class ColumnProperty(StrategizedProperty):
|
|||
|
||||
strategy_wildcard_key = 'column'
|
||||
|
||||
__slots__ = (
|
||||
'_orig_columns', 'columns', 'group', 'deferred',
|
||||
'instrument', 'comparator_factory', 'descriptor', 'extension',
|
||||
'active_history', 'expire_on_flush', 'info', 'doc',
|
||||
'strategy_class', '_creation_order', '_is_polymorphic_discriminator',
|
||||
'_mapped_by_synonym', '_deferred_column_loader')
|
||||
|
||||
def __init__(self, *columns, **kwargs):
|
||||
"""Provide a column-level property for use with a Mapper.
|
||||
|
||||
|
|
@ -109,6 +116,7 @@ class ColumnProperty(StrategizedProperty):
|
|||
**Deprecated.** Please see :class:`.AttributeEvents`.
|
||||
|
||||
"""
|
||||
super(ColumnProperty, self).__init__()
|
||||
self._orig_columns = [expression._labeled(c) for c in columns]
|
||||
self.columns = [expression._labeled(_orm_full_deannotate(c))
|
||||
for c in columns]
|
||||
|
|
@ -149,6 +157,12 @@ class ColumnProperty(StrategizedProperty):
|
|||
("instrument", self.instrument)
|
||||
)
|
||||
|
||||
@util.dependencies("sqlalchemy.orm.state", "sqlalchemy.orm.strategies")
|
||||
def _memoized_attr__deferred_column_loader(self, state, strategies):
|
||||
return state.InstanceState._instance_level_callable_processor(
|
||||
self.parent.class_manager,
|
||||
strategies.LoadDeferredColumns(self.key), self.key)
|
||||
|
||||
@property
|
||||
def expression(self):
|
||||
"""Return the primary column or expression for this ColumnProperty.
|
||||
|
|
@ -206,7 +220,7 @@ class ColumnProperty(StrategizedProperty):
|
|||
elif dest_state.has_identity and self.key not in dest_dict:
|
||||
dest_state._expire_attributes(dest_dict, [self.key])
|
||||
|
||||
class Comparator(PropComparator):
|
||||
class Comparator(util.MemoizedSlots, PropComparator):
|
||||
"""Produce boolean, comparison, and other operators for
|
||||
:class:`.ColumnProperty` attributes.
|
||||
|
||||
|
|
@ -224,24 +238,27 @@ class ColumnProperty(StrategizedProperty):
|
|||
:attr:`.TypeEngine.comparator_factory`
|
||||
|
||||
"""
|
||||
@util.memoized_instancemethod
|
||||
def __clause_element__(self):
|
||||
|
||||
__slots__ = '__clause_element__', 'info'
|
||||
|
||||
def _memoized_method___clause_element__(self):
|
||||
if self.adapter:
|
||||
return self.adapter(self.prop.columns[0])
|
||||
else:
|
||||
# no adapter, so we aren't aliased
|
||||
# assert self._parententity is self._parentmapper
|
||||
return self.prop.columns[0]._annotate({
|
||||
"parententity": self._parentmapper,
|
||||
"parentmapper": self._parentmapper})
|
||||
"parententity": self._parententity,
|
||||
"parentmapper": self._parententity})
|
||||
|
||||
@util.memoized_property
|
||||
def info(self):
|
||||
def _memoized_attr_info(self):
|
||||
ce = self.__clause_element__()
|
||||
try:
|
||||
return ce.info
|
||||
except AttributeError:
|
||||
return self.prop.info
|
||||
|
||||
def __getattr__(self, key):
|
||||
def _fallback_getattr(self, key):
|
||||
"""proxy attribute access down to the mapped column.
|
||||
|
||||
this allows user-defined comparison methods to be accessed.
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
|
|
@ -1,5 +1,5 @@
|
|||
# orm/relationships.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
|
|
@ -16,13 +16,14 @@ and `secondaryjoin` aspects of :func:`.relationship`.
|
|||
from __future__ import absolute_import
|
||||
from .. import sql, util, exc as sa_exc, schema, log
|
||||
|
||||
import weakref
|
||||
from .util import CascadeOptions, _orm_annotate, _orm_deannotate
|
||||
from . import dependency
|
||||
from . import attributes
|
||||
from ..sql.util import (
|
||||
ClauseAdapter,
|
||||
join_condition, _shallow_annotate, visit_binary_product,
|
||||
_deep_deannotate, selectables_overlap
|
||||
_deep_deannotate, selectables_overlap, adapt_criterion_to_null
|
||||
)
|
||||
from ..sql import operators, expression, visitors
|
||||
from .interfaces import (MANYTOMANY, MANYTOONE, ONETOMANY,
|
||||
|
|
@ -112,6 +113,7 @@ class RelationshipProperty(StrategizedProperty):
|
|||
active_history=False,
|
||||
cascade_backrefs=True,
|
||||
load_on_pending=False,
|
||||
bake_queries=True,
|
||||
strategy_class=None, _local_remote_pairs=None,
|
||||
query_class=None,
|
||||
info=None):
|
||||
|
|
@ -193,7 +195,7 @@ class RelationshipProperty(StrategizedProperty):
|
|||
|
||||
The :paramref:`~.relationship.secondary` keyword argument is
|
||||
typically applied in the case where the intermediary :class:`.Table`
|
||||
is not otherwise exprssed in any direct class mapping. If the
|
||||
is not otherwise expressed in any direct class mapping. If the
|
||||
"secondary" table is also explicitly mapped elsewhere (e.g. as in
|
||||
:ref:`association_pattern`), one should consider applying the
|
||||
:paramref:`~.relationship.viewonly` flag so that this
|
||||
|
|
@ -273,6 +275,31 @@ class RelationshipProperty(StrategizedProperty):
|
|||
:paramref:`~.relationship.backref` - alternative form
|
||||
of backref specification.
|
||||
|
||||
:param bake_queries=True:
|
||||
Use the :class:`.BakedQuery` cache to cache the construction of SQL
|
||||
used in lazy loads, when the :func:`.bake_lazy_loaders` function has
|
||||
first been called. Defaults to True and is intended to provide an
|
||||
"opt out" flag per-relationship when the baked query cache system is
|
||||
in use.
|
||||
|
||||
.. warning::
|
||||
|
||||
This flag **only** has an effect when the application-wide
|
||||
:func:`.bake_lazy_loaders` function has been called. It
|
||||
defaults to True, making it an "opt out" flag.
|
||||
|
||||
Setting this flag to False when baked queries are otherwise in
|
||||
use might be appropriate in order to reduce
|
||||
ORM memory use for this :func:`.relationship`, or to work around
|
||||
unresolved stability issues observed within the baked query
|
||||
cache system.
|
||||
|
||||
.. versionadded:: 1.0.0
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`baked_toplevel`
|
||||
|
||||
:param cascade:
|
||||
a comma-separated list of cascade rules which determines how
|
||||
Session operations should be "cascaded" from parent to child.
|
||||
|
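As a usage sketch for the new :paramref:`~.relationship.bake_queries`
flag documented above (model names are illustrative; assumes the 1.0-era
:func:`.bake_lazy_loaders` opt-in from the ``baked`` extension)::

    from sqlalchemy import Column, ForeignKey, Integer
    from sqlalchemy.ext import baked
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import relationship

    baked.bake_lazy_loaders()   # enable baked lazy loading app-wide

    Base = declarative_base()

    class Parent(Base):
        __tablename__ = 'parent'
        id = Column(Integer, primary_key=True)
        # opt this one relationship out of the baked query cache
        children = relationship("Child", bake_queries=False)

    class Child(Base):
        __tablename__ = 'child'
        id = Column(Integer, primary_key=True)
        parent_id = Column(Integer, ForeignKey('parent.id'))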
|
@ -459,22 +486,18 @@ class RelationshipProperty(StrategizedProperty):
|
|||
nullable, or when the reference is one-to-one or a collection that
|
||||
is guaranteed to have one or at least one entry.
|
||||
|
||||
If the joined-eager load is chained onto an existing LEFT OUTER
|
||||
JOIN, ``innerjoin=True`` will be bypassed and the join will continue
|
||||
to chain as LEFT OUTER JOIN so that the results don't change. As an
|
||||
alternative, specify the value ``"nested"``. This will instead nest
|
||||
the join on the right side, e.g. using the form "a LEFT OUTER JOIN
|
||||
(b JOIN c)".
|
||||
|
||||
.. versionadded:: 0.9.4 Added ``innerjoin="nested"`` option to
|
||||
support nesting of eager "inner" joins.
|
||||
The option supports the same "nested" and "unnested" options as
|
||||
that of :paramref:`.joinedload.innerjoin`. See that flag
|
||||
for details on nested / unnested behaviors.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:paramref:`.joinedload.innerjoin` - the option as specified by
|
||||
loader option, including detail on nesting behavior.
|
||||
|
||||
:ref:`what_kind_of_loading` - Discussion of some details of
|
||||
various loader options.
|
||||
|
||||
:paramref:`.joinedload.innerjoin` - loader option version
|
||||
|
||||
:param join_depth:
|
||||
when non-``None``, an integer value indicating how many levels
|
||||
|
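A brief sketch of the ``innerjoin`` styles described above, in both the
relationship and loader-option forms (``Order``, ``Item`` and ``session``
are hypothetical, continuing the declarative ``Base`` pattern from the
previous sketch)::

    from sqlalchemy.orm import joinedload, relationship

    class Order(Base):
        __tablename__ = 'order'
        id = Column(Integer, primary_key=True)
        # eager load with a nested inner join configured up front
        items = relationship("Item", lazy="joined", innerjoin="nested")

    # or chosen per query via the loader option:
    query = session.query(Order).options(
        joinedload(Order.items, innerjoin="nested"))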
|
@ -531,7 +554,7 @@ class RelationshipProperty(StrategizedProperty):
|
|||
|
||||
.. seealso::
|
||||
|
||||
:doc:`/orm/loading` - Full documentation on relationship loader
|
||||
:doc:`/orm/loading_relationships` - Full documentation on relationship loader
|
||||
configuration.
|
||||
|
||||
:ref:`dynamic_relationship` - detail on the ``dynamic`` option.
|
||||
|
|
@ -597,30 +620,26 @@ class RelationshipProperty(StrategizedProperty):
|
|||
and examples.
|
||||
|
||||
:param passive_updates=True:
|
||||
Indicates loading and INSERT/UPDATE/DELETE behavior when the
|
||||
source of a foreign key value changes (i.e. an "on update"
|
||||
cascade), which are typically the primary key columns of the
|
||||
source row.
|
||||
Indicates the persistence behavior to take when a referenced
|
||||
primary key value changes in place, such that the referencing
|
||||
foreign key columns will also need their value changed.
|
||||
|
||||
When True, it is assumed that ON UPDATE CASCADE is configured on
|
||||
When True, it is assumed that ``ON UPDATE CASCADE`` is configured on
|
||||
the foreign key in the database, and that the database will
|
||||
handle propagation of an UPDATE from a source column to
|
||||
dependent rows. Note that with databases which enforce
|
||||
referential integrity (i.e. PostgreSQL, MySQL with InnoDB tables),
|
||||
ON UPDATE CASCADE is required for this operation. The
|
||||
relationship() will update the value of the attribute on related
|
||||
items which are locally present in the session during a flush.
|
||||
dependent rows. When False, the SQLAlchemy :func:`.relationship`
|
||||
construct will attempt to emit its own UPDATE statements to
|
||||
modify related targets. However note that SQLAlchemy **cannot**
|
||||
emit an UPDATE for more than one level of cascade. Also,
|
||||
setting this flag to False is not compatible in the case where
|
||||
the database is in fact enforcing referential integrity, unless
|
||||
those constraints are explicitly "deferred", if the target backend
|
||||
supports it.
|
||||
|
||||
When False, it is assumed that the database does not enforce
|
||||
referential integrity and will not be issuing its own CASCADE
|
||||
operation for an update. The relationship() will issue the
|
||||
appropriate UPDATE statements to the database in response to the
|
||||
change of a referenced key, and items locally present in the
|
||||
session during a flush will also be refreshed.
|
||||
|
||||
This flag should probably be set to False if primary key changes
|
||||
are expected and the database in use doesn't support CASCADE
|
||||
(i.e. SQLite, MySQL MyISAM tables).
|
||||
It is highly advised that an application which is employing
|
||||
mutable primary keys keep ``passive_updates`` set to True,
|
||||
and instead uses the referential integrity features of the database
|
||||
itself in order to handle the change efficiently and fully.
|
||||
|
||||
.. seealso::
|
||||
|
||||
|
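The recommended ``passive_updates=True`` configuration described above,
sketched with a mutable natural primary key and the database handling the
cascade (assumes a backend with ``ON UPDATE CASCADE`` support)::

    from sqlalchemy import Column, ForeignKey, Integer, String
    from sqlalchemy.orm import relationship

    class User(Base):
        __tablename__ = 'user'
        username = Column(String(50), primary_key=True)

    class Address(Base):
        __tablename__ = 'address'
        id = Column(Integer, primary_key=True)
        username = Column(
            String(50),
            ForeignKey('user.username', onupdate="CASCADE"))
        # the default; the DB propagates key changes to address rows
        user = relationship(User, passive_updates=True)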
|
@ -778,6 +797,7 @@ class RelationshipProperty(StrategizedProperty):
|
|||
|
||||
|
||||
"""
|
||||
super(RelationshipProperty, self).__init__()
|
||||
|
||||
self.uselist = uselist
|
||||
self.argument = argument
|
||||
|
|
@ -804,6 +824,7 @@ class RelationshipProperty(StrategizedProperty):
|
|||
self.join_depth = join_depth
|
||||
self.local_remote_pairs = _local_remote_pairs
|
||||
self.extension = extension
|
||||
self.bake_queries = bake_queries
|
||||
self.load_on_pending = load_on_pending
|
||||
self.comparator_factory = comparator_factory or \
|
||||
RelationshipProperty.Comparator
|
||||
|
|
@ -875,13 +896,13 @@ class RelationshipProperty(StrategizedProperty):
|
|||
|
||||
"""
|
||||
self.prop = prop
|
||||
self._parentmapper = parentmapper
|
||||
self._parententity = parentmapper
|
||||
self._adapt_to_entity = adapt_to_entity
|
||||
if of_type:
|
||||
self._of_type = of_type
|
||||
|
||||
def adapt_to_entity(self, adapt_to_entity):
|
||||
return self.__class__(self.property, self._parentmapper,
|
||||
return self.__class__(self.property, self._parententity,
|
||||
adapt_to_entity=adapt_to_entity,
|
||||
of_type=self._of_type)
|
||||
|
||||
|
|
@ -933,7 +954,7 @@ class RelationshipProperty(StrategizedProperty):
|
|||
"""
|
||||
return RelationshipProperty.Comparator(
|
||||
self.property,
|
||||
self._parentmapper,
|
||||
self._parententity,
|
||||
adapt_to_entity=self._adapt_to_entity,
|
||||
of_type=cls)
|
||||
|
||||
|
|
@ -1224,11 +1245,15 @@ class RelationshipProperty(StrategizedProperty):
|
|||
state = attributes.instance_state(other)
|
||||
|
||||
def state_bindparam(x, state, col):
|
||||
o = state.obj() # strong ref
|
||||
dict_ = state.dict
|
||||
return sql.bindparam(
|
||||
x, unique=True, callable_=lambda:
|
||||
self.property.mapper.
|
||||
_get_committed_attr_by_column(o, col))
|
||||
x, unique=True,
|
||||
callable_=self.property._get_attr_w_warn_on_none(
|
||||
col,
|
||||
self.property.mapper._get_state_attr_by_column,
|
||||
state, dict_, col, passive=attributes.PASSIVE_OFF
|
||||
)
|
||||
)
|
||||
|
||||
def adapt(col):
|
||||
if self.adapter:
|
||||
|
|
@ -1243,13 +1268,14 @@ class RelationshipProperty(StrategizedProperty):
|
|||
adapt(x) == None)
|
||||
for (x, y) in self.property.local_remote_pairs])
|
||||
|
||||
criterion = sql.and_(*[x == y for (x, y) in
|
||||
zip(
|
||||
self.property.mapper.primary_key,
|
||||
self.property.
|
||||
mapper.
|
||||
primary_key_from_instance(other))
|
||||
criterion = sql.and_(*[
|
||||
x == y for (x, y) in
|
||||
zip(
|
||||
self.property.mapper.primary_key,
|
||||
self.property.mapper.primary_key_from_instance(other)
|
||||
)
|
||||
])
|
||||
|
||||
return ~self._criterion_exists(criterion)
|
||||
|
||||
def __ne__(self, other):
|
||||
|
|
@ -1293,8 +1319,9 @@ class RelationshipProperty(StrategizedProperty):
|
|||
"""
|
||||
if isinstance(other, (util.NoneType, expression.Null)):
|
||||
if self.property.direction == MANYTOONE:
|
||||
return sql.or_(*[x != None for x in
|
||||
self.property._calculated_foreign_keys])
|
||||
return _orm_annotate(~self.property._optimized_compare(
|
||||
None, adapt_source=self.adapter))
|
||||
|
||||
else:
|
||||
return self._criterion_exists()
|
||||
elif self.property.uselist:
|
||||
|
|
@ -1303,7 +1330,7 @@ class RelationshipProperty(StrategizedProperty):
|
|||
" to an object or collection; use "
|
||||
"contains() to test for membership.")
|
||||
else:
|
||||
return self.__negated_contains_or_equals(other)
|
||||
return _orm_annotate(self.__negated_contains_or_equals(other))
|
||||
|
||||
@util.memoized_property
|
||||
def property(self):
|
||||
|
|
@ -1311,36 +1338,88 @@ class RelationshipProperty(StrategizedProperty):
|
|||
mapperlib.Mapper._configure_all()
|
||||
return self.prop
|
||||
|
||||
def compare(self, op, value,
|
||||
value_is_parent=False,
|
||||
alias_secondary=True):
|
||||
if op == operators.eq:
|
||||
if value is None:
|
||||
if self.uselist:
|
||||
return ~sql.exists([1], self.primaryjoin)
|
||||
else:
|
||||
return self._optimized_compare(
|
||||
None,
|
||||
value_is_parent=value_is_parent,
|
||||
alias_secondary=alias_secondary)
|
||||
else:
|
||||
return self._optimized_compare(
|
||||
value,
|
||||
value_is_parent=value_is_parent,
|
||||
alias_secondary=alias_secondary)
|
||||
else:
|
||||
return op(self.comparator, value)
|
||||
def _with_parent(self, instance, alias_secondary=True):
|
||||
assert instance is not None
|
||||
return self._optimized_compare(
|
||||
instance, value_is_parent=True, alias_secondary=alias_secondary)
|
||||
|
||||
def _optimized_compare(self, value, value_is_parent=False,
|
||||
def _optimized_compare(self, state, value_is_parent=False,
|
||||
adapt_source=None,
|
||||
alias_secondary=True):
|
||||
if value is not None:
|
||||
value = attributes.instance_state(value)
|
||||
return self._lazy_strategy.lazy_clause(
|
||||
value,
|
||||
reverse_direction=not value_is_parent,
|
||||
alias_secondary=alias_secondary,
|
||||
adapt_source=adapt_source)
|
||||
if state is not None:
|
||||
state = attributes.instance_state(state)
|
||||
|
||||
reverse_direction = not value_is_parent
|
||||
|
||||
if state is None:
|
||||
return self._lazy_none_clause(
|
||||
reverse_direction,
|
||||
adapt_source=adapt_source)
|
||||
|
||||
if not reverse_direction:
|
||||
criterion, bind_to_col = \
|
||||
self._lazy_strategy._lazywhere, \
|
||||
self._lazy_strategy._bind_to_col
|
||||
else:
|
||||
criterion, bind_to_col = \
|
||||
self._lazy_strategy._rev_lazywhere, \
|
||||
self._lazy_strategy._rev_bind_to_col
|
||||
|
||||
if reverse_direction:
|
||||
mapper = self.mapper
|
||||
else:
|
||||
mapper = self.parent
|
||||
|
||||
dict_ = attributes.instance_dict(state.obj())
|
||||
|
||||
def visit_bindparam(bindparam):
|
||||
if bindparam._identifying_key in bind_to_col:
|
||||
bindparam.callable = self._get_attr_w_warn_on_none(
|
||||
bind_to_col[bindparam._identifying_key],
|
||||
mapper._get_state_attr_by_column,
|
||||
state, dict_,
|
||||
bind_to_col[bindparam._identifying_key],
|
||||
passive=attributes.PASSIVE_OFF)
|
||||
|
||||
if self.secondary is not None and alias_secondary:
|
||||
criterion = ClauseAdapter(
|
||||
self.secondary.alias()).\
|
||||
traverse(criterion)
|
||||
|
||||
criterion = visitors.cloned_traverse(
|
||||
criterion, {}, {'bindparam': visit_bindparam})
|
||||
|
||||
if adapt_source:
|
||||
criterion = adapt_source(criterion)
|
||||
return criterion
|
||||
|
||||
def _get_attr_w_warn_on_none(self, column, fn, *arg, **kw):
|
||||
def _go():
|
||||
value = fn(*arg, **kw)
|
||||
if value is None:
|
||||
util.warn(
|
||||
"Got None for value of column %s; this is unsupported "
|
||||
"for a relationship comparison and will not "
|
||||
"currently produce an IS comparison "
|
||||
"(but may in a future release)" % column)
|
||||
return value
|
||||
return _go
|
||||
|
||||
def _lazy_none_clause(self, reverse_direction=False, adapt_source=None):
|
||||
if not reverse_direction:
|
||||
criterion, bind_to_col = \
|
||||
self._lazy_strategy._lazywhere, \
|
||||
self._lazy_strategy._bind_to_col
|
||||
else:
|
||||
criterion, bind_to_col = \
|
||||
self._lazy_strategy._rev_lazywhere, \
|
||||
self._lazy_strategy._rev_bind_to_col
|
||||
|
||||
criterion = adapt_criterion_to_null(criterion, bind_to_col)
|
||||
|
||||
if adapt_source:
|
||||
criterion = adapt_source(criterion)
|
||||
return criterion
|
||||
|
||||
def __str__(self):
|
||||
return str(self.parent.class_.__name__) + "." + self.key
|
||||
|
|
@ -1551,6 +1630,7 @@ class RelationshipProperty(StrategizedProperty):
|
|||
self._check_cascade_settings(self._cascade)
|
||||
self._post_init()
|
||||
self._generate_backref()
|
||||
self._join_condition._warn_for_conflicting_sync_targets()
|
||||
super(RelationshipProperty, self).do_init()
|
||||
self._lazy_strategy = self._get_strategy((("lazy", "select"),))
|
||||
|
||||
|
|
@ -1637,7 +1717,7 @@ class RelationshipProperty(StrategizedProperty):
|
|||
"""Test that this relationship is legal, warn about
|
||||
inheritance conflicts."""
|
||||
|
||||
if not self.is_primary() and not mapperlib.class_mapper(
|
||||
if self.parent.non_primary and not mapperlib.class_mapper(
|
||||
self.parent.class_,
|
||||
configure=False).has_property(self.key):
|
||||
raise sa_exc.ArgumentError(
|
||||
|
|
@ -1723,7 +1803,7 @@ class RelationshipProperty(StrategizedProperty):
|
|||
"""Interpret the 'backref' instruction to create a
|
||||
:func:`.relationship` complementary to this one."""
|
||||
|
||||
if not self.is_primary():
|
||||
if self.parent.non_primary:
|
||||
return
|
||||
if self.backref is not None and not self.back_populates:
|
||||
if isinstance(self.backref, util.string_types):
|
||||
|
|
@ -2200,7 +2280,7 @@ class JoinCondition(object):
|
|||
elif self._local_remote_pairs or self._remote_side:
|
||||
self._annotate_remote_from_args()
|
||||
elif self._refers_to_parent_table():
|
||||
self._annotate_selfref(lambda col: "foreign" in col._annotations)
|
||||
self._annotate_selfref(lambda col: "foreign" in col._annotations, False)
|
||||
elif self._tables_overlap():
|
||||
self._annotate_remote_with_overlap()
|
||||
else:
|
||||
|
|
@ -2219,7 +2299,7 @@ class JoinCondition(object):
|
|||
self.secondaryjoin = visitors.replacement_traverse(
|
||||
self.secondaryjoin, {}, repl)
|
||||
|
||||
def _annotate_selfref(self, fn):
|
||||
def _annotate_selfref(self, fn, remote_side_given):
|
||||
"""annotate 'remote' in primaryjoin, secondaryjoin
|
||||
when the relationship is detected as self-referential.
|
||||
|
||||
|
|
@ -2234,7 +2314,7 @@ class JoinCondition(object):
|
|||
if fn(binary.right) and not equated:
|
||||
binary.right = binary.right._annotate(
|
||||
{"remote": True})
|
||||
else:
|
||||
elif not remote_side_given:
|
||||
self._warn_non_column_elements()
|
||||
|
||||
self.primaryjoin = visitors.cloned_traverse(
|
||||
|
|
@ -2259,7 +2339,7 @@ class JoinCondition(object):
|
|||
remote_side = self._remote_side
|
||||
|
||||
if self._refers_to_parent_table():
|
||||
self._annotate_selfref(lambda col: col in remote_side)
|
||||
self._annotate_selfref(lambda col: col in remote_side, True)
|
||||
else:
|
||||
def repl(element):
|
||||
if element in remote_side:
|
||||
|
|
@ -2280,12 +2360,21 @@ class JoinCondition(object):
|
|||
binary.right, binary.left = proc_left_right(binary.right,
|
||||
binary.left)
|
||||
|
||||
check_entities = self.prop is not None and \
|
||||
self.prop.mapper is not self.prop.parent
|
||||
|
||||
def proc_left_right(left, right):
|
||||
if isinstance(left, expression.ColumnClause) and \
|
||||
isinstance(right, expression.ColumnClause):
|
||||
if self.child_selectable.c.contains_column(right) and \
|
||||
self.parent_selectable.c.contains_column(left):
|
||||
right = right._annotate({"remote": True})
|
||||
elif check_entities and \
|
||||
right._annotations.get('parentmapper') is self.prop.mapper:
|
||||
right = right._annotate({"remote": True})
|
||||
elif check_entities and \
|
||||
left._annotations.get('parentmapper') is self.prop.mapper:
|
||||
left = left._annotate({"remote": True})
|
||||
else:
|
||||
self._warn_non_column_elements()
|
||||
|
||||
|
|
@ -2538,6 +2627,60 @@ class JoinCondition(object):
|
|||
self.secondary_synchronize_pairs = \
|
||||
self._deannotate_pairs(secondary_sync_pairs)
|
||||
|
||||
_track_overlapping_sync_targets = weakref.WeakKeyDictionary()
|
||||
|
||||
def _warn_for_conflicting_sync_targets(self):
|
||||
if not self.support_sync:
|
||||
return
|
||||
|
||||
# we would like to detect if we are synchronizing any column
|
||||
# pairs in conflict with another relationship that wishes to sync
|
||||
# an entirely different column to the same target. This is a
|
||||
# very rare edge case so we will try to minimize the memory/overhead
|
||||
# impact of this check
|
||||
for from_, to_ in [
|
||||
(from_, to_) for (from_, to_) in self.synchronize_pairs
|
||||
] + [
|
||||
(from_, to_) for (from_, to_) in self.secondary_synchronize_pairs
|
||||
]:
|
||||
# save ourselves a ton of memory and overhead by only
|
||||
# considering columns that are subject to overlapping
|
||||
# FK constraints at the core level. This condition can arise
|
||||
# if multiple relationships overlap foreign() directly, but
|
||||
# we're going to assume it's typically a ForeignKeyConstraint-
|
||||
# level configuration that benefits from this warning.
|
||||
if len(to_.foreign_keys) < 2:
|
||||
continue
|
||||
|
||||
if to_ not in self._track_overlapping_sync_targets:
|
||||
self._track_overlapping_sync_targets[to_] = \
|
||||
weakref.WeakKeyDictionary({self.prop: from_})
|
||||
else:
|
||||
other_props = []
|
||||
prop_to_from = self._track_overlapping_sync_targets[to_]
|
||||
for pr, fr_ in prop_to_from.items():
|
||||
if pr.mapper in mapperlib._mapper_registry and \
|
||||
fr_ is not from_ and \
|
||||
pr not in self.prop._reverse_property:
|
||||
other_props.append((pr, fr_))
|
||||
|
||||
if other_props:
|
||||
util.warn(
|
||||
"relationship '%s' will copy column %s to column %s, "
|
||||
"which conflicts with relationship(s): %s. "
|
||||
"Consider applying "
|
||||
"viewonly=True to read-only relationships, or provide "
|
||||
"a primaryjoin condition marking writable columns "
|
||||
"with the foreign() annotation." % (
|
||||
self.prop,
|
||||
from_, to_,
|
||||
", ".join(
|
||||
"'%s' (copies %s to %s)" % (pr, fr_, to_)
|
||||
for (pr, fr_) in other_props)
|
||||
)
|
||||
)
|
||||
self._track_overlapping_sync_targets[to_][self.prop] = from_
|
||||
|
||||
@util.memoized_property
|
||||
def remote_columns(self):
|
||||
return self._gather_join_annotations("remote")
|
||||
|
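Schematically, the remedy suggested by the new warning above looks like
the following; note the warning itself only fires when the target column
is shared by overlapping foreign key constraints (models hypothetical)::

    class Widget(Base):
        __tablename__ = 'widget'
        id = Column(Integer, primary_key=True)

        # writable relationship: owns the sync of related.widget_id
        related = relationship("Related")

        # overlapping, read-only view of the same rows: viewonly=True
        # removes it from the synchronization calculus entirely
        active_related = relationship(
            "Related",
            primaryjoin="and_(Widget.id == Related.widget_id, "
                        "Related.active == True)",
            viewonly=True)

    class Related(Base):
        __tablename__ = 'related'
        id = Column(Integer, primary_key=True)
        widget_id = Column(Integer, ForeignKey('widget.id'))
        active = Column(Boolean)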
|
@ -2654,27 +2797,31 @@ class JoinCondition(object):
|
|||
|
||||
def create_lazy_clause(self, reverse_direction=False):
|
||||
binds = util.column_dict()
|
||||
lookup = collections.defaultdict(list)
|
||||
equated_columns = util.column_dict()
|
||||
|
||||
if reverse_direction and self.secondaryjoin is None:
|
||||
for l, r in self.local_remote_pairs:
|
||||
lookup[r].append((r, l))
|
||||
equated_columns[l] = r
|
||||
else:
|
||||
# replace all "local side" columns, which is
|
||||
# anything that isn't marked "remote"
|
||||
has_secondary = self.secondaryjoin is not None
|
||||
|
||||
if has_secondary:
|
||||
lookup = collections.defaultdict(list)
|
||||
for l, r in self.local_remote_pairs:
|
||||
lookup[l].append((l, r))
|
||||
equated_columns[r] = l
|
||||
elif not reverse_direction:
|
||||
for l, r in self.local_remote_pairs:
|
||||
equated_columns[r] = l
|
||||
else:
|
||||
for l, r in self.local_remote_pairs:
|
||||
equated_columns[l] = r
|
||||
|
||||
def col_to_bind(col):
|
||||
if (reverse_direction and col in lookup) or \
|
||||
(not reverse_direction and "local" in col._annotations):
|
||||
if col in lookup:
|
||||
for tobind, equated in lookup[col]:
|
||||
if equated in binds:
|
||||
return None
|
||||
|
||||
if (
|
||||
(not reverse_direction and 'local' in col._annotations) or
|
||||
reverse_direction and (
|
||||
(has_secondary and col in lookup) or
|
||||
(not has_secondary and 'remote' in col._annotations)
|
||||
)
|
||||
):
|
||||
if col not in binds:
|
||||
binds[col] = sql.bindparam(
|
||||
None, None, type_=col.type, unique=True)
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
# orm/scoping.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
|
|
@ -21,6 +21,12 @@ class scoped_session(object):
|
|||
|
||||
"""
|
||||
|
||||
session_factory = None
|
||||
"""The `session_factory` provided to `__init__` is stored in this
|
||||
attribute and may be accessed at a later time. This can be useful when
|
||||
a new non-scoped :class:`.Session` or :class:`.Connection` to the
|
||||
database is needed."""
|
||||
|
||||
def __init__(self, session_factory, scopefunc=None):
|
||||
"""Construct a new :class:`.scoped_session`.
|
||||
|
||||
|
|
@ -38,6 +44,7 @@ class scoped_session(object):
|
|||
|
||||
"""
|
||||
self.session_factory = session_factory
|
||||
|
||||
if scopefunc:
|
||||
self.registry = ScopedRegistry(session_factory, scopefunc)
|
||||
else:
|
||||
|
|
@ -45,12 +52,12 @@ class scoped_session(object):
|
|||
|
||||
def __call__(self, **kw):
|
||||
"""Return the current :class:`.Session`, creating it
|
||||
using the session factory if not present.
|
||||
using the :attr:`.scoped_session.session_factory` if not present.
|
||||
|
||||
:param \**kw: Keyword arguments will be passed to the
|
||||
session factory callable, if an existing :class:`.Session`
|
||||
is not present. If the :class:`.Session` is present and
|
||||
keyword arguments have been passed,
|
||||
:attr:`.scoped_session.session_factory` callable, if an existing
|
||||
:class:`.Session` is not present. If the :class:`.Session` is present
|
||||
and keyword arguments have been passed,
|
||||
:exc:`~sqlalchemy.exc.InvalidRequestError` is raised.
|
||||
|
||||
"""
|
||||
|
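Usage sketch for the newly documented
:attr:`.scoped_session.session_factory` attribute (``engine`` assumed)::

    from sqlalchemy.orm import scoped_session, sessionmaker

    Session = scoped_session(sessionmaker(bind=engine))

    scoped = Session()                    # registry-managed Session
    plain = Session.session_factory()     # new, non-scoped Session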
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
# orm/session.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
|
|
@ -20,6 +20,8 @@ from .base import (
|
|||
_class_to_mapper, _state_mapper, object_state,
|
||||
_none_set, state_str, instance_str
|
||||
)
|
||||
import itertools
|
||||
from . import persistence
|
||||
from .unitofwork import UOWTransaction
|
||||
from . import state as statelib
|
||||
import sys
|
||||
|
|
@ -45,7 +47,6 @@ def _state_session(state):
|
|||
|
||||
|
||||
class _SessionClassMethods(object):
|
||||
|
||||
"""Class-level methods for :class:`.Session`, :class:`.sessionmaker`."""
|
||||
|
||||
@classmethod
|
||||
|
|
@ -84,7 +85,6 @@ CLOSED = util.symbol('CLOSED')
|
|||
|
||||
|
||||
class SessionTransaction(object):
|
||||
|
||||
"""A :class:`.Session`-level transaction.
|
||||
|
||||
:class:`.SessionTransaction` is a mostly behind-the-scenes object
|
||||
|
|
@ -226,10 +226,10 @@ class SessionTransaction(object):
|
|||
def _is_transaction_boundary(self):
|
||||
return self.nested or not self._parent
|
||||
|
||||
def connection(self, bindkey, **kwargs):
|
||||
def connection(self, bindkey, execution_options=None, **kwargs):
|
||||
self._assert_active()
|
||||
bind = self.session.get_bind(bindkey, **kwargs)
|
||||
return self._connection_for_bind(bind)
|
||||
return self._connection_for_bind(bind, execution_options)
|
||||
|
||||
def _begin(self, nested=False):
|
||||
self._assert_active()
|
||||
|
|
@ -237,14 +237,21 @@ class SessionTransaction(object):
|
|||
self.session, self, nested=nested)
|
||||
|
||||
def _iterate_parents(self, upto=None):
|
||||
if self._parent is upto:
|
||||
return (self,)
|
||||
else:
|
||||
if self._parent is None:
|
||||
|
||||
current = self
|
||||
result = ()
|
||||
while current:
|
||||
result += (current, )
|
||||
if current._parent is upto:
|
||||
break
|
||||
elif current._parent is None:
|
||||
raise sa_exc.InvalidRequestError(
|
||||
"Transaction %s is not on the active transaction list" % (
|
||||
upto))
|
||||
return (self,) + self._parent._iterate_parents(upto)
|
||||
else:
|
||||
current = current._parent
|
||||
|
||||
return result
|
||||
|
||||
def _take_snapshot(self):
|
||||
if not self._is_transaction_boundary:
|
||||
|
|
@ -271,7 +278,7 @@ class SessionTransaction(object):
|
|||
del s.key
|
||||
|
||||
for s, (oldkey, newkey) in self._key_switches.items():
|
||||
self.session.identity_map.discard(s)
|
||||
self.session.identity_map.safe_discard(s)
|
||||
s.key = oldkey
|
||||
self.session.identity_map.replace(s)
|
||||
|
||||
|
|
@ -293,22 +300,27 @@ class SessionTransaction(object):
|
|||
if not self.nested and self.session.expire_on_commit:
|
||||
for s in self.session.identity_map.all_states():
|
||||
s._expire(s.dict, self.session.identity_map._modified)
|
||||
for s in self._deleted:
|
||||
s.session_id = None
|
||||
for s in list(self._deleted):
|
||||
s._detach()
|
||||
self._deleted.clear()
|
||||
elif self.nested:
|
||||
self._parent._new.update(self._new)
|
||||
self._parent._dirty.update(self._dirty)
|
||||
self._parent._deleted.update(self._deleted)
|
||||
self._parent._key_switches.update(self._key_switches)
|
||||
|
||||
def _connection_for_bind(self, bind):
|
||||
def _connection_for_bind(self, bind, execution_options):
|
||||
self._assert_active()
|
||||
|
||||
if bind in self._connections:
|
||||
if execution_options:
|
||||
util.warn(
|
||||
"Connection is already established for the "
|
||||
"given bind; execution_options ignored")
|
||||
return self._connections[bind][0]
|
||||
|
||||
if self._parent:
|
||||
conn = self._parent._connection_for_bind(bind)
|
||||
conn = self._parent._connection_for_bind(bind, execution_options)
|
||||
if not self.nested:
|
||||
return conn
|
||||
else:
|
||||
|
|
@ -321,6 +333,9 @@ class SessionTransaction(object):
|
|||
else:
|
||||
conn = bind.contextual_connect()
|
||||
|
||||
if execution_options:
|
||||
conn = conn.execution_options(**execution_options)
|
||||
|
||||
if self.session.twophase and self._parent is None:
|
||||
transaction = conn.begin_twophase()
|
||||
elif self.nested:
|
||||
|
|
@ -397,26 +412,29 @@ class SessionTransaction(object):
|
|||
for subtransaction in stx._iterate_parents(upto=self):
|
||||
subtransaction.close()
|
||||
|
||||
boundary = self
|
||||
if self._state in (ACTIVE, PREPARED):
|
||||
for transaction in self._iterate_parents():
|
||||
if transaction._parent is None or transaction.nested:
|
||||
transaction._rollback_impl()
|
||||
transaction._state = DEACTIVE
|
||||
boundary = transaction
|
||||
break
|
||||
else:
|
||||
transaction._state = DEACTIVE
|
||||
|
||||
sess = self.session
|
||||
|
||||
if self.session._enable_transaction_accounting and \
|
||||
if sess._enable_transaction_accounting and \
|
||||
not sess._is_clean():
|
||||
|
||||
# if items were added, deleted, or mutated
|
||||
# here, we need to re-restore the snapshot
|
||||
util.warn(
|
||||
"Session's state has been changed on "
|
||||
"a non-active transaction - this state "
|
||||
"will be discarded.")
|
||||
self._restore_snapshot(dirty_only=self.nested)
|
||||
boundary._restore_snapshot(dirty_only=boundary.nested)
|
||||
|
||||
self.close()
|
||||
if self._parent and _capture_exception:
|
||||
|
|
@ -435,11 +453,13 @@ class SessionTransaction(object):
|
|||
|
||||
self.session.dispatch.after_rollback(self.session)
|
||||
|
||||
def close(self):
|
||||
def close(self, invalidate=False):
|
||||
self.session.transaction = self._parent
|
||||
if self._parent is None:
|
||||
for connection, transaction, autoclose in \
|
||||
set(self._connections.values()):
|
||||
if invalidate:
|
||||
connection.invalidate()
|
||||
if autoclose:
|
||||
connection.close()
|
||||
else:
|
||||
|
|
@ -473,7 +493,6 @@ class SessionTransaction(object):
|
|||
|
||||
|
||||
class Session(_SessionClassMethods):
|
||||
|
||||
"""Manages persistence operations for ORM-mapped objects.
|
||||
|
||||
The Session's usage paradigm is described at :doc:`/orm/session`.
|
||||
|
|
@ -485,7 +504,8 @@ class Session(_SessionClassMethods):
|
|||
'__contains__', '__iter__', 'add', 'add_all', 'begin', 'begin_nested',
|
||||
'close', 'commit', 'connection', 'delete', 'execute', 'expire',
|
||||
'expire_all', 'expunge', 'expunge_all', 'flush', 'get_bind',
|
||||
'is_modified',
|
||||
'is_modified', 'bulk_save_objects', 'bulk_insert_mappings',
|
||||
'bulk_update_mappings',
|
||||
'merge', 'query', 'refresh', 'rollback',
|
||||
'scalar')
|
||||
|
||||
|
|
@ -529,8 +549,8 @@ class Session(_SessionClassMethods):
|
|||
:meth:`~.Session.flush` call to this ``Session`` before proceeding.
|
||||
This is a convenience feature so that :meth:`~.Session.flush` need
|
||||
not be called repeatedly in order for database queries to retrieve
|
||||
results. It's typical that ``autoflush`` is used in conjunction with
|
||||
``autocommit=False``. In this scenario, explicit calls to
|
||||
results. It's typical that ``autoflush`` is used in conjunction
|
||||
with ``autocommit=False``. In this scenario, explicit calls to
|
||||
:meth:`~.Session.flush` are rarely needed; you usually only need to
|
||||
call :meth:`~.Session.commit` (which flushes) to finalize changes.
|
||||
|
||||
|
|
@ -546,8 +566,8 @@ class Session(_SessionClassMethods):
|
|||
:class:`.Engine` or :class:`.Connection` objects. Operations which
|
||||
proceed relative to a particular :class:`.Mapper` will consult this
|
||||
dictionary for the direct :class:`.Mapper` instance as
|
||||
well as the mapper's ``mapped_table`` attribute in order to locate a
|
||||
connectable to use. The full resolution is described in the
|
||||
well as the mapper's ``mapped_table`` attribute in order to locate
|
||||
a connectable to use. The full resolution is described in the
|
||||
:meth:`.Session.get_bind`.
|
||||
Usage looks like::
|
||||
|
||||
|
|
@ -594,8 +614,8 @@ class Session(_SessionClassMethods):
|
|||
.. versionadded:: 0.9.0
|
||||
|
||||
:param query_cls: Class which should be used to create new Query
|
||||
objects, as returned by the :meth:`~.Session.query` method. Defaults
|
||||
to :class:`.Query`.
|
||||
objects, as returned by the :meth:`~.Session.query` method.
|
||||
Defaults to :class:`.Query`.
|
||||
|
||||
:param twophase: When ``True``, all transactions will be started as
|
||||
a "two phase" transaction, i.e. using the "two phase" semantics
|
||||
|
|
@ -610,15 +630,26 @@ class Session(_SessionClassMethods):
|
|||
``False``, objects placed in the :class:`.Session` will be
|
||||
strongly referenced until explicitly removed or the
|
||||
:class:`.Session` is closed. **Deprecated** - this option
|
||||
is obsolete.
|
||||
is present to allow compatibility with older applications, but
|
||||
it is recommended that strong references to objects
|
||||
be maintained by the calling application
|
||||
externally to the :class:`.Session` itself,
|
||||
to the extent that is required by the application.
|
||||
|
||||
"""
|
||||
|
||||
if weak_identity_map:
|
||||
self._identity_cls = identity.WeakInstanceDict
|
||||
else:
|
||||
util.warn_deprecated("weak_identity_map=False is deprecated. "
|
||||
"This feature is not needed.")
|
||||
util.warn_deprecated(
|
||||
"weak_identity_map=False is deprecated. "
|
||||
"It is present to allow compatibility with older "
|
||||
"applications, but "
|
||||
"it is recommended that strong references to "
|
||||
"objects be maintained by the calling application "
|
||||
"externally to the :class:`.Session` itself, "
|
||||
"to the extent that is required by the application.")
|
||||
|
||||
self._identity_cls = identity.StrongInstanceDict
|
||||
self.identity_map = self._identity_cls()
|
||||
|
||||
|
|
@ -644,14 +675,8 @@ class Session(_SessionClassMethods):
|
|||
SessionExtension._adapt_listener(self, ext)
|
||||
|
||||
if binds is not None:
|
||||
for mapperortable, bind in binds.items():
|
||||
insp = inspect(mapperortable)
|
||||
if insp.is_selectable:
|
||||
self.bind_table(mapperortable, bind)
|
||||
elif insp.is_mapper:
|
||||
self.bind_mapper(mapperortable, bind)
|
||||
else:
|
||||
assert False
|
||||
for key, bind in binds.items():
|
||||
self._add_bind(key, bind)
|
||||
|
||||
if not self.autocommit:
|
||||
self.begin()
|
||||
|
|
@ -666,7 +691,7 @@ class Session(_SessionClassMethods):
|
|||
def info(self):
|
||||
"""A user-modifiable dictionary.
|
||||
|
||||
The initial value of this dictioanry can be populated using the
|
||||
The initial value of this dictionary can be populated using the
|
||||
``info`` argument to the :class:`.Session` constructor or
|
||||
:class:`.sessionmaker` constructor or factory methods. The dictionary
|
||||
here is always local to this :class:`.Session` and can be modified
|
||||
|
|
@ -797,6 +822,7 @@ class Session(_SessionClassMethods):
|
|||
def connection(self, mapper=None, clause=None,
|
||||
bind=None,
|
||||
close_with_result=False,
|
||||
execution_options=None,
|
||||
**kw):
|
||||
"""Return a :class:`.Connection` object corresponding to this
|
||||
:class:`.Session` object's transactional state.
|
||||
|
|
@ -841,6 +867,18 @@ class Session(_SessionClassMethods):
|
|||
configured with ``autocommit=True`` and does not already have a
|
||||
transaction in progress.
|
||||
|
||||
:param execution_options: a dictionary of execution options that will
|
||||
be passed to :meth:`.Connection.execution_options`, **when the
|
||||
connection is first procured only**. If the connection is already
|
||||
present within the :class:`.Session`, a warning is emitted and
|
||||
the arguments are ignored.
|
||||
|
||||
.. versionadded:: 0.9.9
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`session_transaction_isolation`
|
||||
|
||||
:param \**kw:
|
||||
Additional keyword arguments are sent to :meth:`get_bind()`,
|
||||
allowing additional arguments to be passed to custom
|
||||
|
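A sketch of the new ``execution_options`` parameter, e.g. setting the
transaction isolation level for the upcoming transaction (``engine``
assumed; the backend must support the named isolation level)::

    from sqlalchemy.orm import Session

    session = Session(bind=engine)

    # honored only when the connection is first procured for the
    # current transaction; otherwise a warning is emitted
    connection = session.connection(
        execution_options={'isolation_level': 'SERIALIZABLE'})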
|
@ -851,13 +889,18 @@ class Session(_SessionClassMethods):
|
|||
bind = self.get_bind(mapper, clause=clause, **kw)
|
||||
|
||||
return self._connection_for_bind(bind,
|
||||
close_with_result=close_with_result)
|
||||
close_with_result=close_with_result,
|
||||
execution_options=execution_options)
|
||||
|
||||
def _connection_for_bind(self, engine, **kwargs):
|
||||
def _connection_for_bind(self, engine, execution_options=None, **kw):
|
||||
if self.transaction is not None:
|
||||
return self.transaction._connection_for_bind(engine)
|
||||
return self.transaction._connection_for_bind(
|
||||
engine, execution_options)
|
||||
else:
|
||||
return engine.contextual_connect(**kwargs)
|
||||
conn = engine.contextual_connect(**kw)
|
||||
if execution_options:
|
||||
conn = conn.execution_options(**execution_options)
|
||||
return conn
|
||||
|
||||
def execute(self, clause, params=None, mapper=None, bind=None, **kw):
|
||||
"""Execute a SQL expression construct or string statement within
|
||||
|
|
@ -1006,10 +1049,46 @@ class Session(_SessionClassMethods):
|
|||
not use any connection resources until they are first needed.
|
||||
|
||||
"""
|
||||
self._close_impl(invalidate=False)
|
||||
|
||||
def invalidate(self):
|
||||
"""Close this Session, using connection invalidation.
|
||||
|
||||
This is a variant of :meth:`.Session.close` that will additionally
|
||||
ensure that the :meth:`.Connection.invalidate` method will be called
|
||||
on all :class:`.Connection` objects. This can be called when
|
||||
the database is known to be in a state where the connections are
|
||||
no longer safe to be used.
|
||||
|
||||
E.g.::
|
||||
|
||||
try:
|
||||
sess = Session()
|
||||
sess.add(User())
|
||||
sess.commit()
|
||||
except gevent.Timeout:
|
||||
sess.invalidate()
|
||||
raise
|
||||
except:
|
||||
sess.rollback()
|
||||
raise
|
||||
|
||||
This clears all items and ends any transaction in progress.
|
||||
|
||||
If this session were created with ``autocommit=False``, a new
|
||||
transaction is immediately begun. Note that this new transaction does
|
||||
not use any connection resources until they are first needed.
|
||||
|
||||
.. versionadded:: 0.9.9
|
||||
|
||||
"""
|
||||
self._close_impl(invalidate=True)
|
||||
|
||||
def _close_impl(self, invalidate):
|
||||
self.expunge_all()
|
||||
if self.transaction is not None:
|
||||
for transaction in self.transaction._iterate_parents():
|
||||
transaction.close()
|
||||
transaction.close(invalidate)
|
||||
|
||||
def expunge_all(self):
|
||||
"""Remove all object instances from this ``Session``.
|
||||
|
|
@ -1029,40 +1108,47 @@ class Session(_SessionClassMethods):
|
|||
# TODO: + crystallize + document resolution order
|
||||
# vis. bind_mapper/bind_table
|
||||
|
||||
def _add_bind(self, key, bind):
|
||||
try:
|
||||
insp = inspect(key)
|
||||
except sa_exc.NoInspectionAvailable:
|
||||
if not isinstance(key, type):
|
||||
raise exc.ArgumentError(
|
||||
"Not acceptable bind target: %s" %
|
||||
key)
|
||||
else:
|
||||
self.__binds[key] = bind
|
||||
else:
|
||||
if insp.is_selectable:
|
||||
self.__binds[insp] = bind
|
||||
elif insp.is_mapper:
|
||||
self.__binds[insp.class_] = bind
|
||||
for selectable in insp._all_tables:
|
||||
self.__binds[selectable] = bind
|
||||
else:
|
||||
raise exc.ArgumentError(
|
||||
"Not acceptable bind target: %s" %
|
||||
key)
|
||||
|
||||
def bind_mapper(self, mapper, bind):
|
||||
"""Bind operations for a mapper to a Connectable.
|
||||
"""Associate a :class:`.Mapper` with a "bind", e.g. a :class:`.Engine`
|
||||
or :class:`.Connection`.
|
||||
|
||||
mapper
|
||||
A mapper instance or mapped class
|
||||
|
||||
bind
|
||||
Any Connectable: a :class:`.Engine` or :class:`.Connection`.
|
||||
|
||||
All subsequent operations involving this mapper will use the given
|
||||
`bind`.
|
||||
The given mapper is added to a lookup used by the
|
||||
:meth:`.Session.get_bind` method.
|
||||
|
||||
"""
|
||||
if isinstance(mapper, type):
|
||||
mapper = class_mapper(mapper)
|
||||
|
||||
self.__binds[mapper.base_mapper] = bind
|
||||
for t in mapper._all_tables:
|
||||
self.__binds[t] = bind
|
||||
self._add_bind(mapper, bind)
|
||||
|
||||
def bind_table(self, table, bind):
|
||||
"""Bind operations on a Table to a Connectable.
|
||||
"""Associate a :class:`.Table` with a "bind", e.g. a :class:`.Engine`
|
||||
or :class:`.Connection`.
|
||||
|
||||
table
|
||||
A :class:`.Table` instance
|
||||
|
||||
bind
|
||||
Any Connectable: a :class:`.Engine` or :class:`.Connection`.
|
||||
|
||||
All subsequent operations involving this :class:`.Table` will use the
|
||||
given `bind`.
|
||||
The given table is added to a lookup used by the
|
||||
:meth:`.Session.get_bind` method.
|
||||
|
||||
"""
|
||||
self.__binds[table] = bind
|
||||
self._add_bind(table, bind)
|
||||
|
||||
def get_bind(self, mapper=None, clause=None):
|
||||
"""Return a "bind" to which this :class:`.Session` is bound.
|
||||
|
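With ``_add_bind`` in place, the ``binds`` dictionary and the ``bind_*``
methods accept mapped classes, unmapped base classes, mappers, and
:class:`.Table` objects uniformly. A sketch (``User``, ``some_table``,
``engine1`` and ``engine2`` assumed)::

    from sqlalchemy.orm import sessionmaker

    Session = sessionmaker()
    session = Session(binds={
        User: engine1,          # a mapped class
        some_table: engine2,    # a Table / selectable
    })

    # equivalent post-construction forms:
    session.bind_mapper(User, engine1)
    session.bind_table(some_table, engine2)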
|
@ -1116,6 +1202,7 @@ class Session(_SessionClassMethods):
|
|||
bound :class:`.MetaData`.
|
||||
|
||||
"""
|
||||
|
||||
if mapper is clause is None:
|
||||
if self.bind:
|
||||
return self.bind
|
||||
|
|
@ -1125,15 +1212,23 @@ class Session(_SessionClassMethods):
|
|||
"Connection, and no context was provided to locate "
|
||||
"a binding.")
|
||||
|
||||
c_mapper = mapper is not None and _class_to_mapper(mapper) or None
|
||||
if mapper is not None:
|
||||
try:
|
||||
mapper = inspect(mapper)
|
||||
except sa_exc.NoInspectionAvailable:
|
||||
if isinstance(mapper, type):
|
||||
raise exc.UnmappedClassError(mapper)
|
||||
else:
|
||||
raise
|
||||
|
||||
# manually bound?
|
||||
if self.__binds:
|
||||
if c_mapper:
|
||||
if c_mapper.base_mapper in self.__binds:
|
||||
return self.__binds[c_mapper.base_mapper]
|
||||
elif c_mapper.mapped_table in self.__binds:
|
||||
return self.__binds[c_mapper.mapped_table]
|
||||
if mapper:
|
||||
for cls in mapper.class_.__mro__:
|
||||
if cls in self.__binds:
|
||||
return self.__binds[cls]
|
||||
if clause is None:
|
||||
clause = mapper.mapped_table
|
||||
|
||||
if clause is not None:
|
||||
for t in sql_util.find_tables(clause, include_crud=True):
|
||||
if t in self.__binds:
|
||||
|
|
@ -1145,12 +1240,12 @@ class Session(_SessionClassMethods):
|
|||
if isinstance(clause, sql.expression.ClauseElement) and clause.bind:
|
||||
return clause.bind
|
||||
|
||||
if c_mapper and c_mapper.mapped_table.bind:
|
||||
return c_mapper.mapped_table.bind
|
||||
if mapper and mapper.mapped_table.bind:
|
||||
return mapper.mapped_table.bind
|
||||
|
||||
context = []
|
||||
if mapper is not None:
|
||||
context.append('mapper %s' % c_mapper)
|
||||
context.append('mapper %s' % mapper)
|
||||
if clause is not None:
|
||||
context.append('SQL expression')
|
||||
|
||||
|
|
@ -1397,11 +1492,12 @@ class Session(_SessionClassMethods):
|
|||
self._new.pop(state)
|
||||
state._detach()
|
||||
elif self.identity_map.contains_state(state):
|
||||
self.identity_map.discard(state)
|
||||
self.identity_map.safe_discard(state)
|
||||
self._deleted.pop(state, None)
|
||||
state._detach()
|
||||
elif self.transaction:
|
||||
self.transaction._deleted.pop(state, None)
|
||||
state._detach()
|
||||
|
||||
def _register_newly_persistent(self, states):
|
||||
for state in states:
|
||||
|
|
@ -1413,7 +1509,7 @@ class Session(_SessionClassMethods):
|
|||
|
||||
instance_key = mapper._identity_key_from_state(state)
|
||||
|
||||
if _none_set.issubset(instance_key[1]) and \
|
||||
if _none_set.intersection(instance_key[1]) and \
|
||||
not mapper.allow_partial_pks or \
|
||||
_none_set.issuperset(instance_key[1]):
|
||||
raise exc.FlushError(
|
||||
|
|
@ -1430,10 +1526,10 @@ class Session(_SessionClassMethods):
|
|||
if state.key is None:
|
||||
state.key = instance_key
|
||||
elif state.key != instance_key:
|
||||
# primary key switch. use discard() in case another
|
||||
# primary key switch. use safe_discard() in case another
|
||||
# state has already replaced this one in the identity
|
||||
# map (see test/orm/test_naturalpks.py ReversePKsTest)
|
||||
self.identity_map.discard(state)
|
||||
self.identity_map.safe_discard(state)
|
||||
if state in self.transaction._key_switches:
|
||||
orig_key = self.transaction._key_switches[state][0]
|
||||
else:
|
||||
|
|
@ -1467,7 +1563,7 @@ class Session(_SessionClassMethods):
|
|||
if self._enable_transaction_accounting and self.transaction:
|
||||
self.transaction._deleted[state] = True
|
||||
|
||||
self.identity_map.discard(state)
|
||||
self.identity_map.safe_discard(state)
|
||||
self._deleted.pop(state, None)
|
||||
state.deleted = True
|
||||
|
||||
|
|
@ -1630,6 +1726,9 @@ class Session(_SessionClassMethods):
|
|||
"all changes on mapped instances before merging with "
|
||||
"load=False.")
|
||||
key = mapper._identity_key_from_state(state)
|
||||
key_is_persistent = attributes.NEVER_SET not in key[1]
|
||||
else:
|
||||
key_is_persistent = True
|
||||
|
||||
if key in self.identity_map:
|
||||
merged = self.identity_map[key]
|
||||
|
|
@ -1646,9 +1745,10 @@ class Session(_SessionClassMethods):
|
|||
self._update_impl(merged_state)
|
||||
new_instance = True
|
||||
|
||||
elif not _none_set.issubset(key[1]) or \
|
||||
elif key_is_persistent and (
|
||||
not _none_set.intersection(key[1]) or
|
||||
(mapper.allow_partial_pks and
|
||||
not _none_set.issuperset(key[1])):
|
||||
not _none_set.issuperset(key[1]))):
|
||||
merged = self.query(mapper.class_).get(key[1])
|
||||
else:
|
||||
merged = None
|
||||
|
|
@ -1746,7 +1846,7 @@ class Session(_SessionClassMethods):
|
|||
"function to send this object back to the transient state." %
|
||||
state_str(state)
|
||||
)
|
||||
self._before_attach(state)
|
||||
self._before_attach(state, check_identity_map=False)
|
||||
self._deleted.pop(state, None)
|
||||
if discard_existing:
|
||||
self.identity_map.replace(state)
|
||||
|
|
@ -1826,13 +1926,12 @@ class Session(_SessionClassMethods):
|
|||
self._attach(state, include_before=True)
|
||||
state._load_pending = True
|
||||
|
||||
def _before_attach(self, state):
|
||||
def _before_attach(self, state, check_identity_map=True):
|
||||
if state.session_id != self.hash_key and \
|
||||
self.dispatch.before_attach:
|
||||
self.dispatch.before_attach(self, state.obj())
|
||||
|
||||
def _attach(self, state, include_before=False):
|
||||
if state.key and \
|
||||
if check_identity_map and state.key and \
|
||||
state.key in self.identity_map and \
|
||||
not self.identity_map.contains_state(state):
|
||||
raise sa_exc.InvalidRequestError(
|
||||
|
|
@ -1848,10 +1947,11 @@ class Session(_SessionClassMethods):
|
|||
"(this is '%s')" % (state_str(state),
|
||||
state.session_id, self.hash_key))
|
||||
|
||||
def _attach(self, state, include_before=False):
|
||||
|
||||
if state.session_id != self.hash_key:
|
||||
if include_before and \
|
||||
self.dispatch.before_attach:
|
||||
self.dispatch.before_attach(self, state.obj())
|
||||
if include_before:
|
||||
self._before_attach(state)
|
||||
state.session_id = self.hash_key
|
||||
if state.modified and state._strong_obj is None:
|
||||
state._strong_obj = state.obj()
|
||||
|
|
@ -1898,7 +1998,7 @@ class Session(_SessionClassMethods):
|
|||
|
||||
For ``autocommit`` Sessions with no active manual transaction, flush()
|
||||
will create a transaction on the fly that surrounds the entire set of
|
||||
operations int the flush.
|
||||
operations into the flush.
|
||||
|
||||
:param objects: Optional; restricts the flush operation to operate
|
||||
only on elements that are in the given collection.
|
||||
|
|
@ -2036,6 +2136,226 @@ class Session(_SessionClassMethods):
|
|||
with util.safe_reraise():
|
||||
transaction.rollback(_capture_exception=True)
|
||||
|
||||
def bulk_save_objects(
|
||||
self, objects, return_defaults=False, update_changed_only=True):
|
||||
"""Perform a bulk save of the given list of objects.
|
||||
|
||||
The bulk save feature allows mapped objects to be used as the
|
||||
source of simple INSERT and UPDATE operations which can be more easily
|
||||
grouped together into higher performing "executemany"
|
||||
operations; the extraction of data from the objects is also performed
|
||||
using a lower-latency process that ignores whether or not attributes
|
||||
have actually been modified in the case of UPDATEs, and also ignores
|
||||
SQL expressions.
|
||||
|
||||
The objects as given are not added to the session and no additional
|
||||
state is established on them, unless the ``return_defaults`` flag
|
||||
is also set, in which case primary key attributes and server-side
|
||||
default values will be populated.
|
||||
|
||||
.. versionadded:: 1.0.0
|
||||
|
||||
.. warning::
|
||||
|
||||
The bulk save feature allows for a lower-latency INSERT/UPDATE
|
||||
of rows at the expense of most other unit-of-work features.
|
||||
Features such as object management, relationship handling,
|
||||
and SQL clause support are **silently omitted** in favor of raw
|
||||
INSERT/UPDATES of records.
|
||||
|
||||
**Please read the list of caveats at** :ref:`bulk_operations`
|
||||
**before using this method, and fully test and confirm the
|
||||
functionality of all code developed using these systems.**
|
||||
|
||||
:param objects: a list of mapped object instances. The mapped
|
||||
objects are persisted as is, and are **not** associated with the
|
||||
:class:`.Session` afterwards.
|
||||
|
||||
For each object, whether the object is sent as an INSERT or an
|
||||
UPDATE is dependent on the same rules used by the :class:`.Session`
|
||||
in traditional operation; if the object has the
|
||||
:attr:`.InstanceState.key`
|
||||
attribute set, then the object is assumed to be "detached" and
|
||||
will result in an UPDATE. Otherwise, an INSERT is used.
|
||||
|
||||
In the case of an UPDATE, statements are grouped based on which
|
||||
attributes have changed, and are thus to be the subject of each
|
||||
SET clause. If ``update_changed_only`` is False, then all
|
||||
attributes present within each object are applied to the UPDATE
|
||||
statement, which may help in allowing the statements to be grouped
|
||||
together into a larger executemany(), and will also reduce the
|
||||
overhead of checking history on attributes.
|
||||
|
||||
:param return_defaults: when True, rows that are missing values which
|
||||
generate defaults, namely integer primary key defaults and sequences,
|
||||
will be inserted **one at a time**, so that the primary key value
|
||||
is available. In particular this will allow joined-inheritance
|
||||
and other multi-table mappings to insert correctly without the need
|
||||
to provide primary key values ahead of time; however,
|
||||
:paramref:`.Session.bulk_save_objects.return_defaults` **greatly
|
||||
reduces the performance gains** of the method overall.
|
||||
|
||||
:param update_changed_only: when True, UPDATE statements are rendered
|
||||
based on those attributes in each state that have logged changes.
|
||||
When False, all attributes present are rendered into the SET clause
|
||||
with the exception of primary key attributes.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`bulk_operations`
|
||||
|
||||
:meth:`.Session.bulk_insert_mappings`
|
||||
|
||||
:meth:`.Session.bulk_update_mappings`
|
||||
|
||||
"""
|
||||
for (mapper, isupdate), states in itertools.groupby(
|
||||
(attributes.instance_state(obj) for obj in objects),
|
||||
lambda state: (state.mapper, state.key is not None)
|
||||
):
|
||||
self._bulk_save_mappings(
|
||||
mapper, states, isupdate, True,
|
||||
return_defaults, update_changed_only)
|
||||
|
||||
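A minimal usage sketch for :meth:`.Session.bulk_save_objects` (``User``
hypothetical; the objects are not attached to the session afterwards)::

    session.bulk_save_objects(
        [User(name="u%d" % i) for i in range(10000)])
    session.commit()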
def bulk_insert_mappings(self, mapper, mappings, return_defaults=False):
|
||||
"""Perform a bulk insert of the given list of mapping dictionaries.
|
||||
|
||||
The bulk insert feature allows plain Python dictionaries to be used as
|
||||
the source of simple INSERT operations which can be more easily
|
||||
grouped together into higher performing "executemany"
|
||||
operations. Using dictionaries, there is no "history" or session
|
||||
state management features in use, reducing latency when inserting
|
||||
large numbers of simple rows.
|
||||
|
||||
The values within the dictionaries as given are typically passed
|
||||
without modification into Core :meth:`.Insert` constructs, after
|
||||
organizing the values within them across the tables to which
|
||||
the given mapper is mapped.
|
||||
|
||||
.. versionadded:: 1.0.0
|
||||
|
||||
.. warning::
|
||||
|
||||
The bulk insert feature allows for a lower-latency INSERT
|
||||
of rows at the expense of most other unit-of-work features.
|
||||
Features such as object management, relationship handling,
|
||||
and SQL clause support are **silently omitted** in favor of raw
|
||||
INSERT of records.
|
||||
|
||||
**Please read the list of caveats at** :ref:`bulk_operations`
|
||||
**before using this method, and fully test and confirm the
|
||||
functionality of all code developed using these systems.**
|
||||
|
||||
:param mapper: a mapped class, or the actual :class:`.Mapper` object,
|
||||
representing the single kind of object represented within the mapping
|
||||
list.
|
||||
|
||||
:param mappings: a list of dictionaries, each one containing the state
|
||||
of the mapped row to be inserted, in terms of the attribute names
|
||||
on the mapped class. If the mapping refers to multiple tables,
|
||||
such as a joined-inheritance mapping, each dictionary must contain
|
||||
all keys to be populated into all tables.
|
||||
|
||||
:param return_defaults: when True, rows that are missing values which
|
||||
generate defaults, namely integer primary key defaults and sequences,
|
||||
will be inserted **one at a time**, so that the primary key value
|
||||
is available. In particular this will allow joined-inheritance
|
||||
and other multi-table mappings to insert correctly without the need
|
||||
to provide primary
|
||||
key values ahead of time; however,
|
||||
:paramref:`.Session.bulk_insert_mappings.return_defaults`
|
||||
**greatly reduces the performance gains** of the method overall.
|
||||
If the rows
|
||||
to be inserted only refer to a single table, then there is no
|
||||
reason this flag should be set as the returned default information
|
||||
is not used.
|
||||
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`bulk_operations`
|
||||
|
||||
:meth:`.Session.bulk_save_objects`
|
||||
|
||||
:meth:`.Session.bulk_update_mappings`
|
||||
|
||||
"""
|
||||
self._bulk_save_mappings(
|
||||
mapper, mappings, False, False, return_defaults, False)
|
||||
|
||||
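The dictionary-based variant, again as a sketch (``User`` hypothetical;
each dictionary keys on mapped attribute names)::

    session.bulk_insert_mappings(
        User,
        [{"name": "u%d" % i} for i in range(10000)])
    session.commit()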
def bulk_update_mappings(self, mapper, mappings):
|
||||
"""Perform a bulk update of the given list of mapping dictionaries.
|
||||
|
||||
The bulk update feature allows plain Python dictionaries to be used as
|
||||
the source of simple UPDATE operations which can be more easily
|
||||
grouped together into higher performing "executemany"
|
||||
operations. Using dictionaries, there is no "history" or session
|
||||
state management features in use, reducing latency when updating
|
||||
large numbers of simple rows.
|
||||
|
||||
.. versionadded:: 1.0.0
|
||||
|
||||
.. warning::
|
||||
|
||||
The bulk update feature allows for a lower-latency UPDATE
|
||||
of rows at the expense of most other unit-of-work features.
|
||||
Features such as object management, relationship handling,
|
||||
and SQL clause support are **silently omitted** in favor of raw
|
||||
UPDATES of records.
|
||||
|
||||
**Please read the list of caveats at** :ref:`bulk_operations`
|
||||
**before using this method, and fully test and confirm the
|
||||
functionality of all code developed using these systems.**
|
||||
|
||||
:param mapper: a mapped class, or the actual :class:`.Mapper` object,
|
||||
representing the single kind of object represented within the mapping
|
||||
list.
|
||||
|
||||
:param mappings: a list of dictionaries, each one containing the state
|
||||
of the mapped row to be updated, in terms of the attribute names
|
||||
on the mapped class. If the mapping refers to multiple tables,
|
||||
such as a joined-inheritance mapping, each dictionary may contain
|
||||
keys corresponding to all tables. All those keys which are present
|
||||
and are not part of the primary key are applied to the SET clause
|
||||
of the UPDATE statement; the primary key values, which are required,
|
||||
are applied to the WHERE clause.
|
||||
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`bulk_operations`
|
||||
|
||||
:meth:`.Session.bulk_insert_mappings`
|
||||
|
||||
:meth:`.Session.bulk_save_objects`
|
||||
|
||||
"""
|
||||
self._bulk_save_mappings(mapper, mappings, True, False, False, False)
|
||||
|
||||
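Sketch for the UPDATE variant; the primary key entries in each dictionary
are required, as they populate the WHERE clause (``User`` hypothetical)::

    session.bulk_update_mappings(
        User,
        [{"id": 1, "name": "new name one"},
         {"id": 2, "name": "new name two"}])
    session.commit()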
def _bulk_save_mappings(
|
||||
self, mapper, mappings, isupdate, isstates,
|
||||
return_defaults, update_changed_only):
|
||||
mapper = _class_to_mapper(mapper)
|
||||
self._flushing = True
|
||||
|
||||
transaction = self.begin(
|
||||
subtransactions=True)
|
||||
try:
|
||||
if isupdate:
|
||||
persistence._bulk_update(
|
||||
mapper, mappings, transaction,
|
||||
isstates, update_changed_only)
|
||||
else:
|
||||
persistence._bulk_insert(
|
||||
mapper, mappings, transaction, isstates, return_defaults)
|
||||
transaction.commit()
|
||||
|
||||
except:
|
||||
with util.safe_reraise():
|
||||
transaction.rollback(_capture_exception=True)
|
||||
finally:
|
||||
self._flushing = False
|
||||
|
||||
def is_modified(self, instance, include_collections=True,
|
||||
passive=True):
|
||||
"""Return ``True`` if the given instance has locally
|
||||
|
|
@ -2251,7 +2571,6 @@ class Session(_SessionClassMethods):
|
|||
|
||||
|
||||
class sessionmaker(_SessionClassMethods):
|
||||
|
||||
"""A configurable :class:`.Session` factory.
|
||||
|
||||
The :class:`.sessionmaker` factory generates new
|
||||
|
|
@ -2379,18 +2698,49 @@ class sessionmaker(_SessionClassMethods):

def make_transient(instance):
    """Make the given instance 'transient'.
    """Alter the state of the given instance so that it is :term:`transient`.

    This will remove its association with any
    session and additionally will remove its "identity key",
    such that it's as though the object were newly constructed,
    except retaining its values.  It also resets the
    "deleted" flag on the state if this object
    had been explicitly deleted by its session.

    .. note::

    Attributes which were "expired" or deferred at the
    instance level are reverted to undefined, and
    will not trigger any loads.

        :func:`.make_transient` is a special-case function for
        advanced use cases only.

    The given mapped instance is assumed to be in the :term:`persistent` or
    :term:`detached` state.  The function will remove its association with any
    :class:`.Session` as well as its :attr:`.InstanceState.identity`.  The
    effect is that the object will behave as though it were newly constructed,
    except retaining any attribute / collection values that were loaded at the
    time of the call.  The :attr:`.InstanceState.deleted` flag is also reset
    if this object had been deleted as a result of using
    :meth:`.Session.delete`.

    .. warning::

        :func:`.make_transient` does **not** "unexpire" or otherwise eagerly
        load ORM-mapped attributes that are not currently loaded at the time
        the function is called.  This includes attributes which:

        * were expired via :meth:`.Session.expire`

        * were expired as the natural effect of committing a session
          transaction, e.g. :meth:`.Session.commit`

        * are normally :term:`lazy loaded` but are not currently loaded

        * are "deferred" via :ref:`deferred` and are not yet loaded

        * were not present in the query which loaded this object, such as that
          which is common in joined table inheritance and other scenarios.

        After :func:`.make_transient` is called, unloaded attributes such
        as those above will normally resolve to the value ``None`` when
        accessed, or an empty collection for a collection-oriented attribute.
        As the object is transient and un-associated with any database
        identity, it will no longer retrieve these values.

    .. seealso::

        :func:`.make_transient_to_detached`

    """
    state = attributes.instance_state(instance)
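
As a short illustrative sketch of the behavior this docstring describes; the ``obj`` instance, ``session`` and the ``id`` attribute are assumptions, not part of this change::

    from sqlalchemy.orm import make_transient

    # "obj" is a persistent instance previously loaded from a Session;
    # afterwards it is detached from any Session and has no identity key
    make_transient(obj)
    obj.id = None        # hypothetical: clear the primary key value
    session.add(obj)     # the next flush INSERTs a brand new row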

@@ -2398,9 +2748,13 @@ def make_transient(instance):
    if s:
        s._expunge_state(state)

    # remove expired state and
    # deferred callables
    state.callables.clear()
    # remove expired state
    state.expired_attributes.clear()

    # remove deferred callables
    if state.callables:
        del state.callables

    if state.key:
        del state.key
    if state.deleted:

@@ -2408,7 +2762,12 @@ def make_transient(instance):


def make_transient_to_detached(instance):
    """Make the given transient instance 'detached'.
    """Make the given transient instance :term:`detached`.

    .. note::

        :func:`.make_transient_to_detached` is a special-case function for
        advanced use cases only.

    All attribute history on the given instance
    will be reset as though the instance were freshly loaded
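
A minimal sketch of the intended use, assuming a hypothetical mapped class ``User`` with primary key ``id``::

    from sqlalchemy.orm import make_transient_to_detached

    obj = User(id=5, name="ed")       # built transient, no SQL emitted
    make_transient_to_detached(obj)   # detached, identity key (User, (5,))
    session.add(obj)                  # enters the identity map directly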

@@ -2443,16 +2802,19 @@ def make_transient_to_detached(instance):


def object_session(instance):
    """Return the ``Session`` to which instance belongs.
    """Return the :class:`.Session` to which the given instance belongs.

    If the instance is not a mapped instance, an error is raised.
    This is essentially the same as the :attr:`.InstanceState.session`
    accessor.  See that attribute for details.

    """

    try:
        return _state_session(attributes.instance_state(instance))
        state = attributes.instance_state(instance)
    except exc.NO_STATE:
        raise exc.UnmappedInstanceError(instance)
    else:
        return _state_session(state)
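
A one-line usage sketch, assuming ``obj`` is an instance of some mapped class::

    from sqlalchemy.orm import object_session

    sess = object_session(obj)   # the owning Session, or None if detached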


_new_sessionid = util.counter()


@@ -1,5 +1,5 @@
# orm/state.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -21,7 +21,7 @@ from .base import PASSIVE_NO_RESULT, SQL_OK, NEVER_SET, ATTR_WAS_SET, \
from . import base


class InstanceState(interfaces._InspectionAttr):
class InstanceState(interfaces.InspectionAttr):
    """tracks state information at the instance level.

    The :class:`.InstanceState` is a key object used by the

@@ -58,15 +58,35 @@ class InstanceState(interfaces._InspectionAttr):
    expired = False
    deleted = False
    _load_pending = False

    is_instance = True

    callables = ()
    """A namespace where a per-state loader callable can be associated.

    In SQLAlchemy 1.0, this is only used for lazy loaders / deferred
    loaders that were set up via query option.

    Previously, callables was used also to indicate expired attributes
    by storing a link to the InstanceState itself in this dictionary.
    This role is now handled by the expired_attributes set.

    """

    def __init__(self, obj, manager):
        self.class_ = obj.__class__
        self.manager = manager
        self.obj = weakref.ref(obj, self._cleanup)
        self.callables = {}
        self.committed_state = {}
        self.expired_attributes = set()

    expired_attributes = None
    """The set of keys which are 'expired' to be loaded by
    the manager's deferred scalar loader, assuming no pending
    changes.

    see also the ``unmodified`` collection which is intersected
    against this set when a refresh operation occurs."""
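
A small sketch of observing this bookkeeping through the inspection API; ``obj``, ``session`` and the ``name`` attribute are assumptions for illustration::

    from sqlalchemy import inspect

    session.expire(obj, ["name"])
    state = inspect(obj)              # the InstanceState
    print(state.expired_attributes)   # contains 'name' until next access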

    @util.memoized_property
    def attrs(self):

@@ -146,7 +166,16 @@ class InstanceState(interfaces._InspectionAttr):
    @util.dependencies("sqlalchemy.orm.session")
    def session(self, sessionlib):
        """Return the owning :class:`.Session` for this instance,
        or ``None`` if none available."""
        or ``None`` if none available.

        Note that the result here can in some cases be *different*
        from that of ``obj in session``; an object that's been deleted
        will report as not ``in session``, however if the transaction is
        still in progress, this attribute will still refer to that session.
        Only when the transaction is completed does the object become
        fully detached under normal circumstances.

        """
        return sessionlib._state_session(self)
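
A brief sketch of the distinction the docstring draws, with ``obj`` and ``session`` assumed::

    from sqlalchemy import inspect

    session.delete(obj)
    session.flush()
    print(obj in session)         # False - the object has been deleted
    print(inspect(obj).session)   # still the Session, until the
                                  # transaction completes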

    @property

@@ -165,7 +194,7 @@ class InstanceState(interfaces._InspectionAttr):
        Returns ``None`` if the object has no primary key identity.

        .. note::
            An object which is transient or pending
            An object which is :term:`transient` or :term:`pending`
            does **not** have a mapped identity until it is flushed,
            even if its attributes include primary key values.
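
A short sketch of that note, using a hypothetical mapped class ``User``::

    from sqlalchemy import inspect

    obj = User(id=7)
    session.add(obj)                 # pending
    print(inspect(obj).identity)     # None - not yet flushed
    session.flush()
    print(inspect(obj).identity)     # (7,) once the row exists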

@@ -220,11 +249,25 @@ class InstanceState(interfaces._InspectionAttr):
            del self.obj

    def _cleanup(self, ref):
        instance_dict = self._instance_dict()
        if instance_dict:
            instance_dict.discard(self)
        """Weakref callback cleanup.

        This callable cleans out the state when it is being garbage
        collected.

        this _cleanup **assumes** that there are no strong refs to us!
        Will not work otherwise!

        """
        instance_dict = self._instance_dict()
        if instance_dict is not None:
            instance_dict._fast_discard(self)
            del self._instance_dict

            # we can't possibly be in instance_dict._modified
            # b.c. this is weakref cleanup only, that set
            # is strong referencing!
            # assert self not in instance_dict._modified

        self.callables = {}
        self.session_id = self._strong_obj = None
        del self.obj


@@ -251,7 +294,7 @@ class InstanceState(interfaces._InspectionAttr):
        return {}

    def _initialize_instance(*mixed, **kwargs):
        self, instance, args = mixed[0], mixed[1], mixed[2:]
        self, instance, args = mixed[0], mixed[1], mixed[2:]  # noqa
        manager = self.manager

        manager.dispatch.init(self, args, kwargs)

@@ -259,8 +302,8 @@ class InstanceState(interfaces._InspectionAttr):
        try:
            return manager.original_init(*mixed[1:], **kwargs)
        except:
            manager.dispatch.init_failure(self, args, kwargs)
            raise
            with util.safe_reraise():
                manager.dispatch.init_failure(self, args, kwargs)

    def get_history(self, key, passive):
        return self.manager[key].impl.get_history(self, self.dict, passive)

@@ -279,7 +322,7 @@ class InstanceState(interfaces._InspectionAttr):
            (k, self.__dict__[k]) for k in (
                'committed_state', '_pending_mutations', 'modified',
                'expired', 'callables', 'key', 'parents', 'load_options',
                'class_',
                'class_', 'expired_attributes'
            ) if k in self.__dict__
        )
        if self.load_path:

@@ -306,7 +349,18 @@ class InstanceState(interfaces._InspectionAttr):
        self.parents = state_dict.get('parents', {})
        self.modified = state_dict.get('modified', False)
        self.expired = state_dict.get('expired', False)
        self.callables = state_dict.get('callables', {})
        if 'callables' in state_dict:
            self.callables = state_dict['callables']

        try:
            self.expired_attributes = state_dict['expired_attributes']
        except KeyError:
            self.expired_attributes = set()
            # 0.9 and earlier compat
            for k in list(self.callables):
                if self.callables[k] is self:
                    self.expired_attributes.add(k)
                    del self.callables[k]

        self.__dict__.update([
            (k, state_dict[k]) for k in (

@@ -320,12 +374,6 @@ class InstanceState(interfaces._InspectionAttr):

        state_dict['manager'](self, inst, state_dict)

    def _initialize(self, key):
        """Set this attribute to an empty value or collection,
        based on the AttributeImpl in use."""

        self.manager.get_impl(key).initialize(self, self.dict)

    def _reset(self, dict_, key):
        """Remove the given attribute and any
        callables associated with it."""

@@ -333,71 +381,73 @@ class InstanceState(interfaces._InspectionAttr):
        old = dict_.pop(key, None)
        if old is not None and self.manager[key].impl.collection:
            self.manager[key].impl._invalidate_collection(old)
        self.callables.pop(key, None)

    def _expire_attribute_pre_commit(self, dict_, key):
        """a fast expire that can be called by column loaders during a load.

        The additional bookkeeping is finished up in commit_all().

        Should only be called for scalar attributes.

        This method is actually called a lot with joined-table
        loading, when the second table isn't present in the result.

        """
        dict_.pop(key, None)
        self.callables[key] = self
        self.expired_attributes.discard(key)
        if self.callables:
            self.callables.pop(key, None)

    @classmethod
    def _row_processor(cls, manager, fn, key):
    def _instance_level_callable_processor(cls, manager, fn, key):
        impl = manager[key].impl
        if impl.collection:
            def _set_callable(state, dict_, row):
                if 'callables' not in state.__dict__:
                    state.callables = {}
                old = dict_.pop(key, None)
                if old is not None:
                    impl._invalidate_collection(old)
                state.callables[key] = fn
        else:
            def _set_callable(state, dict_, row):
                if 'callables' not in state.__dict__:
                    state.callables = {}
                state.callables[key] = fn
        return _set_callable

    def _expire(self, dict_, modified_set):
        self.expired = True

        if self.modified:
            modified_set.discard(self)
            self.committed_state.clear()
            self.modified = False

        self.modified = False
        self._strong_obj = None

        self.committed_state.clear()
        if '_pending_mutations' in self.__dict__:
            del self.__dict__['_pending_mutations']

        InstanceState._pending_mutations._reset(self)
        if 'parents' in self.__dict__:
            del self.__dict__['parents']

        # clear out 'parents' collection.  not
        # entirely clear how we can best determine
        # which to remove, or not.
        InstanceState.parents._reset(self)
        self.expired_attributes.update(
            [impl.key for impl in self.manager._scalar_loader_impls
             if impl.expire_missing or impl.key in dict_]
        )

        for key in self.manager:
            impl = self.manager[key].impl
            if impl.accepts_scalar_loader and \
                    (impl.expire_missing or key in dict_):
                self.callables[key] = self
            old = dict_.pop(key, None)
            if impl.collection and old is not None:
                impl._invalidate_collection(old)
        if self.callables:
            for k in self.expired_attributes.intersection(self.callables):
                del self.callables[k]

        for k in self.manager._collection_impl_keys.intersection(dict_):
            collection = dict_.pop(k)
            collection._sa_adapter.invalidated = True

        for key in self.manager._all_key_set.intersection(dict_):
            del dict_[key]

        self.manager.dispatch.expire(self, None)

    def _expire_attributes(self, dict_, attribute_names):
        pending = self.__dict__.get('_pending_mutations', None)

        callables = self.callables

        for key in attribute_names:
            impl = self.manager[key].impl
            if impl.accepts_scalar_loader:
                self.callables[key] = self
                self.expired_attributes.add(key)
                if callables and key in callables:
                    del callables[key]
            old = dict_.pop(key, None)
            if impl.collection and old is not None:
                impl._invalidate_collection(old)

@@ -408,7 +458,7 @@ class InstanceState(interfaces._InspectionAttr):

        self.manager.dispatch.expire(self, attribute_names)

    def __call__(self, state, passive):
    def _load_expired(self, state, passive):
        """__call__ allows the InstanceState to act as a deferred
        callable for loading expired attributes, which is also
        serializable (picklable).

@@ -427,8 +477,7 @@ class InstanceState(interfaces._InspectionAttr):
        # instance state didn't have an identity,
        # the attributes still might be in the callables
        # dict.  ensure they are removed.
        for k in toload.intersection(self.callables):
            del self.callables[k]
        self.expired_attributes.clear()

        return ATTR_WAS_SET


@@ -463,18 +512,6 @@ class InstanceState(interfaces._InspectionAttr):
            if self.manager[attr].impl.accepts_scalar_loader
        )

    @property
    def expired_attributes(self):
        """Return the set of keys which are 'expired' to be loaded by
        the manager's deferred scalar loader, assuming no pending
        changes.

        see also the ``unmodified`` collection which is intersected
        against this set when a refresh operation occurs.

        """
        return set([k for k, v in self.callables.items() if v is self])

    def _instance_dict(self):
        return None

@@ -497,6 +534,7 @@ class InstanceState(interfaces._InspectionAttr):

        if (self.session_id and self._strong_obj is None) \
                or not self.modified:
            self.modified = True
            instance_dict = self._instance_dict()
            if instance_dict:
                instance_dict._modified.add(self)

@@ -517,7 +555,6 @@ class InstanceState(interfaces._InspectionAttr):
                        self.manager[attr.key],
                        base.state_class_str(self)
                    ))
        self.modified = True

    def _commit(self, dict_, keys):
        """Commit attributes.

@@ -534,10 +571,18 @@ class InstanceState(interfaces._InspectionAttr):

        self.expired = False

        for key in set(self.callables).\
        self.expired_attributes.difference_update(
            set(keys).intersection(dict_))

        # the per-keys commit removes object-level callables,
        # while that of commit_all does not.  it's not clear
        # if this behavior has a clear rationale, however tests do
        # ensure this is what it does.
        if self.callables:
            for key in set(self.callables).\
                    intersection(keys).\
                    intersection(dict_):
                del self.callables[key]
                intersection(dict_):
                del self.callables[key]

    def _commit_all(self, dict_, instance_dict=None):
        """commit all attributes unconditionally.

@@ -548,7 +593,8 @@ class InstanceState(interfaces._InspectionAttr):
        - all attributes are marked as "committed"
        - the "strong dirty reference" is removed
        - the "modified" flag is set to False
        - any "expired" markers/callables for attributes loaded are removed.
        - any "expired" markers for scalar attributes loaded are removed.
        - lazy load callables for objects / collections *stay*

        Attributes marked as "expired" can potentially remain
        "expired" after this step if a value was not populated in state.dict.

@@ -558,16 +604,17 @@ class InstanceState(interfaces._InspectionAttr):

    @classmethod
    def _commit_all_states(self, iter, instance_dict=None):
        """Mass version of commit_all()."""
        """Mass / highly inlined version of commit_all()."""

        for state, dict_ in iter:
            state.committed_state.clear()
            InstanceState._pending_mutations._reset(state)
            state_dict = state.__dict__

            callables = state.callables
            for key in list(callables):
                if key in dict_ and callables[key] is state:
                    del callables[key]
            state.committed_state.clear()

            if '_pending_mutations' in state_dict:
                del state_dict['_pending_mutations']

            state.expired_attributes.difference_update(dict_)

            if instance_dict and state.modified:
                instance_dict._modified.discard(state)


@@ -1,5 +1,5 @@
# orm/strategies.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -22,6 +22,7 @@ from . import properties
from .interfaces import (
    LoaderStrategy, StrategizedProperty
)
from .base import _SET_DEFERRED_EXPIRED, _DEFER_FOR_STATE
from .session import _state_session
import itertools


@@ -105,6 +106,8 @@ class UninstrumentedColumnLoader(LoaderStrategy):
    if the argument is against the with_polymorphic selectable.

    """
    __slots__ = 'columns',

    def __init__(self, parent):
        super(UninstrumentedColumnLoader, self).__init__(parent)
        self.columns = self.parent_property.columns

@@ -119,8 +122,8 @@ class UninstrumentedColumnLoader(LoaderStrategy):

    def create_row_processor(
            self, context, path, loadopt,
            mapper, row, adapter):
        return None, None, None
            mapper, result, adapter, populators):
        pass


@log.class_logger

@@ -128,6 +131,8 @@ class UninstrumentedColumnLoader(LoaderStrategy):
class ColumnLoader(LoaderStrategy):
    """Provide loading behavior for a :class:`.ColumnProperty`."""

    __slots__ = 'columns', 'is_composite'

    def __init__(self, parent):
        super(ColumnLoader, self).__init__(parent)
        self.columns = self.parent_property.columns

@@ -135,12 +140,18 @@ class ColumnLoader(LoaderStrategy):

    def setup_query(
            self, context, entity, path, loadopt,
            adapter, column_collection, **kwargs):
            adapter, column_collection, memoized_populators, **kwargs):

        for c in self.columns:
            if adapter:
                c = adapter.columns[c]
            column_collection.append(c)

        fetch = self.columns[0]
        if adapter:
            fetch = adapter.columns[fetch]
        memoized_populators[self.parent_property] = fetch

    def init_class_attribute(self, mapper):
        self.is_class_level = True
        coltype = self.columns[0].type

@@ -157,21 +168,18 @@ class ColumnLoader(LoaderStrategy):

    def create_row_processor(
            self, context, path,
            loadopt, mapper, row, adapter):
        key = self.key
            loadopt, mapper, result, adapter, populators):
        # look through list of columns represented here
        # to see which, if any, is present in the row.
        for col in self.columns:
            if adapter:
                col = adapter.columns[col]
            if col is not None and col in row:
                def fetch_col(state, dict_, row):
                    dict_[key] = row[col]
                return fetch_col, None, None
            getter = result._getter(col)
            if getter:
                populators["quick"].append((self.key, getter))
                break
        else:
            def expire_for_non_present_col(state, dict_, row):
                state._expire_attribute_pre_commit(dict_, key)
            return expire_for_non_present_col, None, None
            populators["expire"].append((self.key, True))


@log.class_logger

@@ -179,6 +187,8 @@ class ColumnLoader(LoaderStrategy):
class DeferredColumnLoader(LoaderStrategy):
    """Provide loading behavior for a deferred :class:`.ColumnProperty`."""

    __slots__ = 'columns', 'group'

    def __init__(self, parent):
        super(DeferredColumnLoader, self).__init__(parent)
        if hasattr(self.parent_property, 'composite_class'):

@@ -189,28 +199,18 @@ class DeferredColumnLoader(LoaderStrategy):

    def create_row_processor(
            self, context, path, loadopt,
            mapper, row, adapter):
        col = self.columns[0]
        if adapter:
            col = adapter.columns[col]
            mapper, result, adapter, populators):

        key = self.key
        if col in row:
            return self.parent_property._get_strategy_by_cls(ColumnLoader).\
                create_row_processor(
                    context, path, loadopt, mapper, row, adapter)

        elif not self.is_class_level:
            set_deferred_for_local_state = InstanceState._row_processor(
                mapper.class_manager,
                LoadDeferredColumns(key), key)
            return set_deferred_for_local_state, None, None
        # this path currently does not check the result
        # for the column; this is because in most cases we are
        # working just with the setup_query() directive which does
        # not support this, and the behavior here should be consistent.
        if not self.is_class_level:
            set_deferred_for_local_state = \
                self.parent_property._deferred_column_loader
            populators["new"].append((self.key, set_deferred_for_local_state))
        else:
            def reset_col_for_deferred(state, dict_, row):
                # reset state on the key so that deferred callables
                # fire off on next access.
                state._reset(dict_, key)
            return reset_col_for_deferred, None, None
            populators["expire"].append((self.key, False))

    def init_class_attribute(self, mapper):
        self.is_class_level = True

@@ -223,20 +223,22 @@ class DeferredColumnLoader(LoaderStrategy):
        )

    def setup_query(
            self, context, entity, path, loadopt, adapter,
            only_load_props=None, **kwargs):
            self, context, entity, path, loadopt,
            adapter, column_collection, memoized_populators,
            only_load_props=None, **kw):

        if (
            (
                loadopt and
                'undefer_pks' in loadopt.local_opts and
                set(self.columns).intersection(self.parent.primary_key)
                set(self.columns).intersection(
                    self.parent._should_undefer_in_wildcard)
            )
            or
            (
                loadopt and
                self.group and
                loadopt.local_opts.get('undefer_group', False) == self.group
                loadopt.local_opts.get('undefer_group_%s' % self.group, False)
            )
            or
            (

@@ -245,7 +247,12 @@ class DeferredColumnLoader(LoaderStrategy):
        ):
            self.parent_property._get_strategy_by_cls(ColumnLoader).\
                setup_query(context, entity,
                            path, loadopt, adapter, **kwargs)
                            path, loadopt, adapter,
                            column_collection, memoized_populators, **kw)
        elif self.is_class_level:
            memoized_populators[self.parent_property] = _SET_DEFERRED_EXPIRED
        else:
            memoized_populators[self.parent_property] = _DEFER_FOR_STATE

    def _load_for_state(self, state, passive):
        if not state.key:

@@ -305,6 +312,8 @@ class LoadDeferredColumns(object):
class AbstractRelationshipLoader(LoaderStrategy):
    """LoaderStrategies which deal with related objects."""

    __slots__ = 'mapper', 'target', 'uselist'

    def __init__(self, parent):
        super(AbstractRelationshipLoader, self).__init__(parent)
        self.mapper = self.parent_property.mapper

@@ -321,6 +330,8 @@ class NoLoader(AbstractRelationshipLoader):

    """

    __slots__ = ()

    def init_class_attribute(self, mapper):
        self.is_class_level = True


@@ -333,21 +344,29 @@ class NoLoader(AbstractRelationshipLoader):

    def create_row_processor(
            self, context, path, loadopt, mapper,
            row, adapter):
            result, adapter, populators):
        def invoke_no_load(state, dict_, row):
            state._initialize(self.key)
        return invoke_no_load, None, None
            if self.uselist:
                state.manager.get_impl(self.key).initialize(state, dict_)
            else:
                dict_[self.key] = None
        populators["new"].append((self.key, invoke_no_load))


@log.class_logger
@properties.RelationshipProperty.strategy_for(lazy=True)
@properties.RelationshipProperty.strategy_for(lazy="select")
class LazyLoader(AbstractRelationshipLoader):
class LazyLoader(AbstractRelationshipLoader, util.MemoizedSlots):
    """Provide loading behavior for a :class:`.RelationshipProperty`
    with "lazy=True", that is loads when first accessed.

    """

    __slots__ = (
        '_lazywhere', '_rev_lazywhere', 'use_get', '_bind_to_col',
        '_equated_columns', '_rev_bind_to_col', '_rev_equated_columns',
        '_simple_lazy_clause')

    def __init__(self, parent):
        super(LazyLoader, self).__init__(parent)
        join_condition = self.parent_property._join_condition

@@ -378,7 +397,7 @@ class LazyLoader(AbstractRelationshipLoader):
            self._equated_columns[c] = self._equated_columns[col]

        self.logger.info("%s will use query.get() to "
                         "optimize instance loads" % self)
                         "optimize instance loads", self)

    def init_class_attribute(self, mapper):
        self.is_class_level = True

@@ -406,78 +425,57 @@ class LazyLoader(AbstractRelationshipLoader):
            active_history=active_history
        )

    def lazy_clause(
            self, state, reverse_direction=False,
            alias_secondary=False,
            adapt_source=None,
            passive=None):
    def _memoized_attr__simple_lazy_clause(self):
        criterion, bind_to_col = (
            self._lazywhere,
            self._bind_to_col
        )

        params = []

        def visit_bindparam(bindparam):
            bindparam.unique = False
            if bindparam._identifying_key in bind_to_col:
                params.append((
                    bindparam.key, bind_to_col[bindparam._identifying_key],
                    None))
            else:
                params.append((bindparam.key, None, bindparam.value))

        criterion = visitors.cloned_traverse(
            criterion, {}, {'bindparam': visit_bindparam}
        )

        return criterion, params

    def _generate_lazy_clause(self, state, passive):
        criterion, param_keys = self._simple_lazy_clause

        if state is None:
            return self._lazy_none_clause(
                reverse_direction,
                adapt_source=adapt_source)
            return sql_util.adapt_criterion_to_null(
                criterion, [key for key, ident, value in param_keys])

        if not reverse_direction:
            criterion, bind_to_col = \
                self._lazywhere, \
                self._bind_to_col
        else:
            criterion, bind_to_col = \
                self._rev_lazywhere, \
                self._rev_bind_to_col

        if reverse_direction:
            mapper = self.parent_property.mapper
        else:
            mapper = self.parent_property.parent
        mapper = self.parent_property.parent

        o = state.obj()  # strong ref
        dict_ = attributes.instance_dict(o)

        # use the "committed state" only if we're in a flush
        # for this state.
        if passive & attributes.INIT_OK:
            passive ^= attributes.INIT_OK

        if passive and passive & attributes.LOAD_AGAINST_COMMITTED:
            def visit_bindparam(bindparam):
                if bindparam._identifying_key in bind_to_col:
                    bindparam.callable = \
                        lambda: mapper._get_committed_state_attr_by_column(
                            state, dict_,
                            bind_to_col[bindparam._identifying_key])
        else:
            def visit_bindparam(bindparam):
                if bindparam._identifying_key in bind_to_col:
                    bindparam.callable = \
                        lambda: mapper._get_state_attr_by_column(
                            state, dict_,
                            bind_to_col[bindparam._identifying_key])
        params = {}
        for key, ident, value in param_keys:
            if ident is not None:
                if passive and passive & attributes.LOAD_AGAINST_COMMITTED:
                    value = mapper._get_committed_state_attr_by_column(
                        state, dict_, ident, passive)
                else:
                    value = mapper._get_state_attr_by_column(
                        state, dict_, ident, passive)

        if self.parent_property.secondary is not None and alias_secondary:
            criterion = sql_util.ClauseAdapter(
                self.parent_property.secondary.alias()).\
                traverse(criterion)
            params[key] = value

        criterion = visitors.cloned_traverse(
            criterion, {}, {'bindparam': visit_bindparam})

        if adapt_source:
            criterion = adapt_source(criterion)
        return criterion

    def _lazy_none_clause(self, reverse_direction=False, adapt_source=None):
        if not reverse_direction:
            criterion, bind_to_col = \
                self._lazywhere, \
                self._bind_to_col
        else:
            criterion, bind_to_col = \
                self._rev_lazywhere, \
                self._rev_bind_to_col

        criterion = sql_util.adapt_criterion_to_null(criterion, bind_to_col)

        if adapt_source:
            criterion = adapt_source(criterion)
        return criterion
        return criterion, params

    def _load_for_state(self, state, passive):
        if not state.key and (

@@ -554,10 +552,9 @@ class LazyLoader(AbstractRelationshipLoader):

    @util.dependencies("sqlalchemy.orm.strategy_options")
    def _emit_lazyload(
            self, strategy_options, session, state,
            ident_key, passive):
        q = session.query(self.mapper)._adapt_all_clauses()
            self, strategy_options, session, state, ident_key, passive):

        q = session.query(self.mapper)._adapt_all_clauses()
        if self.parent_property.secondary is not None:
            q = q.select_from(self.mapper, self.parent_property.secondary)


@@ -588,17 +585,19 @@ class LazyLoader(AbstractRelationshipLoader):
                rev._use_get and \
                not isinstance(rev.strategy, LazyLoader):
            q = q.options(
                strategy_options.Load(rev.parent).
                lazyload(rev.key))
                strategy_options.Load(rev.parent).lazyload(rev.key))

        lazy_clause = self.lazy_clause(state, passive=passive)
        lazy_clause, params = self._generate_lazy_clause(
            state, passive=passive)

        if pending:
            bind_values = sql_util.bind_values(lazy_clause)
            if None in bind_values:
            if util.has_intersection(
                    orm_util._none_set, params.values()):
                return None
        elif util.has_intersection(orm_util._never_set, params.values()):
            return None

        q = q.filter(lazy_clause)
        q = q.filter(lazy_clause).params(params)

        result = q.all()
        if self.uselist:

@@ -618,7 +617,7 @@ class LazyLoader(AbstractRelationshipLoader):

    def create_row_processor(
            self, context, path, loadopt,
            mapper, row, adapter):
            mapper, result, adapter, populators):
        key = self.key
        if not self.is_class_level:
            # we are not the primary manager for this attribute

@@ -629,12 +628,12 @@ class LazyLoader(AbstractRelationshipLoader):
            # "lazyload" option on a "no load"
            # attribute - "eager" attributes always have a
            # class-level lazyloader installed.
            set_lazy_callable = InstanceState._row_processor(
            set_lazy_callable = InstanceState._instance_level_callable_processor(
                mapper.class_manager,
                LoadLazyAttribute(key), key)
                LoadLazyAttribute(key, self._strategy_keys[0]), key)

            return set_lazy_callable, None, None
        else:
            populators["new"].append((self.key, set_lazy_callable))
        elif context.populate_existing or mapper.always_refresh:
            def reset_for_lazy_callable(state, dict_, row):
                # we are the primary manager for this attribute on
                # this class - reset its

@@ -646,26 +645,29 @@ class LazyLoader(AbstractRelationshipLoader):
                # any existing state.
                state._reset(dict_, key)

            return reset_for_lazy_callable, None, None
            populators["new"].append((self.key, reset_for_lazy_callable))


class LoadLazyAttribute(object):
    """serializable loader object used by LazyLoader"""

    def __init__(self, key):
    def __init__(self, key, strategy_key=(('lazy', 'select'),)):
        self.key = key
        self.strategy_key = strategy_key

    def __call__(self, state, passive=attributes.PASSIVE_OFF):
        key = self.key
        instance_mapper = state.manager.mapper
        prop = instance_mapper._props[key]
        strategy = prop._strategies[LazyLoader]
        strategy = prop._strategies[self.strategy_key]

        return strategy._load_for_state(state, passive)


@properties.RelationshipProperty.strategy_for(lazy="immediate")
class ImmediateLoader(AbstractRelationshipLoader):
    __slots__ = ()

    def init_class_attribute(self, mapper):
        self.parent_property.\
            _get_strategy_by_cls(LazyLoader).\

@@ -679,16 +681,18 @@ class ImmediateLoader(AbstractRelationshipLoader):

    def create_row_processor(
            self, context, path, loadopt,
            mapper, row, adapter):
            mapper, result, adapter, populators):
        def load_immediate(state, dict_, row):
            state.get_impl(self.key).get(state, dict_)

        return None, None, load_immediate
        populators["delayed"].append((self.key, load_immediate))


@log.class_logger
@properties.RelationshipProperty.strategy_for(lazy="subquery")
class SubqueryLoader(AbstractRelationshipLoader):
    __slots__ = 'join_depth',

    def __init__(self, parent):
        super(SubqueryLoader, self).__init__(parent)
        self.join_depth = self.parent_property.join_depth

@@ -706,6 +710,8 @@ class SubqueryLoader(AbstractRelationshipLoader):

        if not context.query._enable_eagerloads:
            return
        elif context.query._yield_per:
            context.query._no_yield_per("subquery")

        path = path[self.parent_property]

@@ -994,7 +1000,7 @@ class SubqueryLoader(AbstractRelationshipLoader):

    def create_row_processor(
            self, context, path, loadopt,
            mapper, row, adapter):
            mapper, result, adapter, populators):
        if not self.parent.class_manager[self.key].impl.supports_population:
            raise sa_exc.InvalidRequestError(
                "'%s' does not support object "

@@ -1006,7 +1012,13 @@ class SubqueryLoader(AbstractRelationshipLoader):
        subq = path.get(context.attributes, 'subquery')

        if subq is None:
            return None, None, None
            return

        assert subq.session is context.session, (
            "Subquery session doesn't refer to that of "
            "our context.  Are there broken context caching "
            "schemes being used?"
        )

        local_cols = self.parent_property.local_columns


@@ -1022,11 +1034,14 @@ class SubqueryLoader(AbstractRelationshipLoader):
            local_cols = [adapter.columns[c] for c in local_cols]

        if self.uselist:
            return self._create_collection_loader(collections, local_cols)
            self._create_collection_loader(
                context, collections, local_cols, populators)
        else:
            return self._create_scalar_loader(collections, local_cols)
            self._create_scalar_loader(
                context, collections, local_cols, populators)

    def _create_collection_loader(self, collections, local_cols):
    def _create_collection_loader(
            self, context, collections, local_cols, populators):
        def load_collection_from_subq(state, dict_, row):
            collection = collections.get(
                tuple([row[col] for col in local_cols]),

@@ -1035,9 +1050,12 @@ class SubqueryLoader(AbstractRelationshipLoader):
            state.get_impl(self.key).\
                set_committed_value(state, dict_, collection)

        return load_collection_from_subq, None, None, collections.loader
        populators["new"].append((self.key, load_collection_from_subq))
        if context.invoke_all_eagers:
            populators["eager"].append((self.key, collections.loader))

    def _create_scalar_loader(self, collections, local_cols):
    def _create_scalar_loader(
            self, context, collections, local_cols, populators):
        def load_scalar_from_subq(state, dict_, row):
            collection = collections.get(
                tuple([row[col] for col in local_cols]),

@@ -1053,7 +1071,9 @@ class SubqueryLoader(AbstractRelationshipLoader):
            state.get_impl(self.key).\
                set_committed_value(state, dict_, scalar)

        return load_scalar_from_subq, None, None, collections.loader
        populators["new"].append((self.key, load_scalar_from_subq))
        if context.invoke_all_eagers:
            populators["eager"].append((self.key, collections.loader))


@log.class_logger

@@ -1064,6 +1084,9 @@ class JoinedLoader(AbstractRelationshipLoader):
    using joined eager loading.

    """

    __slots__ = 'join_depth',

    def __init__(self, parent):
        super(JoinedLoader, self).__init__(parent)
        self.join_depth = self.parent_property.join_depth

@@ -1081,6 +1104,8 @@ class JoinedLoader(AbstractRelationshipLoader):

        if not context.query._enable_eagerloads:
            return
        elif context.query._yield_per and self.uselist:
            context.query._no_yield_per("joined collection")

        path = path[self.parent_property]


@@ -1123,16 +1148,12 @@ class JoinedLoader(AbstractRelationshipLoader):

        path = path[self.mapper]

        for value in self.mapper._iterate_polymorphic_properties(
                mappers=with_polymorphic):
            value.setup(
                context,
                entity,
                path,
                clauses,
                parentmapper=self.mapper,
                column_collection=add_to_collection,
                chained_from_outerjoin=chained_from_outerjoin)
        loading._setup_entity_query(
            context, self.mapper, entity,
            path, clauses, add_to_collection,
            with_polymorphic=with_polymorphic,
            parentmapper=self.mapper,
            chained_from_outerjoin=chained_from_outerjoin)

        if with_poly_info is not None and \
                None in set(context.secondary_columns):

@@ -1235,10 +1256,11 @@ class JoinedLoader(AbstractRelationshipLoader):
        clauses = orm_util.ORMAdapter(
            to_adapt,
            equivalents=self.mapper._equivalent_columns,
            adapt_required=True)
            adapt_required=True, allow_label_resolve=False,
            anonymize_labels=True)
        assert clauses.aliased_class is not None

        if self.parent_property.direction != interfaces.MANYTOONE:
        if self.parent_property.uselist:
            context.multi_row_eager_loaders = True

        innerjoin = (

@@ -1303,8 +1325,19 @@ class JoinedLoader(AbstractRelationshipLoader):

        if adapter:
            if getattr(adapter, 'aliased_class', None):
                # joining from an adapted entity.  The adapted entity
                # might be a "with_polymorphic", so resolve that to our
                # specific mapper's entity before looking for our attribute
                # name on it.
                efm = inspect(adapter.aliased_class).\
                    _entity_for_mapper(
                        parentmapper
                        if parentmapper.isa(self.parent) else self.parent)

                # look for our attribute on the adapted entity, else fall back
                # to our straight property
                onclause = getattr(
                    adapter.aliased_class, self.key,
                    efm.entity, self.key,
                    self.parent_property)
            else:
                onclause = getattr(

@@ -1321,40 +1354,31 @@ class JoinedLoader(AbstractRelationshipLoader):

        assert clauses.aliased_class is not None

        join_to_outer = innerjoin and isinstance(towrap, sql.Join) and \
            towrap.isouter
        attach_on_outside = (
            not chained_from_outerjoin or
            not innerjoin or innerjoin == 'unnested')

        if chained_from_outerjoin and join_to_outer and innerjoin == 'nested':
            inner = orm_util.join(
                towrap.right,
                clauses.aliased_class,
                onclause,
                isouter=False
            )

            eagerjoin = orm_util.join(
                towrap.left,
                inner,
                towrap.onclause,
                isouter=True
            )
            eagerjoin._target_adapter = inner._target_adapter
        else:
            if chained_from_outerjoin:
                innerjoin = False
            eagerjoin = orm_util.join(
        if attach_on_outside:
            # this is the "classic" eager join case.
            eagerjoin = orm_util._ORMJoin(
                towrap,
                clauses.aliased_class,
                onclause,
                isouter=not innerjoin
                isouter=not innerjoin or (
                    chained_from_outerjoin and isinstance(towrap, sql.Join)
                ), _left_memo=self.parent, _right_memo=self.mapper
            )
        else:
            # all other cases are innerjoin=='nested' approach
            eagerjoin = self._splice_nested_inner_join(
                path, towrap, clauses, onclause)

        context.eager_joins[entity_key] = eagerjoin

        # send a hint to the Query as to where it may "splice" this join
        eagerjoin.stop_on = entity.selectable

        if self.parent_property.secondary is None and \
                not parentmapper:
        if not parentmapper:
            # for parentclause that is the non-eager end of the join,
            # ensure all the parent cols in the primaryjoin are actually
            # in the

@@ -1377,7 +1401,67 @@ class JoinedLoader(AbstractRelationshipLoader):
                )
            )

    def _create_eager_adapter(self, context, row, adapter, path, loadopt):
    def _splice_nested_inner_join(
            self, path, join_obj, clauses, onclause, splicing=False):

        if splicing is False:
            # first call is always handed a join object
            # from the outside
            assert isinstance(join_obj, orm_util._ORMJoin)
        elif isinstance(join_obj, sql.selectable.FromGrouping):
            return self._splice_nested_inner_join(
                path, join_obj.element, clauses, onclause, splicing
            )
        elif not isinstance(join_obj, orm_util._ORMJoin):
            if path[-2] is splicing:
                return orm_util._ORMJoin(
                    join_obj, clauses.aliased_class,
                    onclause, isouter=False,
                    _left_memo=splicing,
                    _right_memo=path[-1].mapper
                )
            else:
                # only here if splicing == True
                return None

        target_join = self._splice_nested_inner_join(
            path, join_obj.right, clauses,
            onclause, join_obj._right_memo)
        if target_join is None:
            right_splice = False
            target_join = self._splice_nested_inner_join(
                path, join_obj.left, clauses,
                onclause, join_obj._left_memo)
            if target_join is None:
                # should only return None when recursively called,
                # e.g. splicing==True
                assert splicing is not False, \
                    "assertion failed attempting to produce joined eager loads"
                return None
        else:
            right_splice = True

        if right_splice:
            # for a right splice, attempt to flatten out
            # a JOIN b JOIN c JOIN .. to avoid needless
            # parenthesis nesting
            if not join_obj.isouter and not target_join.isouter:
                eagerjoin = join_obj._splice_into_center(target_join)
            else:
                eagerjoin = orm_util._ORMJoin(
                    join_obj.left, target_join,
                    join_obj.onclause, isouter=join_obj.isouter,
                    _left_memo=join_obj._left_memo)
        else:
            eagerjoin = orm_util._ORMJoin(
                target_join, join_obj.right,
                join_obj.onclause, isouter=join_obj.isouter,
                _right_memo=join_obj._right_memo)

        eagerjoin._target_adapter = target_join._target_adapter
        return eagerjoin

    def _create_eager_adapter(self, context, result, adapter, path, loadopt):
        user_defined_adapter = self._init_user_defined_eager_proc(
            loadopt, context) if loadopt else False

@@ -1395,17 +1479,16 @@ class JoinedLoader(AbstractRelationshipLoader):
        if decorator is None:
            return False

        try:
            self.mapper.identity_key_from_row(row, decorator)
        if self.mapper._result_has_identity_key(result, decorator):
            return decorator
        except KeyError:
        else:
            # no identity key - don't return a row
            # processor, will cause a degrade to lazy
            return False

    def create_row_processor(
            self, context, path, loadopt, mapper,
            row, adapter):
            result, adapter, populators):
        if not self.parent.class_manager[self.key].impl.supports_population:
            raise sa_exc.InvalidRequestError(
                "'%s' does not support object "

@@ -1417,36 +1500,40 @@ class JoinedLoader(AbstractRelationshipLoader):

        eager_adapter = self._create_eager_adapter(
            context,
            row,
            result,
            adapter, our_path, loadopt)

        if eager_adapter is not False:
            key = self.key

            _instance = loading.instance_processor(
            _instance = loading._instance_processor(
                self.mapper,
                context,
                result,
                our_path[self.mapper],
                eager_adapter)

            if not self.uselist:
                return self._create_scalar_loader(context, key, _instance)
                self._create_scalar_loader(context, key, _instance, populators)
            else:
                return self._create_collection_loader(context, key, _instance)
                self._create_collection_loader(
                    context, key, _instance, populators)
        else:
            return self.parent_property._get_strategy_by_cls(LazyLoader).\
            self.parent_property._get_strategy_by_cls(LazyLoader).\
                create_row_processor(
                    context, path, loadopt,
                    mapper, row, adapter)
                    mapper, result, adapter, populators)

    def _create_collection_loader(self, context, key, _instance):
    def _create_collection_loader(self, context, key, _instance, populators):
        def load_collection_from_joined_new_row(state, dict_, row):
            collection = attributes.init_state_collection(
                state, dict_, key)
            result_list = util.UniqueAppender(collection,
                                              'append_without_event')
            context.attributes[(state, key)] = result_list
            _instance(row, result_list)
            inst = _instance(row)
            if inst is not None:
                result_list.append(inst)

        def load_collection_from_joined_existing_row(state, dict_, row):
            if (state, key) in context.attributes:

@@ -1462,25 +1549,30 @@ class JoinedLoader(AbstractRelationshipLoader):
                collection,
                'append_without_event')
            context.attributes[(state, key)] = result_list
            _instance(row, result_list)
            inst = _instance(row)
            if inst is not None:
                result_list.append(inst)

        def load_collection_from_joined_exec(state, dict_, row):
            _instance(row, None)
            _instance(row)

        return load_collection_from_joined_new_row, \
            load_collection_from_joined_existing_row, \
            None, load_collection_from_joined_exec
        populators["new"].append((self.key, load_collection_from_joined_new_row))
        populators["existing"].append(
            (self.key, load_collection_from_joined_existing_row))
        if context.invoke_all_eagers:
            populators["eager"].append(
                (self.key, load_collection_from_joined_exec))

    def _create_scalar_loader(self, context, key, _instance):
    def _create_scalar_loader(self, context, key, _instance, populators):
        def load_scalar_from_joined_new_row(state, dict_, row):
            # set a scalar object instance directly on the parent
            # object, bypassing InstrumentedAttribute event handlers.
            dict_[key] = _instance(row, None)
            dict_[key] = _instance(row)

        def load_scalar_from_joined_existing_row(state, dict_, row):
            # call _instance on the row, even though the object has
            # been created, so that we further descend into properties
            existing = _instance(row, None)
            existing = _instance(row)
            if existing is not None \
                    and key in dict_ \
                    and existing is not dict_[key]:

@@ -1490,11 +1582,13 @@ class JoinedLoader(AbstractRelationshipLoader):
                % self)

        def load_scalar_from_joined_exec(state, dict_, row):
            _instance(row, None)
            _instance(row)

        return load_scalar_from_joined_new_row, \
            load_scalar_from_joined_existing_row, \
            None, load_scalar_from_joined_exec
        populators["new"].append((self.key, load_scalar_from_joined_new_row))
        populators["existing"].append(
            (self.key, load_scalar_from_joined_existing_row))
        if context.invoke_all_eagers:
            populators["eager"].append((self.key, load_scalar_from_joined_exec))


def single_parent_validator(desc, prop):


@@ -1,5 +1,4 @@
# orm/strategy_options.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -89,6 +88,7 @@ class Load(Generative, MapperOption):
        cloned.local_opts = {}
        return cloned

    _merge_into_path = False
    strategy = None
    propagate_to_loaders = False


@@ -162,11 +162,14 @@ class Load(Generative, MapperOption):
            ext_info = inspect(ac)

            path_element = ext_info.mapper
            existing = path.entity_path[prop].get(
                self.context, "path_with_polymorphic")
            if not ext_info.is_aliased_class:
                ac = orm_util.with_polymorphic(
                    ext_info.mapper.base_mapper,
                    ext_info.mapper, aliased=True,
                    _use_mapper_path=True)
                    _use_mapper_path=True,
                    _existing_alias=existing)
            path.entity_path[prop].set(
                self.context, "path_with_polymorphic", inspect(ac))
            path = path[prop][path_element]

@@ -177,6 +180,9 @@ class Load(Generative, MapperOption):
            path = path.entity_path
        return path

    def __str__(self):
        return "Load(strategy=%r)" % (self.strategy, )

    def _coerce_strat(self, strategy):
        if strategy is not None:
            strategy = tuple(sorted(strategy.items()))

@@ -209,7 +215,15 @@ class Load(Generative, MapperOption):
        cloned._set_path_strategy()

    def _set_path_strategy(self):
        if self.path.has_entity:
        if self._merge_into_path:
            # special helper for undefer_group
            existing = self.path.get(self.context, "loader")
            if existing:
                existing.local_opts.update(self.local_opts)
            else:
                self.path.set(self.context, "loader", self)

        elif self.path.has_entity:
            self.path.parent.set(self.context, "loader", self)
        else:
            self.path.set(self.context, "loader", self)

@@ -359,6 +373,7 @@ class _UnboundLoad(Load):
            return None

        token = start_path[0]

        if isinstance(token, util.string_types):
            entity = self._find_entity_basestring(query, token, raiseerr)
        elif isinstance(token, PropComparator):

@@ -402,10 +417,27 @@ class _UnboundLoad(Load):
        # prioritize "first class" options over those
        # that were "links in the chain", e.g. "x" and "y" in
        # someload("x.y.z") versus someload("x") / someload("x.y")
        if self._is_chain_link:
            effective_path.setdefault(context, "loader", loader)

        if effective_path.is_token:
            for path in effective_path.generate_for_superclasses():
                if self._merge_into_path:
                    # special helper for undefer_group
                    existing = path.get(context, "loader")
                    if existing:
                        existing.local_opts.update(self.local_opts)
                    else:
                        path.set(context, "loader", loader)
                elif self._is_chain_link:
                    path.setdefault(context, "loader", loader)
                else:
                    path.set(context, "loader", loader)
        else:
            effective_path.set(context, "loader", loader)
            # only supported for the undefer_group() wildcard opt
            assert not self._merge_into_path
            if self._is_chain_link:
                effective_path.setdefault(context, "loader", loader)
            else:
                effective_path.set(context, "loader", loader)

    def _find_entity_prop_comparator(self, query, token, mapper, raiseerr):
        if _is_aliased_class(mapper):

@@ -631,15 +663,47 @@ def joinedload(loadopt, attr, innerjoin=None):

        query(Order).options(joinedload(Order.user, innerjoin=True))

    If the joined-eager load is chained onto an existing LEFT OUTER JOIN,
    ``innerjoin=True`` will be bypassed and the join will continue to
    chain as LEFT OUTER JOIN so that the results don't change.  As an
    alternative, specify the value ``"nested"``.  This will instead nest the
    join on the right side, e.g. using the form "a LEFT OUTER JOIN
    (b JOIN c)".
    In order to chain multiple eager joins together where some may be
    OUTER and others INNER, right-nested joins are used to link them::

    .. versionadded:: 0.9.4 Added ``innerjoin="nested"`` option to support
       nesting of eager "inner" joins.
        query(A).options(
            joinedload(A.bs, innerjoin=False).
            joinedload(B.cs, innerjoin=True)
        )

    The above query, linking A.bs via "outer" join and B.cs via "inner" join
    would render the joins as "a LEFT OUTER JOIN (b JOIN c)".  When using
    SQLite, this form of JOIN is translated to use full subqueries as this
    syntax is otherwise not directly supported.

    The ``innerjoin`` flag can also be stated with the term ``"unnested"``.
    This will prevent joins from being right-nested, and will instead
    link an "innerjoin" eagerload to an "outerjoin" eagerload by bypassing
    the "inner" join.  Using this form as follows::

        query(A).options(
            joinedload(A.bs, innerjoin=False).
            joinedload(B.cs, innerjoin="unnested")
        )

    Joins will be rendered as "a LEFT OUTER JOIN b LEFT OUTER JOIN c", so that
    all of "a" is matched rather than being incorrectly limited by a "b" that
    does not contain a "c".

    .. note:: The "unnested" flag does **not** affect the JOIN rendered
       from a many-to-many association table, e.g. a table configured
       as :paramref:`.relationship.secondary`, to the target table; for
       correctness of results, these joins are always INNER and are
       therefore right-nested if linked to an OUTER join.

    .. versionadded:: 0.9.4 Added support for "nesting" of eager "inner"
       joins.  See :ref:`feature_2976`.

    .. versionchanged:: 1.0.0 ``innerjoin=True`` now implies
       ``innerjoin="nested"``, whereas in 0.9 it implied
       ``innerjoin="unnested"``.  In order to achieve the pre-1.0 "unnested"
       inner join behavior, use the value ``innerjoin="unnested"``.
       See :ref:`migration_3008`.

    .. note::
|
|
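The two chaining styles described in the docstring above, as a self-contained sketch; the ``A``/``B``/``C`` mappings here are hypothetical stand-ins matching the docstring's examples::

    from sqlalchemy import Column, ForeignKey, Integer, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session, joinedload, relationship

    Base = declarative_base()

    class A(Base):
        __tablename__ = 'a'
        id = Column(Integer, primary_key=True)
        bs = relationship('B')

    class B(Base):
        __tablename__ = 'b'
        id = Column(Integer, primary_key=True)
        a_id = Column(ForeignKey('a.id'))
        cs = relationship('C')

    class C(Base):
        __tablename__ = 'c'
        id = Column(Integer, primary_key=True)
        b_id = Column(ForeignKey('b.id'))

    session = Session(create_engine('sqlite://'))

    # right-nested: renders "a LEFT OUTER JOIN (b JOIN c)"
    nested = session.query(A).options(
        joinedload(A.bs, innerjoin=False).
        joinedload(B.cs, innerjoin=True))

    # unnested: renders "a LEFT OUTER JOIN b LEFT OUTER JOIN c"
    unnested = session.query(A).options(
        joinedload(A.bs, innerjoin=False).
        joinedload(B.cs, innerjoin="unnested"))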
@@ -979,10 +1043,11 @@ def undefer_group(loadopt, name):
        :func:`.orm.undefer`

    """
+    loadopt._merge_into_path = True
    return loadopt.set_column_strategy(
        "*",
        None,
-        {"undefer_group": name}
+        {"undefer_group_%s" % name: True}
    )
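Typical use of ``undefer_group()`` against a deferred column group; the ``Book`` mapping is a hypothetical illustration::

    from sqlalchemy import Column, Integer, Text, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session, deferred, undefer_group

    Base = declarative_base()

    class Book(Base):
        __tablename__ = 'book'
        id = Column(Integer, primary_key=True)
        # both columns belong to the deferred group "text"
        summary = deferred(Column(Text), group='text')
        excerpt = deferred(Column(Text), group='text')

    session = Session(create_engine('sqlite://'))

    # undefer every column in the "text" group up front
    q = session.query(Book).options(undefer_group('text'))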
@@ -1,5 +1,5 @@
# orm/sync.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -45,11 +45,28 @@ def populate(source, source_mapper, dest, dest_mapper,
        uowcommit.attributes[("pk_cascaded", dest, r)] = True


+def bulk_populate_inherit_keys(
+        source_dict, source_mapper, synchronize_pairs):
+    # a simplified version of populate() used by bulk insert mode
+    for l, r in synchronize_pairs:
+        try:
+            prop = source_mapper._columntoproperty[l]
+            value = source_dict[prop.key]
+        except exc.UnmappedColumnError:
+            _raise_col_to_prop(False, source_mapper, l, source_mapper, r)
+
+        try:
+            prop = source_mapper._columntoproperty[r]
+            source_dict[prop.key] = value
+        except exc.UnmappedColumnError:
+            _raise_col_to_prop(True, source_mapper, l, source_mapper, r)
+
+
def clear(dest, dest_mapper, synchronize_pairs):
    for l, r in synchronize_pairs:
        if r.primary_key and \
                dest_mapper._get_state_attr_by_column(
-                    dest, dest.dict, r) is not None:
+                    dest, dest.dict, r) not in orm_util._none_set:

            raise AssertionError(
                "Dependency rule tried to blank-out primary key "
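A minimal sketch of the bulk-insert path that exercises ``bulk_populate_inherit_keys()``, assuming a joined-table inheritance mapping; the ``Person``/``Engineer`` classes are illustrative::

    from sqlalchemy import (Column, ForeignKey, Integer, String,
                            create_engine)
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session

    Base = declarative_base()

    class Person(Base):
        __tablename__ = 'person'
        id = Column(Integer, primary_key=True)
        name = Column(String(50))
        type = Column(String(20))
        __mapper_args__ = {'polymorphic_on': type,
                           'polymorphic_identity': 'person'}

    class Engineer(Person):
        __tablename__ = 'engineer'
        id = Column(Integer, ForeignKey('person.id'), primary_key=True)
        specialty = Column(String(50))
        __mapper_args__ = {'polymorphic_identity': 'engineer'}

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = Session(engine)

    # the simplified populate step copies person.id into engineer.id
    # for each row; primary keys are given explicitly here
    session.bulk_save_objects([Engineer(id=1, name='e1', specialty='sql')])
    session.commit()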
@@ -68,7 +85,7 @@ def update(source, source_mapper, dest, old_prefix, synchronize_pairs):
            oldvalue = source_mapper._get_committed_attr_by_column(
                source.obj(), l)
            value = source_mapper._get_state_attr_by_column(
-                source, source.dict, l)
+                source, source.dict, l, passive=attributes.PASSIVE_OFF)
        except exc.UnmappedColumnError:
            _raise_col_to_prop(False, source_mapper, l, None, r)
        dest[r.key] = value
@@ -79,7 +96,7 @@ def populate_dict(source, source_mapper, dict_, synchronize_pairs):
    for l, r in synchronize_pairs:
        try:
            value = source_mapper._get_state_attr_by_column(
-                source, source.dict, l)
+                source, source.dict, l, passive=attributes.PASSIVE_OFF)
        except exc.UnmappedColumnError:
            _raise_col_to_prop(False, source_mapper, l, None, r)

@@ -1,5 +1,5 @@
# orm/unitofwork.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -16,6 +16,7 @@ organizes them in order of dependency, and executes.
from .. import util, event
from ..util import topological
from . import attributes, persistence, util as orm_util
+import itertools


def track_cascade_events(descriptor, prop):
@@ -379,14 +380,19 @@ class UOWTransaction(object):
        execute() method has succeeded and the transaction has been committed.

        """
+        if not self.states:
+            return
+
        states = set(self.states)
        isdel = set(
            s for (s, (isdelete, listonly)) in self.states.items()
            if isdelete
        )
        other = states.difference(isdel)
-        self.session._remove_newly_deleted(isdel)
-        self.session._register_newly_persistent(other)
+        if isdel:
+            self.session._remove_newly_deleted(isdel)
+        if other:
+            self.session._register_newly_persistent(other)


class IterateMappersMixin(object):
@@ -1,5 +1,5 @@
# orm/util.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -13,9 +13,9 @@ from . import attributes
import re

from .base import instance_str, state_str, state_class_str, attribute_str, \
-    state_attribute_str, object_mapper, object_state, _none_set
+    state_attribute_str, object_mapper, object_state, _none_set, _never_set
from .base import class_mapper, _class_to_mapper
-from .base import _InspectionAttr
+from .base import InspectionAttr
from .path_registry import PathRegistry

all_cascades = frozenset(("delete", "delete-orphan", "all", "merge",
@@ -30,21 +30,19 @@ class CascadeOptions(frozenset):
        'all', 'none', 'delete-orphan'])
    _allowed_cascades = all_cascades

-    def __new__(cls, arg):
-        values = set([
-            c for c
-            in re.split('\s*,\s*', arg or "")
-            if c
-        ])
+    __slots__ = (
+        'save_update', 'delete', 'refresh_expire', 'merge',
+        'expunge', 'delete_orphan')
+
+    def __new__(cls, value_list):
+        if isinstance(value_list, util.string_types) or value_list is None:
+            return cls.from_string(value_list)
+        values = set(value_list)
        if values.difference(cls._allowed_cascades):
            raise sa_exc.ArgumentError(
                "Invalid cascade option(s): %s" %
                ", ".join([repr(x) for x in
-                    sorted(
-                        values.difference(cls._allowed_cascades)
-                    )])
-            )
+                           sorted(values.difference(cls._allowed_cascades))]))

        if "all" in values:
            values.update(cls._add_w_all_cascades)
@@ -70,6 +68,15 @@ class CascadeOptions(frozenset):
            ",".join([x for x in sorted(self)])
        )

+    @classmethod
+    def from_string(cls, arg):
+        values = [
+            c for c
+            in re.split('\s*,\s*', arg or "")
+            if c
+        ]
+        return cls(values)
+

def _validator_events(
        desc, key, validator, include_removes, include_backrefs):
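Both construction paths accepted by the reworked ``__new__()`` can be exercised directly; ``CascadeOptions`` is an internal class, so this is illustrative only::

    from sqlalchemy.orm.util import CascadeOptions

    # a comma-separated string is routed through from_string()
    opts_a = CascadeOptions("save-update, merge")
    # an iterable of individual flags is consumed directly
    opts_b = CascadeOptions(["save-update", "merge"])

    assert opts_a == opts_b
    assert opts_a.save_update and opts_a.merge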
@@ -270,15 +277,14 @@ first()


class ORMAdapter(sql_util.ColumnAdapter):
-    """Extends ColumnAdapter to accept ORM entities.
-
-    The selectable is extracted from the given entity,
-    and the AliasedClass if any is referenced.
+    """ColumnAdapter subclass which excludes adaptation of entities from
+    non-matching mappers.

    """

    def __init__(self, entity, equivalents=None, adapt_required=False,
-                 chain_to=None):
+                 chain_to=None, allow_label_resolve=True,
+                 anonymize_labels=False):
        info = inspection.inspect(entity)

        self.mapper = info.mapper
@@ -288,16 +294,18 @@ class ORMAdapter(sql_util.ColumnAdapter):
            self.aliased_class = entity
        else:
            self.aliased_class = None
-        sql_util.ColumnAdapter.__init__(self, selectable,
-                                        equivalents, chain_to,
-                                        adapt_required=adapt_required)
-
-    def replace(self, elem):
+        sql_util.ColumnAdapter.__init__(
+            self, selectable, equivalents, chain_to,
+            adapt_required=adapt_required,
+            allow_label_resolve=allow_label_resolve,
+            anonymize_labels=anonymize_labels,
+            include_fn=self._include_fn
+        )
+
+    def _include_fn(self, elem):
        entity = elem._annotations.get('parentmapper', None)
-        if not entity or entity.isa(self.mapper):
-            return sql_util.ColumnAdapter.replace(self, elem)
-        else:
-            return None
+        return not entity or entity.isa(self.mapper)


class AliasedClass(object):
@@ -354,6 +362,7 @@ class AliasedClass(object):
        if alias is None:
            alias = mapper._with_polymorphic_selectable.alias(
                name=name, flat=flat)
+
        self._aliased_insp = AliasedInsp(
            self,
            mapper,
@@ -412,7 +421,7 @@ class AliasedClass(object):
            id(self), self._aliased_insp._target.__name__)


-class AliasedInsp(_InspectionAttr):
+class AliasedInsp(InspectionAttr):
    """Provide an inspection interface for an
    :class:`.AliasedClass` object.

@@ -460,9 +469,9 @@ class AliasedInsp(_InspectionAttr):
        self._base_alias = _base_alias or self
        self._use_mapper_path = _use_mapper_path

-        self._adapter = sql_util.ClauseAdapter(
+        self._adapter = sql_util.ColumnAdapter(
            selectable, equivalents=mapper._equivalent_columns,
-            adapt_on_names=adapt_on_names)
+            adapt_on_names=adapt_on_names, anonymize_labels=True)

        self._adapt_on_names = adapt_on_names
        self._target = mapper.class_
@@ -521,14 +530,18 @@ class AliasedInsp(_InspectionAttr):
    def _adapt_element(self, elem):
        return self._adapter.traverse(elem).\
            _annotate({
-                'parententity': self.entity,
+                'parententity': self,
                'parentmapper': self.mapper}
            )

    def _entity_for_mapper(self, mapper):
        self_poly = self.with_polymorphic_mappers
        if mapper in self_poly:
-            return getattr(self.entity, mapper.class_.__name__)._aliased_insp
+            if mapper is self.mapper:
+                return self
+            else:
+                return getattr(
+                    self.entity, mapper.class_.__name__)._aliased_insp
        elif mapper.isa(self.mapper):
            return self
        else:
@@ -536,8 +549,13 @@ class AliasedInsp(_InspectionAttr):
            mapper, self)

    def __repr__(self):
-        return '<AliasedInsp at 0x%x; %s>' % (
-            id(self), self.class_.__name__)
+        if self.with_polymorphic_mappers:
+            with_poly = "(%s)" % ", ".join(
+                mp.class_.__name__ for mp in self.with_polymorphic_mappers)
+        else:
+            with_poly = ""
+        return '<AliasedInsp at 0x%x; %s%s>' % (
+            id(self), self.class_.__name__, with_poly)


inspection._inspects(AliasedClass)(lambda target: target._aliased_insp)
@@ -641,7 +659,8 @@ def aliased(element, alias=None, name=None, flat=False, adapt_on_names=False):
def with_polymorphic(base, classes, selectable=False,
                     flat=False,
                     polymorphic_on=None, aliased=False,
-                     innerjoin=False, _use_mapper_path=False):
+                     innerjoin=False, _use_mapper_path=False,
+                     _existing_alias=None):
    """Produce an :class:`.AliasedClass` construct which specifies
    columns for descendant mappers of the given base.

@@ -706,6 +725,16 @@ def with_polymorphic(base, classes, selectable=False,
        only be specified if querying for one specific subtype only
    """
    primary_mapper = _class_to_mapper(base)
+    if _existing_alias:
+        assert _existing_alias.mapper is primary_mapper
+        classes = util.to_set(classes)
+        new_classes = set([
+            mp.class_ for mp in
+            _existing_alias.with_polymorphic_mappers])
+        if classes == new_classes:
+            return _existing_alias
+        else:
+            classes = classes.union(new_classes)
    mappers, selectable = primary_mapper.\
        _with_polymorphic_args(classes, selectable,
                               innerjoin=innerjoin)
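The private ``_existing_alias`` parameter lets internal callers reuse a compatible alias; public use is unchanged. For reference, assuming the joined-inheritance ``Person``/``Engineer`` mapping sketched earlier and an existing ``session``::

    from sqlalchemy.orm import with_polymorphic

    # one SELECT that includes Engineer columns alongside Person
    pe = with_polymorphic(Person, [Engineer])
    q = session.query(pe).filter(pe.Engineer.specialty == 'sql')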
@@ -751,7 +780,10 @@ class _ORMJoin(expression.Join):

    __visit_name__ = expression.Join.__visit_name__

-    def __init__(self, left, right, onclause=None, isouter=False):
+    def __init__(
+            self,
+            left, right, onclause=None, isouter=False,
+            _left_memo=None, _right_memo=None):

        left_info = inspection.inspect(left)
        left_orm_info = getattr(left, '_joined_from_info', left_info)
@@ -761,6 +793,9 @@ class _ORMJoin(expression.Join):

        self._joined_from_info = right_info

+        self._left_memo = _left_memo
+        self._right_memo = _right_memo
+
        if isinstance(onclause, util.string_types):
            onclause = getattr(left_orm_info.entity, onclause)

@@ -802,6 +837,43 @@ class _ORMJoin(expression.Join):

        expression.Join.__init__(self, left, right, onclause, isouter)

+        if not prop and getattr(right_info, 'mapper', None) \
+                and right_info.mapper.single:
+            # if single inheritance target and we are using a manual
+            # or implicit ON clause, augment it the same way we'd augment the
+            # WHERE.
+            single_crit = right_info.mapper._single_table_criterion
+            if single_crit is not None:
+                if right_info.is_aliased_class:
+                    single_crit = right_info._adapter.traverse(single_crit)
+                self.onclause = self.onclause & single_crit
+
+    def _splice_into_center(self, other):
+        """Splice a join into the center.
+
+        Given join(a, b) and join(b, c), return join(a, b).join(c)
+
+        """
+        leftmost = other
+        while isinstance(leftmost, sql.Join):
+            leftmost = leftmost.left
+
+        assert self.right is leftmost
+
+        left = _ORMJoin(
+            self.left, other.left,
+            self.onclause, isouter=self.isouter,
+            _left_memo=self._left_memo,
+            _right_memo=other._left_memo
+        )
+
+        return _ORMJoin(
+            left,
+            other.right,
+            other.onclause, isouter=other.isouter,
+            _right_memo=other._right_memo
+        )
+
    def join(self, right, onclause=None, isouter=False, join_to_left=None):
        return _ORMJoin(self, right, onclause, isouter)

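The splice described in the ``_splice_into_center()`` docstring can be pictured with plain Core joins; the ``a``/``b``/``c`` tables below are hypothetical, and the last line shows the shape produced from join(a, b) and join(b, c)::

    from sqlalchemy import column, join, table

    a = table('a', column('id'))
    b = table('b', column('id'), column('a_id'))
    c = table('c', column('id'), column('b_id'))

    j1 = join(a, b, a.c.id == b.c.a_id)    # join(a, b)
    j2 = join(b, c, b.c.id == c.c.b_id)    # join(b, c)

    # splicing j2 into j1 yields the shape join(a, b).join(c)
    spliced = j1.join(c, b.c.id == c.c.b_id)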
@@ -894,9 +966,7 @@ def with_parent(instance, prop):
    elif isinstance(prop, attributes.QueryableAttribute):
        prop = prop.property

-    return prop.compare(operators.eq,
-                        instance,
-                        value_is_parent=True)
+    return prop._with_parent(instance)


def has_identity(object):
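``with_parent()`` itself is public; the change above only reroutes it through the relationship's ``_with_parent()`` helper. Typical use, assuming ``User``/``Address`` mappings with a ``User.addresses`` relationship, a loaded ``some_user``, and an existing ``session``::

    from sqlalchemy.orm import with_parent

    # criterion selecting the Address rows that belong to some_user
    # via User.addresses, without rendering a JOIN to the parent table
    q = session.query(Address).filter(
        with_parent(some_user, User.addresses))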