update sqlalchemy
parent 7365367c61
commit 3b436646a2
362 changed files with 37720 additions and 11021 deletions
@@ -1,5 +1,5 @@
# engine/__init__.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -72,6 +72,7 @@ from .base import (
)

from .result import (
+BaseRowProxy,
BufferedColumnResultProxy,
BufferedColumnRow,
BufferedRowResultProxy,
@@ -248,6 +249,34 @@ def create_engine(*args, **kwargs):
Microsoft SQL Server. Set this to ``False`` to disable
the automatic usage of RETURNING.

+:param isolation_level: this string parameter is interpreted by various
+dialects in order to affect the transaction isolation level of the
+database connection. The parameter essentially accepts some subset of
+these string arguments: ``"SERIALIZABLE"``, ``"REPEATABLE_READ"``,
+``"READ_COMMITTED"``, ``"READ_UNCOMMITTED"`` and ``"AUTOCOMMIT"``.
+Behavior here varies per backend, and
+individual dialects should be consulted directly.
+
+Note that the isolation level can also be set on a per-:class:`.Connection`
+basis as well, using the
+:paramref:`.Connection.execution_options.isolation_level`
+feature.
+
+.. seealso::
+
+:attr:`.Connection.default_isolation_level` - view default level
+
+:paramref:`.Connection.execution_options.isolation_level`
+- set per :class:`.Connection` isolation level
+
+:ref:`SQLite Transaction Isolation <sqlite_isolation_level>`
+
+:ref:`Postgresql Transaction Isolation <postgresql_isolation_level>`
+
+:ref:`MySQL Transaction Isolation <mysql_isolation_level>`
+
+:ref:`session_transaction_isolation` - for the ORM
+
:param label_length=None: optional integer value which limits
the size of dynamically generated column labels to that many
characters. If less than 6, labels are generated as
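
For context, the ``isolation_level`` argument documented above is used like
this -- a minimal sketch, assuming a PostgreSQL server and hypothetical
credentials::

    from sqlalchemy import create_engine

    # engine-wide isolation level; the accepted strings vary per dialect
    engine = create_engine(
        "postgresql://scott:tiger@localhost/test",
        isolation_level="SERIALIZABLE")
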
@@ -276,6 +305,17 @@ def create_engine(*args, **kwargs):
be used instead. Can be used for testing of DBAPIs as well as to
inject "mock" DBAPI implementations into the :class:`.Engine`.

+:param paramstyle=None: The `paramstyle <http://legacy.python.org/dev/peps/pep-0249/#paramstyle>`_
+to use when rendering bound parameters. This style defaults to the
+one recommended by the DBAPI itself, which is retrieved from the
+``.paramstyle`` attribute of the DBAPI. However, most DBAPIs accept
+more than one paramstyle, and in particular it may be desirable
+to change a "named" paramstyle into a "positional" one, or vice versa.
+When this attribute is passed, it should be one of the values
+``"qmark"``, ``"numeric"``, ``"named"``, ``"format"`` or
+``"pyformat"``, and should correspond to a parameter style known
+to be supported by the DBAPI in use.
+
:param pool=None: an already-constructed instance of
:class:`~sqlalchemy.pool.Pool`, such as a
:class:`~sqlalchemy.pool.QueuePool` instance. If non-None, this
@@ -349,14 +389,33 @@ def create_engine(*args, **kwargs):
def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
"""Create a new Engine instance using a configuration dictionary.

-The dictionary is typically produced from a config file where keys
-are prefixed, such as sqlalchemy.url, sqlalchemy.echo, etc. The
-'prefix' argument indicates the prefix to be searched for.
+The dictionary is typically produced from a config file.
+
+The keys of interest to ``engine_from_config()`` should be prefixed, e.g.
+``sqlalchemy.url``, ``sqlalchemy.echo``, etc. The 'prefix' argument
+indicates the prefix to be searched for. Each matching key (after the
+prefix is stripped) is treated as though it were the corresponding keyword
+argument to a :func:`.create_engine` call.
+
+The only required key is (assuming the default prefix) ``sqlalchemy.url``,
+which provides the :ref:`database URL <database_urls>`.

A select set of keyword arguments will be "coerced" to their
-expected type based on string values. In a future release, this
-functionality will be expanded and include dialect-specific
-arguments.
+expected type based on string values. The set of arguments
+is extensible per-dialect using the ``engine_config_types`` accessor.
+
+:param configuration: A dictionary (typically produced from a config file,
+but this is not a requirement). Items whose keys start with the value
+of 'prefix' will have that prefix stripped, and will then be passed to
+:ref:`create_engine`.
+
+:param prefix: Prefix to match and then strip from keys
+in 'configuration'.
+
+:param kwargs: Each keyword argument to ``engine_from_config()`` itself
+overrides the corresponding item taken from the 'configuration'
+dictionary. Keyword arguments should *not* be prefixed.

"""

options = dict((key[len(prefix):], configuration[key])
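
For context, a minimal sketch of ``engine_from_config()`` in use; the
configuration dictionary here is hypothetical, e.g. as parsed from an .ini
file, and the string values are coerced per ``engine_config_types``::

    from sqlalchemy import engine_from_config

    config = {
        'sqlalchemy.url': 'sqlite:///some.db',
        'sqlalchemy.echo': 'true',          # coerced to bool
        'sqlalchemy.pool_recycle': '3600',  # coerced to int
    }
    engine = engine_from_config(config, prefix='sqlalchemy.')
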
@@ -1,5 +1,5 @@
# engine/base.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -45,7 +45,7 @@ class Connection(Connectable):
"""

def __init__(self, engine, connection=None, close_with_result=False,
-_branch=False, _execution_options=None,
+_branch_from=None, _execution_options=None,
_dispatch=None,
_has_events=None):
"""Construct a new Connection.
@@ -57,48 +57,80 @@ class Connection(Connectable):
"""
self.engine = engine
self.dialect = engine.dialect
-self.__connection = connection or engine.raw_connection()
-self.__transaction = None
-self.should_close_with_result = close_with_result
-self.__savepoint_seq = 0
-self.__branch = _branch
-self.__invalid = False
-self.__can_reconnect = True
-if _dispatch:
-self.dispatch = _dispatch
-elif _has_events is None:
-# if _has_events is sent explicitly as False,
-# then don't join the dispatch of the engine; we don't
-# want to handle any of the engine's events in that case.
-self.dispatch = self.dispatch._join(engine.dispatch)
-self._has_events = _has_events or (
-_has_events is None and engine._has_events)
+self.__branch_from = _branch_from
+self.__branch = _branch_from is not None

-self._echo = self.engine._should_log_info()
-if _execution_options:
-self._execution_options =\
-engine._execution_options.union(_execution_options)
+if _branch_from:
+self.__connection = connection
+self._execution_options = _execution_options
+self._echo = _branch_from._echo
+self.should_close_with_result = False
+self.dispatch = _dispatch
+self._has_events = _branch_from._has_events
else:
+self.__connection = connection \
+if connection is not None else engine.raw_connection()
+self.__transaction = None
+self.__savepoint_seq = 0
+self.should_close_with_result = close_with_result
+self.__invalid = False
+self.__can_reconnect = True
+self._echo = self.engine._should_log_info()
+
+if _has_events is None:
+# if _has_events is sent explicitly as False,
+# then don't join the dispatch of the engine; we don't
+# want to handle any of the engine's events in that case.
+self.dispatch = self.dispatch._join(engine.dispatch)
+self._has_events = _has_events or (
+_has_events is None and engine._has_events)
+
+assert not _execution_options
self._execution_options = engine._execution_options

if self._has_events or self.engine._has_events:
-self.dispatch.engine_connect(self, _branch)
+self.dispatch.engine_connect(self, self.__branch)

def _branch(self):
"""Return a new Connection which references this Connection's
engine and connection; but does not have close_with_result enabled,
and also whose close() method does nothing.

-This is used to execute "sub" statements within a single execution,
-usually an INSERT statement.
+The Core uses this very sparingly, only in the case of
+custom SQL default functions that are to be INSERTed as the
+primary key of a row where we need to get the value back, so we have
+to invoke it distinctly - this is a very uncommon case.
+
+Userland code accesses _branch() when the connect() or
+contextual_connect() methods are called. The branched connection
+acts as much as possible like the parent, except that it stays
+connected when a close() event occurs.

"""
+if self.__branch_from:
+return self.__branch_from._branch()
+else:
+return self.engine._connection_cls(
+self.engine,
+self.__connection,
+_branch_from=self,
+_execution_options=self._execution_options,
+_has_events=self._has_events,
+_dispatch=self.dispatch)

+@property
+def _root(self):
+"""return the 'root' connection.
+
+Returns 'self' if this connection is not a branch, else
+returns the root connection from which we ultimately branched.
+
+"""
-return self.engine._connection_cls(
-self.engine,
-self.__connection,
-_branch=True,
-_has_events=self._has_events,
-_dispatch=self.dispatch)
+if self.__branch_from:
+return self.__branch_from
+else:
+return self

def _clone(self):
"""Create a shallow copy of this Connection.
@@ -169,14 +201,19 @@ class Connection(Connectable):
used by the ORM internally supersedes a cache dictionary
specified here.

-:param isolation_level: Available on: Connection.
+:param isolation_level: Available on: :class:`.Connection`.
Set the transaction isolation level for
-the lifespan of this connection. Valid values include
-those string values accepted by the ``isolation_level``
-parameter passed to :func:`.create_engine`, and are
-database specific, including those for :ref:`sqlite_toplevel`,
-:ref:`postgresql_toplevel` - see those dialect's documentation
-for further info.
+the lifespan of this :class:`.Connection` object (*not* the
+underlying DBAPI connection, for which the level is reset
+to its original setting upon termination of this
+:class:`.Connection` object).
+
+Valid values include
+those string values accepted by the
+:paramref:`.create_engine.isolation_level`
+parameter passed to :func:`.create_engine`. These levels are
+semi-database specific; see individual dialect documentation for
+valid levels.

Note that this option necessarily affects the underlying
DBAPI connection for the lifespan of the originating
@@ -185,6 +222,41 @@ class Connection(Connectable):
is returned to the connection pool, i.e.
the :meth:`.Connection.close` method is called.

+.. warning:: The ``isolation_level`` execution option should
+**not** be used when a transaction is already established, that
+is, the :meth:`.Connection.begin` method or similar has been
+called. A database cannot change the isolation level on a
+transaction in progress, and different DBAPIs and/or
+SQLAlchemy dialects may implicitly roll back or commit
+the transaction, or not affect the connection at all.
+
+.. versionchanged:: 0.9.9 A warning is emitted when the
+``isolation_level`` execution option is used after a
+transaction has been started with :meth:`.Connection.begin`
+or similar.
+
+.. note:: The ``isolation_level`` execution option is implicitly
+reset if the :class:`.Connection` is invalidated, e.g. via
+the :meth:`.Connection.invalidate` method, or if a
+disconnection error occurs. The new connection produced after
+the invalidation will not have the isolation level re-applied
+to it automatically.
+
+.. seealso::
+
+:paramref:`.create_engine.isolation_level`
+- set per :class:`.Engine` isolation level
+
+:meth:`.Connection.get_isolation_level` - view current level
+
+:ref:`SQLite Transaction Isolation <sqlite_isolation_level>`
+
+:ref:`Postgresql Transaction Isolation <postgresql_isolation_level>`
+
+:ref:`MySQL Transaction Isolation <mysql_isolation_level>`
+
+:ref:`session_transaction_isolation` - for the ORM
+
:param no_parameters: When ``True``, if the final parameter
list or dictionary is totally empty, will invoke the
statement on the cursor as ``cursor.execute(statement)``,
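
For context, a minimal sketch of the per-:class:`.Connection` form described
above; ``engine`` and ``some_table`` are assumed to exist::

    with engine.connect() as conn:
        # must be called before begin(); reverts when the connection
        # is returned to the pool
        conn = conn.execution_options(isolation_level="SERIALIZABLE")
        result = conn.execute(some_table.select())
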
@@ -224,24 +296,101 @@ class Connection(Connectable):
def invalidated(self):
"""Return True if this connection was invalidated."""

-return self.__invalid
+return self._root.__invalid

@property
def connection(self):
-"The underlying DB-API connection managed by this Connection."
+"""The underlying DB-API connection managed by this Connection.
+
+.. seealso::
+
+:ref:`dbapi_connections`
+
+"""

try:
return self.__connection
except AttributeError:
-return self._revalidate_connection()
+try:
+return self._revalidate_connection()
+except Exception as e:
+self._handle_dbapi_exception(e, None, None, None, None)
+
+def get_isolation_level(self):
+"""Return the current isolation level assigned to this
+:class:`.Connection`.
+
+This will typically be the default isolation level as determined
+by the dialect, unless if the
+:paramref:`.Connection.execution_options.isolation_level`
+feature has been used to alter the isolation level on a
+per-:class:`.Connection` basis.
+
+This attribute will typically perform a live SQL operation in order
+to procure the current isolation level, so the value returned is the
+actual level on the underlying DBAPI connection regardless of how
+this state was set. Compare to the
+:attr:`.Connection.default_isolation_level` accessor
+which returns the dialect-level setting without performing a SQL
+query.
+
+.. versionadded:: 0.9.9
+
+.. seealso::
+
+:attr:`.Connection.default_isolation_level` - view default level
+
+:paramref:`.create_engine.isolation_level`
+- set per :class:`.Engine` isolation level
+
+:paramref:`.Connection.execution_options.isolation_level`
+- set per :class:`.Connection` isolation level
+
+"""
+try:
+return self.dialect.get_isolation_level(self.connection)
+except Exception as e:
+self._handle_dbapi_exception(e, None, None, None, None)
+
+@property
+def default_isolation_level(self):
+"""The default isolation level assigned to this :class:`.Connection`.
+
+This is the isolation level setting that the :class:`.Connection`
+has when first procured via the :meth:`.Engine.connect` method.
+This level stays in place until the
+:paramref:`.Connection.execution_options.isolation_level` is used
+to change the setting on a per-:class:`.Connection` basis.
+
+Unlike :meth:`.Connection.get_isolation_level`, this attribute is set
+ahead of time from the first connection procured by the dialect,
+so SQL query is not invoked when this accessor is called.
+
+.. versionadded:: 0.9.9
+
+.. seealso::
+
+:meth:`.Connection.get_isolation_level` - view current level
+
+:paramref:`.create_engine.isolation_level`
+- set per :class:`.Engine` isolation level
+
+:paramref:`.Connection.execution_options.isolation_level`
+- set per :class:`.Connection` isolation level
+
+"""
+return self.dialect.default_isolation_level

def _revalidate_connection(self):
+if self.__branch_from:
+return self.__branch_from._revalidate_connection()
if self.__can_reconnect and self.__invalid:
if self.__transaction is not None:
raise exc.InvalidRequestError(
"Can't reconnect until invalid "
"transaction is rolled back")
-self.__connection = self.engine.raw_connection()
+self.__connection = self.engine.raw_connection(_connection=self)
self.__invalid = False
return self.__connection
raise exc.ResourceClosedError("This Connection is closed")
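
For context, the difference between the two accessors added above -- a
minimal sketch assuming an ``engine`` already exists::

    with engine.connect() as conn:
        # dialect-level default, computed once up front; no SQL emitted
        print(conn.default_isolation_level)

        # live value read from the DBAPI connection; usually emits SQL
        print(conn.get_isolation_level())
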
@@ -343,16 +492,17 @@ class Connection(Connectable):
:ref:`pool_connection_invalidation`

"""

if self.invalidated:
return

if self.closed:
raise exc.ResourceClosedError("This Connection is closed")

-if self._connection_is_valid:
-self.__connection.invalidate(exception)
-del self.__connection
-self.__invalid = True
+if self._root._connection_is_valid:
+self._root.__connection.invalidate(exception)
+del self._root.__connection
+self._root.__invalid = True

def detach(self):
"""Detach the underlying DB-API connection from its connection pool.
@@ -415,6 +565,8 @@ class Connection(Connectable):
:class:`.Engine`.

"""
+if self.__branch_from:
+return self.__branch_from.begin()

if self.__transaction is None:
self.__transaction = RootTransaction(self)
@@ -436,6 +588,9 @@ class Connection(Connectable):
See also :meth:`.Connection.begin`,
:meth:`.Connection.begin_twophase`.
"""
+if self.__branch_from:
+return self.__branch_from.begin_nested()
+
if self.__transaction is None:
self.__transaction = RootTransaction(self)
else:
@@ -459,6 +614,9 @@ class Connection(Connectable):

"""

+if self.__branch_from:
+return self.__branch_from.begin_twophase(xid=xid)
+
if self.__transaction is not None:
raise exc.InvalidRequestError(
"Cannot start a two phase transaction when a transaction "
@@ -479,10 +637,11 @@ class Connection(Connectable):

def in_transaction(self):
"""Return True if a transaction is in progress."""

-return self.__transaction is not None
+return self._root.__transaction is not None

def _begin_impl(self, transaction):
+assert not self.__branch_from
+
if self._echo:
self.engine.logger.info("BEGIN (implicit)")
@@ -497,6 +656,8 @@ class Connection(Connectable):
self._handle_dbapi_exception(e, None, None, None, None)

def _rollback_impl(self):
+assert not self.__branch_from
+
if self._has_events or self.engine._has_events:
self.dispatch.rollback(self)
@@ -516,6 +677,8 @@ class Connection(Connectable):
self.__transaction = None

def _commit_impl(self, autocommit=False):
+assert not self.__branch_from
+
if self._has_events or self.engine._has_events:
self.dispatch.commit(self)
@@ -532,6 +695,8 @@ class Connection(Connectable):
self.__transaction = None

def _savepoint_impl(self, name=None):
+assert not self.__branch_from
+
if self._has_events or self.engine._has_events:
self.dispatch.savepoint(self, name)
@@ -543,6 +708,8 @@ class Connection(Connectable):
return name

def _rollback_to_savepoint_impl(self, name, context):
+assert not self.__branch_from
+
if self._has_events or self.engine._has_events:
self.dispatch.rollback_savepoint(self, name, context)
@@ -551,6 +718,8 @@ class Connection(Connectable):
self.__transaction = context

def _release_savepoint_impl(self, name, context):
+assert not self.__branch_from
+
if self._has_events or self.engine._has_events:
self.dispatch.release_savepoint(self, name, context)
@@ -559,6 +728,8 @@ class Connection(Connectable):
self.__transaction = context

def _begin_twophase_impl(self, transaction):
+assert not self.__branch_from
+
if self._echo:
self.engine.logger.info("BEGIN TWOPHASE (implicit)")
if self._has_events or self.engine._has_events:
@@ -571,6 +742,8 @@ class Connection(Connectable):
self.connection._reset_agent = transaction

def _prepare_twophase_impl(self, xid):
+assert not self.__branch_from
+
if self._has_events or self.engine._has_events:
self.dispatch.prepare_twophase(self, xid)
@@ -579,6 +752,8 @@ class Connection(Connectable):
self.engine.dialect.do_prepare_twophase(self, xid)

def _rollback_twophase_impl(self, xid, is_prepared):
+assert not self.__branch_from
+
if self._has_events or self.engine._has_events:
self.dispatch.rollback_twophase(self, xid, is_prepared)
@@ -595,6 +770,8 @@ class Connection(Connectable):
self.__transaction = None

def _commit_twophase_impl(self, xid, is_prepared):
+assert not self.__branch_from
+
if self._has_events or self.engine._has_events:
self.dispatch.commit_twophase(self, xid, is_prepared)
@@ -610,8 +787,8 @@ class Connection(Connectable):
self.__transaction = None

def _autorollback(self):
-if not self.in_transaction():
-self._rollback_impl()
+if not self._root.in_transaction():
+self._root._rollback_impl()

def close(self):
"""Close this :class:`.Connection`.
@@ -632,13 +809,21 @@ class Connection(Connectable):
and will allow no further operations.

"""
+if self.__branch_from:
+try:
+del self.__connection
+except AttributeError:
+pass
+finally:
+self.__can_reconnect = False
+return
try:
conn = self.__connection
except AttributeError:
pass
else:
-if not self.__branch:
-conn.close()
+conn.close()
if conn._reset_agent is self.__transaction:
conn._reset_agent = None
@@ -670,7 +855,7 @@ class Connection(Connectable):
a subclass of :class:`.Executable`, such as a
:func:`~.expression.select` construct
* a :class:`.FunctionElement`, such as that generated
-by :attr:`.func`, will be automatically wrapped in
+by :data:`.func`, will be automatically wrapped in
a SELECT statement, which is then executed.
* a :class:`.DDLElement` object
* a :class:`.DefaultGenerator` object
@@ -798,17 +983,16 @@ class Connection(Connectable):
distilled_params = _distill_params(multiparams, params)
if distilled_params:
# note this is usually dict but we support RowProxy
-# as well; but dict.keys() as an iterator is OK
+# as well; but dict.keys() as an iterable is OK
keys = distilled_params[0].keys()
else:
keys = []

dialect = self.dialect
if 'compiled_cache' in self._execution_options:
-key = dialect, elem, tuple(keys), len(distilled_params) > 1
-if key in self._execution_options['compiled_cache']:
-compiled_sql = self._execution_options['compiled_cache'][key]
-else:
+key = dialect, elem, tuple(sorted(keys)), len(distilled_params) > 1
+compiled_sql = self._execution_options['compiled_cache'].get(key)
+if compiled_sql is None:
compiled_sql = elem.compile(
dialect=dialect, column_keys=keys,
inline=len(distilled_params) > 1)
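
For context, the ``compiled_cache`` execution option exercised by the logic
above -- a minimal sketch; ``engine`` and ``some_table`` are assumed::

    from sqlalchemy import bindparam

    cache = {}
    conn = engine.connect().execution_options(compiled_cache=cache)
    stmt = some_table.select().where(some_table.c.id == bindparam('id'))
    conn.execute(stmt, id=1)  # compiled form stored under the cache key
    conn.execute(stmt, id=2)  # cache hit; compilation is skipped
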
@@ -888,9 +1072,10 @@ class Connection(Connectable):

context = constructor(dialect, self, conn, *args)
except Exception as e:
-self._handle_dbapi_exception(e,
-util.text_type(statement), parameters,
-None, None)
+self._handle_dbapi_exception(
+e,
+util.text_type(statement), parameters,
+None, None)

if context.compiled:
context.pre_exec()
@@ -914,36 +1099,39 @@ class Connection(Connectable):
"%r",
sql_util._repr_params(parameters, batches=10)
)

+evt_handled = False
try:
if context.executemany:
-for fn in () if not self.dialect._has_events \
-else self.dialect.dispatch.do_executemany:
-if fn(cursor, statement, parameters, context):
-break
-else:
+if self.dialect._has_events:
+for fn in self.dialect.dispatch.do_executemany:
+if fn(cursor, statement, parameters, context):
+evt_handled = True
+break
+if not evt_handled:
self.dialect.do_executemany(
cursor,
statement,
parameters,
context)

elif not parameters and context.no_parameters:
-for fn in () if not self.dialect._has_events \
-else self.dialect.dispatch.do_execute_no_params:
-if fn(cursor, statement, context):
-break
-else:
+if self.dialect._has_events:
+for fn in self.dialect.dispatch.do_execute_no_params:
+if fn(cursor, statement, context):
+evt_handled = True
+break
+if not evt_handled:
self.dialect.do_execute_no_params(
cursor,
statement,
context)

else:
-for fn in () if not self.dialect._has_events \
-else self.dialect.dispatch.do_execute:
-if fn(cursor, statement, parameters, context):
-break
-else:
+if self.dialect._has_events:
+for fn in self.dialect.dispatch.do_execute:
+if fn(cursor, statement, parameters, context):
+evt_handled = True
+break
+if not evt_handled:
self.dialect.do_execute(
cursor,
statement,
@@ -967,36 +1155,17 @@ class Connection(Connectable):
if context.compiled:
context.post_exec()

-if context.isinsert and not context.executemany:
-context.post_insert()
+if context.is_crud or context.is_text:
+result = context._setup_crud_result_proxy()
+else:
+result = context.get_result_proxy()
+if result._metadata is None:
+result._soft_close(_autoclose_connection=False)

-# create a resultproxy, get rowcount/implicit RETURNING
-# rows, close cursor if no further results pending
-result = context.get_result_proxy()
-if context.isinsert:
-if context._is_implicit_returning:
-context._fetch_implicit_returning(result)
-result.close(_autoclose_connection=False)
-result._metadata = None
-elif not context._is_explicit_returning:
-result.close(_autoclose_connection=False)
-result._metadata = None
-elif context.isupdate and context._is_implicit_returning:
-context._fetch_implicit_update_returning(result)
-result.close(_autoclose_connection=False)
-result._metadata = None
+if context.should_autocommit and self._root.__transaction is None:
+self._root._commit_impl(autocommit=True)

-elif result._metadata is None:
-# no results, get rowcount
-# (which requires open cursor on some drivers
-# such as kintersbasdb, mxodbc),
-result.rowcount
-result.close(_autoclose_connection=False)
-
-if self.__transaction is None and context.should_autocommit:
-self._commit_impl(autocommit=True)
-
-if result.closed and self.should_close_with_result:
+if result._soft_closed and self.should_close_with_result:
self.close()

return result
@@ -1055,8 +1224,6 @@ class Connection(Connectable):
"""
try:
cursor.close()
-except (SystemExit, KeyboardInterrupt):
-raise
except Exception:
# log the error through the connection pool's logger.
self.engine.pool.logger.error(
@@ -1071,7 +1238,6 @@ class Connection(Connectable):
parameters,
cursor,
context):
-
exc_info = sys.exc_info()

if context and context.exception is None:
@@ -1081,16 +1247,22 @@ class Connection(Connectable):
self._is_disconnect = \
isinstance(e, self.dialect.dbapi.Error) and \
not self.closed and \
-self.dialect.is_disconnect(e, self.__connection, cursor)
+self.dialect.is_disconnect(
+e,
+self.__connection if not self.invalidated else None,
+cursor)
if context:
context.is_disconnect = self._is_disconnect

+invalidate_pool_on_disconnect = True
+
if self._reentrant_error:
util.raise_from_cause(
exc.DBAPIError.instance(statement,
parameters,
e,
-self.dialect.dbapi.Error),
+self.dialect.dbapi.Error,
+dialect=self.dialect),
exc_info
)
self._reentrant_error = True
@@ -1106,13 +1278,16 @@ class Connection(Connectable):
parameters,
e,
self.dialect.dbapi.Error,
-connection_invalidated=self._is_disconnect)
+connection_invalidated=self._is_disconnect,
+dialect=self.dialect)
else:
sqlalchemy_exception = None

newraise = None

-if self._has_events or self.engine._has_events:
+if (self._has_events or self.engine._has_events) and \
+not self._execution_options.get(
+'skip_user_error_events', False):
# legacy dbapi_error event
if should_wrap and context:
self.dispatch.dbapi_error(self,
@@ -1124,7 +1299,8 @@ class Connection(Connectable):

# new handle_error event
ctx = ExceptionContextImpl(
-e, sqlalchemy_exception, self, cursor, statement,
+e, sqlalchemy_exception, self.engine,
+self, cursor, statement,
parameters, context, self._is_disconnect)

for fn in self.dispatch.handle_error:
@@ -1144,6 +1320,11 @@ class Connection(Connectable):
sqlalchemy_exception.connection_invalidated = \
self._is_disconnect = ctx.is_disconnect

+# set up potentially user-defined value for
+# invalidate pool.
+invalidate_pool_on_disconnect = \
+ctx.invalidate_pool_on_disconnect
+
if should_wrap and context:
context.handle_dbapi_exception(e)
@@ -1166,12 +1347,66 @@ class Connection(Connectable):
del self._reentrant_error
if self._is_disconnect:
del self._is_disconnect
-dbapi_conn_wrapper = self.connection
-self.engine.pool._invalidate(dbapi_conn_wrapper, e)
-self.invalidate(e)
+if not self.invalidated:
+dbapi_conn_wrapper = self.__connection
+if invalidate_pool_on_disconnect:
+self.engine.pool._invalidate(dbapi_conn_wrapper, e)
+self.invalidate(e)
if self.should_close_with_result:
self.close()

+@classmethod
+def _handle_dbapi_exception_noconnection(cls, e, dialect, engine):
+
+exc_info = sys.exc_info()
+
+is_disconnect = dialect.is_disconnect(e, None, None)
+
+should_wrap = isinstance(e, dialect.dbapi.Error)
+
+if should_wrap:
+sqlalchemy_exception = exc.DBAPIError.instance(
+None,
+None,
+e,
+dialect.dbapi.Error,
+connection_invalidated=is_disconnect)
+else:
+sqlalchemy_exception = None
+
+newraise = None
+
+if engine._has_events:
+ctx = ExceptionContextImpl(
+e, sqlalchemy_exception, engine, None, None, None,
+None, None, is_disconnect)
+for fn in engine.dispatch.handle_error:
+try:
+# handler returns an exception;
+# call next handler in a chain
+per_fn = fn(ctx)
+if per_fn is not None:
+ctx.chained_exception = newraise = per_fn
+except Exception as _raised:
+# handler raises an exception - stop processing
+newraise = _raised
+break
+
+if sqlalchemy_exception and \
+is_disconnect != ctx.is_disconnect:
+sqlalchemy_exception.connection_invalidated = \
+is_disconnect = ctx.is_disconnect
+
+if newraise:
+util.raise_from_cause(newraise, exc_info)
+elif should_wrap:
+util.raise_from_cause(
+sqlalchemy_exception,
+exc_info
+)
+else:
+util.reraise(*exc_info)

def default_schema_name(self):
return self.engine.dialect.get_default_schema_name(self)
@@ -1250,8 +1485,9 @@ class ExceptionContextImpl(ExceptionContext):
"""Implement the :class:`.ExceptionContext` interface."""

def __init__(self, exception, sqlalchemy_exception,
-connection, cursor, statement, parameters,
+engine, connection, cursor, statement, parameters,
context, is_disconnect):
+self.engine = engine
self.connection = connection
self.sqlalchemy_exception = sqlalchemy_exception
self.original_exception = exception
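
For context, the newly-added ``engine`` attribute is visible from the
``handle_error`` event hook even when no :class:`.Connection` could be
created -- a minimal sketch, assuming an ``engine`` already exists::

    import logging

    from sqlalchemy import event

    log = logging.getLogger(__name__)

    @event.listens_for(engine, "handle_error")
    def on_error(context):
        # context.engine is populated even if context.connection is None
        log.warning("error on %s: %s",
                    context.engine.url, context.original_exception)
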
@@ -1295,9 +1531,13 @@ class Transaction(object):

def __init__(self, connection, parent):
self.connection = connection
-self._parent = parent or self
+self._actual_parent = parent
self.is_active = True

+@property
+def _parent(self):
+return self._actual_parent or self
+
def close(self):
"""Close this :class:`.Transaction`.
@@ -1575,29 +1815,28 @@ class Engine(Connectable, log.Identified):
def dispose(self):
"""Dispose of the connection pool used by this :class:`.Engine`.

+This has the effect of fully closing all **currently checked in**
+database connections. Connections that are still checked out
+will **not** be closed, however they will no longer be associated
+with this :class:`.Engine`, so when they are closed individually,
+eventually the :class:`.Pool` which they are associated with will
+be garbage collected and they will be closed out fully, if
+not already closed on checkin.
+
A new connection pool is created immediately after the old one has
been disposed. This new pool, like all SQLAlchemy connection pools,
does not make any actual connections to the database until one is
-first requested.
+first requested, so as long as the :class:`.Engine` isn't used again,
+no new connections will be made.

-This method has two general use cases:
+.. seealso::

-* When a dropped connection is detected, it is assumed that all
-connections held by the pool are potentially dropped, and
-the entire pool is replaced.
-
-* An application may want to use :meth:`dispose` within a test
-suite that is creating multiple engines.
-
-It is critical to note that :meth:`dispose` does **not** guarantee
-that the application will release all open database connections - only
-those connections that are checked into the pool are closed.
-Connections which remain checked out or have been detached from
-the engine are not affected.
+:ref:`engine_disposal`

"""
self.pool.dispose()
self.pool = self.pool.recreate()
+self.dispatch.engine_disposed(self)

def _execute_default(self, default):
with self.contextual_connect() as conn:
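
For context, a minimal sketch of :meth:`.Engine.dispose` in use, e.g. after
an ``os.fork()``; ``engine`` is assumed to exist::

    engine.dispose()  # close checked-in connections, recreate the pool

    # the engine remains usable; the fresh pool connects lazily
    with engine.connect() as conn:
        conn.execute("select 1")
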
@@ -1795,10 +2034,11 @@ class Engine(Connectable, log.Identified):

"""

-return self._connection_cls(self,
-self.pool.connect(),
-close_with_result=close_with_result,
-**kwargs)
+return self._connection_cls(
+self,
+self._wrap_pool_connect(self.pool.connect, None),
+close_with_result=close_with_result,
+**kwargs)

def table_names(self, schema=None, connection=None):
"""Return a list of all table names available in the database.
@@ -1828,7 +2068,18 @@ class Engine(Connectable, log.Identified):
"""
return self.run_callable(self.dialect.has_table, table_name, schema)

-def raw_connection(self):
+def _wrap_pool_connect(self, fn, connection):
+dialect = self.dialect
+try:
+return fn()
+except dialect.dbapi.Error as e:
+if connection is None:
+Connection._handle_dbapi_exception_noconnection(
+e, dialect, self)
+else:
+util.reraise(*sys.exc_info())
+
+def raw_connection(self, _connection=None):
"""Return a "raw" DBAPI connection from the connection pool.

The returned object is a proxied version of the DBAPI
@@ -1839,13 +2090,18 @@ class Engine(Connectable, log.Identified):
for real.

This method provides direct DBAPI connection access for
-special situations. In most situations, the :class:`.Connection`
-object should be used, which is procured using the
-:meth:`.Engine.connect` method.
+special situations when the API provided by :class:`.Connection`
+is not needed. When a :class:`.Connection` object is already
+present, the DBAPI connection is available using
+the :attr:`.Connection.connection` accessor.
+
+.. seealso::
+
+:ref:`dbapi_connections`

"""
-return self.pool.unique_connection()
+return self._wrap_pool_connect(
+self.pool.unique_connection, _connection)


class OptionEngine(Engine):
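
For context, the standard pattern for :meth:`.Engine.raw_connection`, which
is unchanged by the error-wrapping added above; ``engine`` is assumed::

    dbapi_conn = engine.raw_connection()
    try:
        cursor = dbapi_conn.cursor()
        cursor.execute("SELECT 1")
        print(cursor.fetchone())
        cursor.close()
        dbapi_conn.commit()
    finally:
        dbapi_conn.close()  # returns the connection to the pool
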
@@ -1,5 +1,5 @@
# engine/default.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -61,14 +61,13 @@ class DefaultDialect(interfaces.Dialect):

engine_config_types = util.immutabledict([
('convert_unicode', util.bool_or_str('force')),
-('pool_timeout', int),
+('pool_timeout', util.asint),
('echo', util.bool_or_str('debug')),
('echo_pool', util.bool_or_str('debug')),
-('pool_recycle', int),
-('pool_size', int),
-('max_overflow', int),
-('pool_threadlocal', bool),
-('use_native_unicode', bool),
+('pool_recycle', util.asint),
+('pool_size', util.asint),
+('max_overflow', util.asint),
+('pool_threadlocal', util.asbool),
])

# if the NUMERIC type
@@ -157,6 +156,15 @@ class DefaultDialect(interfaces.Dialect):

reflection_options = ()

+dbapi_exception_translation_map = util.immutabledict()
+"""mapping used in the extremely unusual case that a DBAPI's
+published exceptions don't actually have the __name__ that they
+are linked towards.
+
+.. versionadded:: 1.0.5
+
+"""
+
def __init__(self, convert_unicode=False,
encoding='utf-8', paramstyle=None, dbapi=None,
implicit_returning=None,
@@ -395,6 +403,12 @@ class DefaultDialect(interfaces.Dialect):
self._set_connection_isolation(connection, opts['isolation_level'])

def _set_connection_isolation(self, connection, level):
+if connection.in_transaction():
+util.warn(
+"Connection is already established with a Transaction; "
+"setting isolation_level may implicitly rollback or commit "
+"the existing transaction, or have no effect until "
+"next transaction")
self.set_isolation_level(connection.connection, level)
connection.connection._connection_record.\
finalize_callback.append(self.reset_isolation_level)
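
For context, the ordering that avoids the warning added above -- a minimal
sketch assuming an ``engine`` already exists::

    conn = engine.connect()
    # set the level first, while no transaction is in progress ...
    conn = conn.execution_options(isolation_level="SERIALIZABLE")
    # ... then begin; reversing these two steps triggers the warning
    trans = conn.begin()
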
@@ -452,14 +466,13 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
isinsert = False
isupdate = False
isdelete = False
+is_crud = False
+is_text = False
isddl = False
executemany = False
-result_map = None
compiled = None
statement = None
-postfetch_cols = None
-prefetch_cols = None
-returning_cols = None
+result_column_struct = None
_is_implicit_returning = False
_is_explicit_returning = False
@@ -472,10 +485,9 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
"""Initialize execution context for a DDLElement construct."""

self = cls.__new__(cls)
-self.dialect = dialect
self.root_connection = connection
self._dbapi_connection = dbapi_connection
-self.engine = connection.engine
+self.dialect = connection.dialect

self.compiled = compiled = compiled_ddl
self.isddl = True
@@ -507,25 +519,20 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
"""Initialize execution context for a Compiled construct."""

self = cls.__new__(cls)
-self.dialect = dialect
self.root_connection = connection
self._dbapi_connection = dbapi_connection
-self.engine = connection.engine
+self.dialect = connection.dialect

self.compiled = compiled

if not compiled.can_execute:
raise exc.ArgumentError("Not an executable clause")

-self.execution_options = compiled.statement._execution_options
-if connection._execution_options:
-self.execution_options = dict(self.execution_options)
-self.execution_options.update(connection._execution_options)
+self.execution_options = compiled.statement._execution_options.union(
+connection._execution_options)

# compiled clauseelement. process bind params, process table defaults,
# track collections used by ResultProxy to target and process results

-self.result_map = compiled.result_map
+self.result_column_struct = (
+compiled._result_columns, compiled._ordered_columns)

self.unicode_statement = util.text_type(compiled)
if not dialect.supports_unicode_statements:
@@ -537,11 +544,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
self.isinsert = compiled.isinsert
self.isupdate = compiled.isupdate
self.isdelete = compiled.isdelete
-
-if self.isinsert or self.isupdate or self.isdelete:
-self._is_explicit_returning = bool(compiled.statement._returning)
-self._is_implicit_returning = bool(
-compiled.returning and not compiled.statement._returning)
+self.is_text = compiled.isplaintext

if not parameters:
self.compiled_parameters = [compiled.construct_params()]
@@ -553,11 +556,19 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
self.executemany = len(parameters) > 1

self.cursor = self.create_cursor()
-if self.isinsert or self.isupdate:
-self.postfetch_cols = self.compiled.postfetch
-self.prefetch_cols = self.compiled.prefetch
-self.returning_cols = self.compiled.returning
-self.__process_defaults()
+
+if self.isinsert or self.isupdate or self.isdelete:
+self.is_crud = True
+self._is_explicit_returning = bool(compiled.statement._returning)
+self._is_implicit_returning = bool(
+compiled.returning and not compiled.statement._returning)
+
+if not self.isdelete:
+if self.compiled.prefetch:
+if self.executemany:
+self._process_executemany_defaults()
+else:
+self._process_executesingle_defaults()

processors = compiled._bind_processors
@@ -577,21 +588,28 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
else:
encode = not dialect.supports_unicode_statements
for compiled_params in self.compiled_parameters:
-param = {}

if encode:
-for key in compiled_params:
-if key in processors:
-param[dialect._encoder(key)[0]] = \
-processors[key](compiled_params[key])
-else:
-param[dialect._encoder(key)[0]] = \
-compiled_params[key]
+param = dict(
+(
+dialect._encoder(key)[0],
+processors[key](compiled_params[key])
+if key in processors
+else compiled_params[key]
+)
+for key in compiled_params
+)
else:
-for key in compiled_params:
-if key in processors:
-param[key] = processors[key](compiled_params[key])
-else:
-param[key] = compiled_params[key]
+param = dict(
+(
+key,
+processors[key](compiled_params[key])
+if key in processors
+else compiled_params[key]
+)
+for key in compiled_params
+)

parameters.append(param)
self.parameters = dialect.execute_sequence_format(parameters)
@@ -603,10 +621,10 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
"""Initialize execution context for a string SQL statement."""

self = cls.__new__(cls)
-self.dialect = dialect
self.root_connection = connection
self._dbapi_connection = dbapi_connection
-self.engine = connection.engine
+self.dialect = connection.dialect
+self.is_text = True

# plain text statement
self.execution_options = connection._execution_options
@@ -647,21 +665,32 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
"""Initialize execution context for a ColumnDefault construct."""

self = cls.__new__(cls)
-self.dialect = dialect
self.root_connection = connection
self._dbapi_connection = dbapi_connection
-self.engine = connection.engine
+self.dialect = connection.dialect
self.execution_options = connection._execution_options
self.cursor = self.create_cursor()
return self

@util.memoized_property
-def no_parameters(self):
-return self.execution_options.get("no_parameters", False)
+def engine(self):
+return self.root_connection.engine

@util.memoized_property
-def is_crud(self):
-return self.isinsert or self.isupdate or self.isdelete
+def postfetch_cols(self):
+return self.compiled.postfetch
+
+@util.memoized_property
+def prefetch_cols(self):
+return self.compiled.prefetch
+
+@util.memoized_property
+def returning_cols(self):
+self.compiled.returning
+
+@util.memoized_property
+def no_parameters(self):
+return self.execution_options.get("no_parameters", False)

@util.memoized_property
def should_autocommit(self):
@@ -778,16 +807,51 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
def supports_sane_multi_rowcount(self):
return self.dialect.supports_sane_multi_rowcount

-def post_insert(self):
-if not self._is_implicit_returning and \
-not self._is_explicit_returning and \
-not self.compiled.inline and \
-self.dialect.postfetch_lastrowid and \
-(not self.inserted_primary_key or
-None in self.inserted_primary_key):
+def _setup_crud_result_proxy(self):
+if self.isinsert and \
+not self.executemany:
+if not self._is_implicit_returning and \
+not self.compiled.inline and \
+self.dialect.postfetch_lastrowid:

-table = self.compiled.statement.table
-lastrowid = self.get_lastrowid()
+self._setup_ins_pk_from_lastrowid()
+
+elif not self._is_implicit_returning:
+self._setup_ins_pk_from_empty()
+
+result = self.get_result_proxy()
+
+if self.isinsert:
+if self._is_implicit_returning:
+row = result.fetchone()
+self.returned_defaults = row
+self._setup_ins_pk_from_implicit_returning(row)
+result._soft_close(_autoclose_connection=False)
+result._metadata = None
+elif not self._is_explicit_returning:
+result._soft_close(_autoclose_connection=False)
+result._metadata = None
+elif self.isupdate and self._is_implicit_returning:
+row = result.fetchone()
+self.returned_defaults = row
+result._soft_close(_autoclose_connection=False)
+result._metadata = None
+
+elif result._metadata is None:
+# no results, get rowcount
+# (which requires open cursor on some drivers
+# such as kintersbasdb, mxodbc)
+result.rowcount
+result._soft_close(_autoclose_connection=False)
+return result
+
+def _setup_ins_pk_from_lastrowid(self):
+key_getter = self.compiled._key_getters_for_crud_column[2]
table = self.compiled.statement.table
+compiled_params = self.compiled_parameters[0]
+
lastrowid = self.get_lastrowid()
+if lastrowid is not None:
autoinc_col = table._autoincrement_column
if autoinc_col is not None:
# apply type post processors to the lastrowid
@@ -795,35 +859,44 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
self.dialect, None)
if proc is not None:
lastrowid = proc(lastrowid)

self.inserted_primary_key = [
-lastrowid if c is autoinc_col else v
-for c, v in zip(
-table.primary_key,
-self.inserted_primary_key)
+lastrowid if c is autoinc_col else
+compiled_params.get(key_getter(c), None)
+for c in table.primary_key
]
+else:
+# don't have a usable lastrowid, so
+# do the same as _setup_ins_pk_from_empty
+self.inserted_primary_key = [
+compiled_params.get(key_getter(c), None)
+for c in table.primary_key
+]

-def _fetch_implicit_returning(self, resultproxy):
+def _setup_ins_pk_from_empty(self):
+key_getter = self.compiled._key_getters_for_crud_column[2]
table = self.compiled.statement.table
-row = resultproxy.fetchone()
+compiled_params = self.compiled_parameters[0]
+self.inserted_primary_key = [
+compiled_params.get(key_getter(c), None)
+for c in table.primary_key
+]

-ipk = []
-for c, v in zip(table.primary_key, self.inserted_primary_key):
-if v is not None:
-ipk.append(v)
-else:
-ipk.append(row[c])
+def _setup_ins_pk_from_implicit_returning(self, row):
+key_getter = self.compiled._key_getters_for_crud_column[2]
+table = self.compiled.statement.table
+compiled_params = self.compiled_parameters[0]

-self.inserted_primary_key = ipk
-self.returned_defaults = row
-
-def _fetch_implicit_update_returning(self, resultproxy):
-row = resultproxy.fetchone()
-self.returned_defaults = row
+self.inserted_primary_key = [
+row[col] if value is None else value
+for col, value in [
+(col, compiled_params.get(key_getter(col), None))
+for col in table.primary_key
+]
+]

def lastrow_has_defaults(self):
return (self.isinsert or self.isupdate) and \
-bool(self.postfetch_cols)
+bool(self.compiled.postfetch)

def set_input_sizes(self, translate=None, exclude_types=None):
"""Given a cursor and ClauseParameters, call the appropriate
@@ -901,58 +974,53 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
else:
return self._exec_default(column.onupdate, column.type)

-def __process_defaults(self):
-"""Generate default values for compiled insert/update statements,
-and generate inserted_primary_key collection.
-"""
-
+def _process_executemany_defaults(self):
key_getter = self.compiled._key_getters_for_crud_column[2]

-if self.executemany:
-if len(self.compiled.prefetch):
-scalar_defaults = {}
+prefetch = self.compiled.prefetch
+scalar_defaults = {}

-# pre-determine scalar Python-side defaults
-# to avoid many calls of get_insert_default()/
-# get_update_default()
-for c in self.prefetch_cols:
-if self.isinsert and c.default and c.default.is_scalar:
-scalar_defaults[c] = c.default.arg
-elif self.isupdate and c.onupdate and c.onupdate.is_scalar:
-scalar_defaults[c] = c.onupdate.arg
+# pre-determine scalar Python-side defaults
+# to avoid many calls of get_insert_default()/
+# get_update_default()
+for c in prefetch:
+if self.isinsert and c.default and c.default.is_scalar:
+scalar_defaults[c] = c.default.arg
+elif self.isupdate and c.onupdate and c.onupdate.is_scalar:
+scalar_defaults[c] = c.onupdate.arg

-for param in self.compiled_parameters:
-self.current_parameters = param
-for c in self.prefetch_cols:
-if c in scalar_defaults:
-val = scalar_defaults[c]
-elif self.isinsert:
-val = self.get_insert_default(c)
-else:
-val = self.get_update_default(c)
-if val is not None:
-param[key_getter(c)] = val
-del self.current_parameters
-else:
-self.current_parameters = compiled_parameters = \
-self.compiled_parameters[0]

-for c in self.compiled.prefetch:
-if self.isinsert:
+for param in self.compiled_parameters:
+self.current_parameters = param
+for c in prefetch:
+if c in scalar_defaults:
+val = scalar_defaults[c]
+elif self.isinsert:
+val = self.get_insert_default(c)
+else:
+val = self.get_update_default(c)

if val is not None:
-compiled_parameters[key_getter(c)] = val
-del self.current_parameters
+param[key_getter(c)] = val
+del self.current_parameters

+def _process_executesingle_defaults(self):
+key_getter = self.compiled._key_getters_for_crud_column[2]
+prefetch = self.compiled.prefetch
+self.current_parameters = compiled_parameters = \
+self.compiled_parameters[0]

+for c in prefetch:
+if self.isinsert:
-self.inserted_primary_key = [
-self.compiled_parameters[0].get(key_getter(c), None)
-for c in self.compiled.
-statement.table.primary_key
-]
+if c.default and \
+not c.default.is_sequence and c.default.is_scalar:
+val = c.default.arg
+else:
+val = self.get_insert_default(c)
+else:
+val = self.get_update_default(c)

+if val is not None:
+compiled_parameters[key_getter(c)] = val
+del self.current_parameters


DefaultDialect.execution_ctx_cls = DefaultExecutionContext
@@ -1,5 +1,5 @@
# engine/interfaces.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -150,6 +150,16 @@ class Dialect(object):
This will prevent types.Boolean from generating a CHECK
constraint when that type is used.

+dbapi_exception_translation_map
+A dictionary of names that will contain as values the names of
+pep-249 exceptions ("IntegrityError", "OperationalError", etc)
+keyed to alternate class names, to support the case where a
+DBAPI has exception classes that aren't named as they are
+referred to (e.g. IntegrityError = MyException). In the vast
+majority of cases this dictionary is empty.
+
+.. versionadded:: 1.0.5
+
"""

_has_events = False
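
For context, a minimal sketch of how a dialect might populate the map
described above; the DBAPI and its exception name here are hypothetical::

    from sqlalchemy import util
    from sqlalchemy.engine.default import DefaultDialect

    class MyDialect(DefaultDialect):
        # the DBAPI binds its IntegrityError under the class name
        # "MyIntegrityError"; map that name to its pep-249 equivalent
        dbapi_exception_translation_map = util.immutabledict({
            "MyIntegrityError": "IntegrityError",
        })
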
@@ -242,7 +252,9 @@ class Dialect(object):

sequence
a dictionary of the form
-{'name' : str, 'start' :int, 'increment': int}
+{'name' : str, 'start' :int, 'increment': int, 'minvalue': int,
+'maxvalue': int, 'nominvalue': bool, 'nomaxvalue': bool,
+'cycle': bool}

Additional column attributes may be present.
"""
|
||||
|
|
@@ -308,7 +320,15 @@ class Dialect(object):
    def get_table_names(self, connection, schema=None, **kw):
        """Return a list of table names for `schema`."""

        raise NotImplementedError
        raise NotImplementedError()

    def get_temp_table_names(self, connection, schema=None, **kw):
        """Return a list of temporary table names on the given connection,
        if supported by the underlying backend.

        """

        raise NotImplementedError()

    def get_view_names(self, connection, schema=None, **kw):
        """Return a list of all view names available in the database.
@@ -319,6 +339,14 @@ class Dialect(object):

        raise NotImplementedError()

    def get_temp_view_names(self, connection, schema=None, **kw):
        """Return a list of temporary view names on the given connection,
        if supported by the underlying backend.

        """

        raise NotImplementedError()

    def get_view_definition(self, connection, view_name, schema=None, **kw):
        """Return view definition.
@@ -638,20 +666,120 @@ class Dialect(object):
        return None

    def reset_isolation_level(self, dbapi_conn):
        """Given a DBAPI connection, revert its isolation to the default."""
        """Given a DBAPI connection, revert its isolation to the default.

        Note that this is a dialect-level method which is used as part
        of the implementation of the :class:`.Connection` and
        :class:`.Engine`
        isolation level facilities; these APIs should be preferred for
        most typical use cases.

        .. seealso::

            :meth:`.Connection.get_isolation_level` - view current level

            :attr:`.Connection.default_isolation_level` - view default level

            :paramref:`.Connection.execution_options.isolation_level` -
            set per :class:`.Connection` isolation level

            :paramref:`.create_engine.isolation_level` -
            set per :class:`.Engine` isolation level

        """

        raise NotImplementedError()

    def set_isolation_level(self, dbapi_conn, level):
        """Given a DBAPI connection, set its isolation level."""
        """Given a DBAPI connection, set its isolation level.

        Note that this is a dialect-level method which is used as part
        of the implementation of the :class:`.Connection` and
        :class:`.Engine`
        isolation level facilities; these APIs should be preferred for
        most typical use cases.

        .. seealso::

            :meth:`.Connection.get_isolation_level` - view current level

            :attr:`.Connection.default_isolation_level` - view default level

            :paramref:`.Connection.execution_options.isolation_level` -
            set per :class:`.Connection` isolation level

            :paramref:`.create_engine.isolation_level` -
            set per :class:`.Engine` isolation level

        """

        raise NotImplementedError()

    def get_isolation_level(self, dbapi_conn):
        """Given a DBAPI connection, return its isolation level."""
        """Given a DBAPI connection, return its isolation level.

        When working with a :class:`.Connection` object, the corresponding
        DBAPI connection may be procured using the
        :attr:`.Connection.connection` accessor.

        Note that this is a dialect-level method which is used as part
        of the implementation of the :class:`.Connection` and
        :class:`.Engine` isolation level facilities;
        these APIs should be preferred for most typical use cases.


        .. seealso::

            :meth:`.Connection.get_isolation_level` - view current level

            :attr:`.Connection.default_isolation_level` - view default level

            :paramref:`.Connection.execution_options.isolation_level` -
            set per :class:`.Connection` isolation level

            :paramref:`.create_engine.isolation_level` -
            set per :class:`.Engine` isolation level


        """

        raise NotImplementedError()
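As the new docstrings note, these dialect-level methods are normally reached through the Connection and Engine facilities. A minimal sketch of the preferred API, with a Postgresql URL used only as a placeholder::

    from sqlalchemy import create_engine

    engine = create_engine("postgresql://scott:tiger@localhost/test")

    with engine.connect() as conn:
        # execution_options() ultimately invokes dialect.set_isolation_level()
        conn = conn.execution_options(isolation_level="SERIALIZABLE")
        print(conn.get_isolation_level())      # current level
        print(conn.default_isolation_level)    # engine-wide default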
    @classmethod
    def get_dialect_cls(cls, url):
        """Given a URL, return the :class:`.Dialect` that will be used.

        This is a hook that allows an external plugin to provide functionality
        around an existing dialect, by allowing the plugin to be loaded
        from the url based on an entrypoint, and then the plugin returns
        the actual dialect to be used.

        By default this just returns the cls.

        .. versionadded:: 1.0.3

        """
        return cls

    @classmethod
    def engine_created(cls, engine):
        """A convenience hook called before returning the final :class:`.Engine`.

        If the dialect returned a different class from the
        :meth:`.get_dialect_cls`
        method, then the hook is called on both classes, first on
        the dialect class returned by the :meth:`.get_dialect_cls` method and
        then on the class on which the method was called.

        The hook should be used by dialects and/or wrappers to apply special
        events to the engine or its components.  In particular, it allows
        a dialect-wrapping class to apply dialect-level events.

        .. versionadded:: 1.0.3

        """
        pass
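A sketch of how a wrapping plugin might use the two hooks above; the class and its behavior are hypothetical, and the setuptools entrypoint registration that would make it loadable from a URL is omitted::

    from sqlalchemy import event
    from sqlalchemy.dialects.sqlite.pysqlite import SQLiteDialect_pysqlite

    class WrappingDialect(SQLiteDialect_pysqlite):

        @classmethod
        def get_dialect_cls(cls, url):
            # the entrypoint may hand back a different class
            # as the actual dialect to be used
            return SQLiteDialect_pysqlite

        @classmethod
        def engine_created(cls, engine):
            # called on both classes; a wrapper can attach events here
            event.listen(engine, 'connect', lambda dbapi_conn, rec: None)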
class ExecutionContext(object):
    """A messenger object for a Dialect that corresponds to a single

@@ -901,7 +1029,23 @@ class ExceptionContext(object):
    connection = None
    """The :class:`.Connection` in use during the exception.

    This member is always present.
    This member is present, except in the case of a failure when
    first connecting.

    .. seealso::

        :attr:`.ExceptionContext.engine`


    """

    engine = None
    """The :class:`.Engine` in use during the exception.

    This member should always be present, even in the case of a failure
    when first connecting.

    .. versionadded:: 1.0.0

    """
@@ -988,3 +1132,21 @@ class ExceptionContext(object):
    changing this flag.

    """

    invalidate_pool_on_disconnect = True
    """Represent whether all connections in the pool should be invalidated
    when a "disconnect" condition is in effect.

    Setting this flag to False within the scope of the
    :meth:`.ConnectionEvents.handle_error` event will have the effect such
    that the full collection of connections in the pool will not be
    invalidated during a disconnect; only the current connection that is the
    subject of the error will actually be invalidated.

    The purpose of this flag is for custom disconnect-handling schemes where
    the invalidation of other connections in the pool is to be performed
    based on other conditions, or even on a per-connection basis.

    .. versionadded:: 1.0.3

    """
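A sketch of the intended use of this flag, assuming an ``engine`` is already constructed::

    from sqlalchemy import event

    @event.listens_for(engine, "handle_error")
    def limit_invalidation(context):
        # on a disconnect, invalidate only the failing connection
        # rather than the entire pool
        if context.is_disconnect:
            context.invalidate_pool_on_disconnect = False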
@@ -1,5 +1,5 @@
# engine/reflection.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -173,7 +173,14 @@ class Inspector(object):
        passed as ``None``.  For special quoting, use :class:`.quoted_name`.

        :param order_by: Optional, may be the string "foreign_key" to sort
         the result on foreign key dependencies.
         the result on foreign key dependencies.  Does not automatically
         resolve cycles, and will raise :class:`.CircularDependencyError`
         if cycles exist.

         .. deprecated:: 1.0.0 - see
            :meth:`.Inspector.get_sorted_table_and_fkc_names` for a version
            of this which resolves foreign key cycles between tables
            automatically.

         .. versionchanged:: 0.8 the "foreign_key" sorting sorts tables
            in order of dependee to dependent; that is, in creation

@@ -183,6 +190,8 @@ class Inspector(object):

        .. seealso::

            :meth:`.Inspector.get_sorted_table_and_fkc_names`

            :attr:`.MetaData.sorted_tables`

        """
@@ -201,6 +210,88 @@ class Inspector(object):
        tnames = list(topological.sort(tuples, tnames))
        return tnames

    def get_sorted_table_and_fkc_names(self, schema=None):
        """Return dependency-sorted table and foreign key constraint names
        referred to within a particular schema.

        This will yield 2-tuples of
        ``(tablename, [(tname, fkname), (tname, fkname), ...])``
        consisting of table names in CREATE order grouped with the foreign key
        constraint names that are not detected as belonging to a cycle.
        The final element
        will be ``(None, [(tname, fkname), (tname, fkname), ..])``
        which will consist of remaining
        foreign key constraint names that would require a separate CREATE
        step after-the-fact, based on dependencies between tables.

        .. versionadded:: 1.0.0

        .. seealso::

            :meth:`.Inspector.get_table_names`

            :func:`.sort_tables_and_constraints` - similar method which works
            with an already-given :class:`.MetaData`.

        """
        if hasattr(self.dialect, 'get_table_names'):
            tnames = self.dialect.get_table_names(
                self.bind, schema, info_cache=self.info_cache)
        else:
            tnames = self.engine.table_names(schema)

        tuples = set()
        remaining_fkcs = set()

        fknames_for_table = {}
        for tname in tnames:
            fkeys = self.get_foreign_keys(tname, schema)
            fknames_for_table[tname] = set(
                [fk['name'] for fk in fkeys]
            )
            for fkey in fkeys:
                if tname != fkey['referred_table']:
                    tuples.add((fkey['referred_table'], tname))
        try:
            candidate_sort = list(topological.sort(tuples, tnames))
        except exc.CircularDependencyError as err:
            for edge in err.edges:
                tuples.remove(edge)
                remaining_fkcs.update(
                    (edge[1], fkc)
                    for fkc in fknames_for_table[edge[1]]
                )

            candidate_sort = list(topological.sort(tuples, tnames))
        return [
            (tname, fknames_for_table[tname].difference(remaining_fkcs))
            for tname in candidate_sort
        ] + [(None, list(remaining_fkcs))]
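A brief usage sketch of the new method, with a placeholder database URL; the output shape follows the docstring above::

    from sqlalchemy import create_engine, inspect

    insp = inspect(create_engine("sqlite:///some.db"))  # placeholder URL

    for tname, fkcs in insp.get_sorted_table_and_fkc_names():
        if tname is not None:
            print("CREATE %s, then ADD %s" % (tname, sorted(fkcs)))
        else:
            # constraints that must wait until all tables exist
            print("finally ADD %s" % fkcs)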
    def get_temp_table_names(self):
        """return a list of temporary table names for the current bind.

        This method is unsupported by most dialects; currently
        only SQLite implements it.

        .. versionadded:: 1.0.0

        """
        return self.dialect.get_temp_table_names(
            self.bind, info_cache=self.info_cache)

    def get_temp_view_names(self):
        """return a list of temporary view names for the current bind.

        This method is unsupported by most dialects; currently
        only SQLite implements it.

        .. versionadded:: 1.0.0

        """
        return self.dialect.get_temp_view_names(
            self.bind, info_cache=self.info_cache)

    def get_table_options(self, table_name, schema=None, **kw):
        """Return a dictionary of options specified when the table of the
        given name was created.

@@ -370,6 +461,12 @@ class Inspector(object):
        unique
          boolean

        dialect_options
          dict of dialect-specific index options.  May not be present
          for all dialects.

          .. versionadded:: 1.0.0

        :param table_name: string name of the table.  For special quoting,
         use :class:`.quoted_name`.
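Since these are optional dialect features, callers should be prepared for ``NotImplementedError``; a minimal sketch::

    from sqlalchemy import create_engine, inspect

    insp = inspect(create_engine("sqlite://"))
    try:
        print(insp.get_temp_table_names())
        print(insp.get_temp_view_names())
    except NotImplementedError:
        # optional feature; currently only the SQLite dialect supports it
        pass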
@@ -465,55 +562,87 @@ class Inspector(object):
        for col_d in self.get_columns(
                table_name, schema, **table.dialect_kwargs):
            found_table = True
            orig_name = col_d['name']

            table.dispatch.column_reflect(self, table, col_d)

            name = col_d['name']
            if include_columns and name not in include_columns:
                continue
            if exclude_columns and name in exclude_columns:
                continue

            coltype = col_d['type']

            col_kw = dict(
                (k, col_d[k])
                for k in ['nullable', 'autoincrement', 'quote', 'info', 'key']
                if k in col_d
            )

            colargs = []
            if col_d.get('default') is not None:
                # the "default" value is assumed to be a literal SQL
                # expression, so is wrapped in text() so that no quoting
                # occurs on re-issuance.
                colargs.append(
                    sa_schema.DefaultClause(
                        sql.text(col_d['default']), _reflected=True
                    )
                )

            if 'sequence' in col_d:
                # TODO: mssql and sybase are using this.
                seq = col_d['sequence']
                sequence = sa_schema.Sequence(seq['name'], 1, 1)
                if 'start' in seq:
                    sequence.start = seq['start']
                if 'increment' in seq:
                    sequence.increment = seq['increment']
                colargs.append(sequence)

            cols_by_orig_name[orig_name] = col = \
                sa_schema.Column(name, coltype, *colargs, **col_kw)

            if col.key in table.primary_key:
                col.primary_key = True
            table.append_column(col)
            self._reflect_column(
                table, col_d, include_columns,
                exclude_columns, cols_by_orig_name)

        if not found_table:
            raise exc.NoSuchTableError(table.name)

        self._reflect_pk(
            table_name, schema, table, cols_by_orig_name, exclude_columns)

        self._reflect_fk(
            table_name, schema, table, cols_by_orig_name,
            exclude_columns, reflection_options)

        self._reflect_indexes(
            table_name, schema, table, cols_by_orig_name,
            include_columns, exclude_columns, reflection_options)

        self._reflect_unique_constraints(
            table_name, schema, table, cols_by_orig_name,
            include_columns, exclude_columns, reflection_options)

    def _reflect_column(
            self, table, col_d, include_columns,
            exclude_columns, cols_by_orig_name):

        orig_name = col_d['name']

        table.dispatch.column_reflect(self, table, col_d)

        # fetch name again as column_reflect is allowed to
        # change it
        name = col_d['name']
        if (include_columns and name not in include_columns) \
                or (exclude_columns and name in exclude_columns):
            return

        coltype = col_d['type']

        col_kw = dict(
            (k, col_d[k])
            for k in ['nullable', 'autoincrement', 'quote', 'info', 'key']
            if k in col_d
        )

        colargs = []
        if col_d.get('default') is not None:
            # the "default" value is assumed to be a literal SQL
            # expression, so is wrapped in text() so that no quoting
            # occurs on re-issuance.
            colargs.append(
                sa_schema.DefaultClause(
                    sql.text(col_d['default']), _reflected=True
                )
            )

        if 'sequence' in col_d:
            self._reflect_col_sequence(col_d, colargs)

        cols_by_orig_name[orig_name] = col = \
            sa_schema.Column(name, coltype, *colargs, **col_kw)

        if col.key in table.primary_key:
            col.primary_key = True
        table.append_column(col)

    def _reflect_col_sequence(self, col_d, colargs):
        if 'sequence' in col_d:
            # TODO: mssql and sybase are using this.
            seq = col_d['sequence']
            sequence = sa_schema.Sequence(seq['name'], 1, 1)
            if 'start' in seq:
                sequence.start = seq['start']
            if 'increment' in seq:
                sequence.increment = seq['increment']
            colargs.append(sequence)

    def _reflect_pk(
            self, table_name, schema, table,
            cols_by_orig_name, exclude_columns):
        pk_cons = self.get_pk_constraint(
            table_name, schema, **table.dialect_kwargs)
        if pk_cons:
@@ -530,6 +659,9 @@ class Inspector(object):
            # its column collection
            table.primary_key._reload(pk_cols)

    def _reflect_fk(
            self, table_name, schema, table, cols_by_orig_name,
            exclude_columns, reflection_options):
        fkeys = self.get_foreign_keys(
            table_name, schema, **table.dialect_kwargs)
        for fkey_d in fkeys:
@@ -572,24 +704,85 @@ class Inspector(object):
            sa_schema.ForeignKeyConstraint(constrained_columns, refspec,
                                           conname, link_to_name=True,
                                           **options))

    def _reflect_indexes(
            self, table_name, schema, table, cols_by_orig_name,
            include_columns, exclude_columns, reflection_options):
        # Indexes
        indexes = self.get_indexes(table_name, schema)
        for index_d in indexes:
            name = index_d['name']
            columns = index_d['column_names']
            unique = index_d['unique']
            flavor = index_d.get('type', 'unknown type')
            flavor = index_d.get('type', 'index')
            dialect_options = index_d.get('dialect_options', {})

            duplicates = index_d.get('duplicates_constraint')
            if include_columns and \
                    not set(columns).issubset(include_columns):
                util.warn(
                    "Omitting %s KEY for (%s), key covers omitted columns." %
                    "Omitting %s key for (%s), key covers omitted columns." %
                    (flavor, ', '.join(columns)))
                continue
            if duplicates:
                continue
            # look for columns by orig name in cols_by_orig_name,
            # but support columns that are in-Python only as fallback
            sa_schema.Index(name, *[
                cols_by_orig_name[c] if c in cols_by_orig_name
                else table.c[c]
                for c in columns
            ],
                **dict(unique=unique))
            idx_cols = []
            for c in columns:
                try:
                    idx_col = cols_by_orig_name[c] \
                        if c in cols_by_orig_name else table.c[c]
                except KeyError:
                    util.warn(
                        "%s key '%s' was not located in "
                        "columns for table '%s'" % (
                            flavor, c, table_name
                        ))
                else:
                    idx_cols.append(idx_col)

            sa_schema.Index(
                name, *idx_cols,
                **dict(list(dialect_options.items()) + [('unique', unique)])
            )

    def _reflect_unique_constraints(
            self, table_name, schema, table, cols_by_orig_name,
            include_columns, exclude_columns, reflection_options):

        # Unique Constraints
        try:
            constraints = self.get_unique_constraints(table_name, schema)
        except NotImplementedError:
            # optional dialect feature
            return

        for const_d in constraints:
            conname = const_d['name']
            columns = const_d['column_names']
            duplicates = const_d.get('duplicates_index')
            if include_columns and \
                    not set(columns).issubset(include_columns):
                util.warn(
                    "Omitting unique constraint key for (%s), "
                    "key covers omitted columns." %
                    ', '.join(columns))
                continue
            if duplicates:
                continue
            # look for columns by orig name in cols_by_orig_name,
            # but support columns that are in-Python only as fallback
            constrained_cols = []
            for c in columns:
                try:
                    constrained_col = cols_by_orig_name[c] \
                        if c in cols_by_orig_name else table.c[c]
                except KeyError:
                    util.warn(
                        "unique constraint key '%s' was not located in "
                        "columns for table '%s'" % (c, table_name))
                else:
                    constrained_cols.append(constrained_col)
            table.append_constraint(
                sa_schema.UniqueConstraint(*constrained_cols, name=conname))
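The ``table.dispatch.column_reflect(...)`` call inside ``_reflect_column()`` corresponds to the public ``column_reflect`` event, which may rewrite the column dictionary before the :class:`.Column` is constructed; a sketch::

    from sqlalchemy import Table, event

    @event.listens_for(Table, "column_reflect")
    def receive_column_reflect(inspector, table, column_info):
        # runs once per column during reflection; changing 'key' here
        # changes how the Column is addressed on table.c
        column_info['key'] = "attr_%s" % column_info['name'].lower()

    # reflection would then proceed as usual, e.g.:
    # t = Table('some_table', metadata, autoload=True, autoload_with=engine)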
@@ -1,5 +1,5 @@
# engine/result.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -110,7 +110,7 @@ class RowProxy(BaseRowProxy):
    __slots__ = ()

    def __contains__(self, key):
        return self._parent._has_key(self._row, key)
        return self._parent._has_key(key)

    def __getstate__(self):
        return {

@@ -155,7 +155,7 @@ class RowProxy(BaseRowProxy):
    def has_key(self, key):
        """Return True if this RowProxy contains the given key."""

        return self._parent._has_key(self._row, key)
        return self._parent._has_key(key)

    def items(self):
        """Return a list of tuples, each tuple containing a key/value pair."""
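Both membership tests now route to ``ResultMetaData._has_key()`` without passing the row itself. An illustrative use, assuming an ``engine`` bound to any database::

    row = engine.execute("select 1 as x, 2 as y").fetchone()

    assert "x" in row          # RowProxy.__contains__ -> _parent._has_key("x")
    assert row.has_key("y")    # same code path
    print(row.items())         # [('x', 1), ('y', 2)]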
@@ -187,90 +187,165 @@ class ResultMetaData(object):
    context."""

    def __init__(self, parent, metadata):
        self._processors = processors = []

        # We do not strictly need to store the processor in the key mapping,
        # though it is faster in the Python version (probably because of the
        # saved attribute lookup self._processors)
        self._keymap = keymap = {}
        self.keys = []
        context = parent.context
        dialect = context.dialect
        typemap = dialect.dbapi_type_map
        translate_colname = context._translate_colname
        self.case_sensitive = dialect.case_sensitive
        self.case_sensitive = case_sensitive = dialect.case_sensitive

        # high precedence key values.
        primary_keymap = {}
        if context.result_column_struct:
            result_columns, cols_are_ordered = context.result_column_struct
            num_ctx_cols = len(result_columns)
        else:
            num_ctx_cols = None

        for i, rec in enumerate(metadata):
            colname = rec[0]
            coltype = rec[1]
        if num_ctx_cols and \
                cols_are_ordered and \
                num_ctx_cols == len(metadata):
            # case 1 - SQL expression statement, number of columns
            # in result matches number of cols in compiled.  This is the
            # vast majority case for SQL expression constructs.  In this
            # case we don't bother trying to parse or match up to
            # the colnames in the result description.
            raw = [
                (
                    idx,
                    key,
                    name.lower() if not case_sensitive else name,
                    context.get_result_processor(
                        type_, key, metadata[idx][1]
                    ),
                    obj,
                    None
                ) for idx, (key, name, obj, type_)
                in enumerate(result_columns)
            ]
            self.keys = [
                elem[0] for elem in result_columns
            ]
        else:
            # case 2 - raw string, or number of columns in result does
            # not match number of cols in compiled.  The raw string case
            # is very common.  The latter can happen
            # when text() is used with only a partial typemap, or
            # in the extremely unlikely cases where the compiled construct
            # has a single element with multiple col expressions in it
            # (e.g. has commas embedded) or there's some kind of statement
            # that is adding extra columns.
            # In all these cases we fall back to the "named" approach
            # that SQLAlchemy has used up through 0.9.

            if dialect.description_encoding:
                colname = dialect._description_decoder(colname)
            if num_ctx_cols:
                result_map = self._create_result_map(
                    result_columns, case_sensitive)

            raw = []
            self.keys = []
            untranslated = None
            for idx, rec in enumerate(metadata):
                colname = rec[0]
                coltype = rec[1]

                if dialect.description_encoding:
                    colname = dialect._description_decoder(colname)

                if translate_colname:
                    colname, untranslated = translate_colname(colname)

                if dialect.requires_name_normalize:
                    colname = dialect.normalize_name(colname)

                self.keys.append(colname)
                if not case_sensitive:
                    colname = colname.lower()

                if num_ctx_cols:
                    try:
                        ctx_rec = result_map[colname]
                    except KeyError:
                        mapped_type = typemap.get(coltype, sqltypes.NULLTYPE)
                        obj = None
                    else:
                        obj = ctx_rec[1]
                        mapped_type = ctx_rec[2]
                else:
                    mapped_type = typemap.get(coltype, sqltypes.NULLTYPE)
                    obj = None
                processor = context.get_result_processor(
                    mapped_type, colname, coltype)

                raw.append(
                    (idx, colname, colname, processor, obj, untranslated)
                )

        # keymap indexes by integer index...
        self._keymap = dict([
            (elem[0], (elem[3], elem[4], elem[0]))
            for elem in raw
        ])

        # processors in key order for certain per-row
        # views like __iter__ and slices
        self._processors = [elem[3] for elem in raw]

        if num_ctx_cols:
            # keymap by primary string...
            by_key = dict([
                (elem[2], (elem[3], elem[4], elem[0]))
                for elem in raw
            ])

            # if by-primary-string dictionary smaller (or bigger?!) than
            # number of columns, assume we have dupes, rewrite
            # dupe records with "None" for index which results in
            # ambiguous column exception when accessed.
            if len(by_key) != num_ctx_cols:
                seen = set()
                for rec in raw:
                    key = rec[1]
                    if key in seen:
                        by_key[key] = (None, by_key[key][1], None)
                    seen.add(key)

            # update keymap with secondary "object"-based keys
            self._keymap.update([
                (obj_elem, by_key[elem[2]])
                for elem in raw if elem[4]
                for obj_elem in elem[4]
            ])

            # update keymap with primary string names taking
            # precedence
            self._keymap.update(by_key)
        else:
            self._keymap.update([
                (elem[2], (elem[3], elem[4], elem[0]))
                for elem in raw
            ])
            # update keymap with "translated" names (sqlite-only thing)
            if translate_colname:
                colname, untranslated = translate_colname(colname)
                self._keymap.update([
                    (elem[5], self._keymap[elem[2]])
                    for elem in raw if elem[5]
                ])

            if dialect.requires_name_normalize:
                colname = dialect.normalize_name(colname)

            if context.result_map:
                try:
                    name, obj, type_ = context.result_map[
                        colname if self.case_sensitive else colname.lower()]
                except KeyError:
                    name, obj, type_ = \
                        colname, None, typemap.get(coltype, sqltypes.NULLTYPE)
    @classmethod
    def _create_result_map(cls, result_columns, case_sensitive=True):
        d = {}
        for elem in result_columns:
            key, rec = elem[0], elem[1:]
            if not case_sensitive:
                key = key.lower()
            if key in d:
                # conflicting keyname, just double up the list
                # of objects.  this will cause an "ambiguous name"
                # error if an attempt is made by the result set to
                # access.
                e_name, e_obj, e_type = d[key]
                d[key] = e_name, e_obj + rec[1], e_type
            else:
                name, obj, type_ = \
                    colname, None, typemap.get(coltype, sqltypes.NULLTYPE)

                processor = context.get_result_processor(type_, colname, coltype)

                processors.append(processor)
                rec = (processor, obj, i)

                # indexes as keys. This is only needed for the Python version of
                # RowProxy (the C version uses a faster path for integer indexes).
                primary_keymap[i] = rec

                # populate primary keymap, looking for conflicts.
                if primary_keymap.setdefault(
                        name if self.case_sensitive
                        else name.lower(),
                        rec) is not rec:
                    # place a record that doesn't have the "index" - this
                    # is interpreted later as an AmbiguousColumnError,
                    # but only when actually accessed.  Columns
                    # colliding by name is not a problem if those names
                    # aren't used; integer access is always
                    # unambiguous.
                    primary_keymap[name
                                   if self.case_sensitive
                                   else name.lower()] = rec = (None, obj, None)

                self.keys.append(colname)
                if obj:
                    for o in obj:
                        keymap[o] = rec
                        # technically we should be doing this but we
                        # are saving on callcounts by not doing so.
                        # if keymap.setdefault(o, rec) is not rec:
                        #     keymap[o] = (None, obj, None)

                if translate_colname and \
                        untranslated:
                    keymap[untranslated] = rec

        # overwrite keymap values with those of the
        # high precedence keymap.
        keymap.update(primary_keymap)

        if parent._echo:
            context.engine.logger.debug(
                "Col %r", tuple(x[0] for x in metadata))
            d[key] = rec
        return d

    @util.pending_deprecation("0.8", "sqlite dialect uses "
                              "_translate_colname() now")
@@ -335,12 +410,28 @@ class ResultMetaData(object):
            map[key] = result
        return result

    def _has_key(self, row, key):
    def _has_key(self, key):
        if key in self._keymap:
            return True
        else:
            return self._key_fallback(key, False) is not None

    def _getter(self, key):
        if key in self._keymap:
            processor, obj, index = self._keymap[key]
        else:
            ret = self._key_fallback(key, False)
            if ret is None:
                return None
            processor, obj, index = ret

        if index is None:
            raise exc.InvalidRequestError(
                "Ambiguous column name '%s' in result set! "
                "try 'use_labels' option on select statement." % key)

        return operator.itemgetter(index)

    def __getstate__(self):
        return {
            '_pickled_keymap': dict(
@@ -391,21 +482,49 @@ class ResultProxy(object):
    out_parameters = None
    _can_close_connection = False
    _metadata = None
    _soft_closed = False
    closed = False

    def __init__(self, context):
        self.context = context
        self.dialect = context.dialect
        self.closed = False
        self.cursor = self._saved_cursor = context.cursor
        self.connection = context.root_connection
        self._echo = self.connection._echo and \
            context.engine._should_log_debug()
        self._init_metadata()

    def _getter(self, key):
        try:
            getter = self._metadata._getter
        except AttributeError:
            return self._non_result(None)
        else:
            return getter(key)

    def _has_key(self, key):
        try:
            has_key = self._metadata._has_key
        except AttributeError:
            return self._non_result(None)
        else:
            return has_key(key)

    def _init_metadata(self):
        metadata = self._cursor_description()
        if metadata is not None:
            self._metadata = ResultMetaData(self, metadata)
            if self.context.compiled and \
                    'compiled_cache' in self.context.execution_options:
                if self.context.compiled._cached_metadata:
                    self._metadata = self.context.compiled._cached_metadata
                else:
                    self._metadata = self.context.compiled._cached_metadata = \
                        ResultMetaData(self, metadata)
            else:
                self._metadata = ResultMetaData(self, metadata)
            if self._echo:
                self.context.engine.logger.debug(
                    "Col %r", tuple(x[0] for x in metadata))

    def keys(self):
        """Return the current set of string keys for rows."""
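The ``_init_metadata()`` change means that when a ``compiled_cache`` is in effect, the :class:`.ResultMetaData` is computed once and reused via ``compiled._cached_metadata``; a sketch, with ``engine`` and ``table`` assumed to exist::

    cache = {}
    with engine.connect() as conn:
        conn = conn.execution_options(compiled_cache=cache)
        # repeated executions reuse the compiled statement as well as
        # its cached ResultMetaData
        for _ in range(3):
            conn.execute(table.select()).fetchall()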
@@ -515,39 +634,85 @@ class ResultProxy(object):

        return self._saved_cursor.description

    def close(self, _autoclose_connection=True):
        """Close this ResultProxy.
    def _soft_close(self, _autoclose_connection=True):
        """Soft close this :class:`.ResultProxy`.

        Closes the underlying DBAPI cursor corresponding to the execution.

        Note that any data cached within this ResultProxy is still available.
        For some types of results, this may include buffered rows.

        If this ResultProxy was generated from an implicit execution,
        the underlying Connection will also be closed (returns the
        underlying DBAPI connection to the connection pool.)
        This releases all DBAPI cursor resources, but leaves the
        ResultProxy "open" from a semantic perspective, meaning the
        fetchXXX() methods will continue to return empty results.

        This method is called automatically when:

        * all result rows are exhausted using the fetchXXX() methods.
        * cursor.description is None.

        This method is **not public**, but is documented in order to clarify
        the "autoclose" process used.

        .. versionadded:: 1.0.0

        .. seealso::

            :meth:`.ResultProxy.close`


        """
        if self._soft_closed:
            return
        self._soft_closed = True
        cursor = self.cursor
        self.connection._safe_close_cursor(cursor)
        if _autoclose_connection and \
                self.connection.should_close_with_result:
            self.connection.close()
        self.cursor = None

    def close(self):
        """Close this ResultProxy.

        This closes out the underlying DBAPI cursor corresponding
        to the statement execution, if one is still present.  Note that the
        DBAPI cursor is automatically released when the :class:`.ResultProxy`
        exhausts all available rows.  :meth:`.ResultProxy.close` is generally
        an optional method except in the case when discarding a
        :class:`.ResultProxy` that still has additional rows pending for fetch.

        In the case of a result that is the product of
        :ref:`connectionless execution <dbengine_implicit>`,
        the underlying :class:`.Connection` object is also closed, which
        :term:`releases` DBAPI connection resources.

        After this method is called, it is no longer valid to call upon
        the fetch methods, which will raise a :class:`.ResourceClosedError`
        on subsequent use.

        .. versionchanged:: 1.0.0 - the :meth:`.ResultProxy.close` method
           has been separated out from the process that releases the underlying
           DBAPI cursor resource.  The "auto close" feature of the
           :class:`.Connection` now performs a so-called "soft close", which
           releases the underlying DBAPI cursor, but allows the
           :class:`.ResultProxy` to still behave as an open-but-exhausted
           result set; the actual :meth:`.ResultProxy.close` method is never
           called.  It is still safe to discard a :class:`.ResultProxy`
           that has been fully exhausted without calling this method.

        .. seealso::

            :ref:`connections_toplevel`

            :meth:`.ResultProxy._soft_close`

        """

        if not self.closed:
            self._soft_close()
            self.closed = True
        self.connection._safe_close_cursor(self.cursor)
        if _autoclose_connection and \
                self.connection.should_close_with_result:
            self.connection.close()
        # allow consistent errors
        self.cursor = None
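The difference between the two closes, sketched with an assumed ``conn`` and ``table``::

    result = conn.execute(table.select())
    rows = result.fetchall()    # exhaustion triggers the "soft close"

    print(result.fetchone())    # None - exhausted but not closed
    result.close()              # the hard close
    result.fetchone()           # now raises ResourceClosedError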
    def __iter__(self):
        while True:
            row = self.fetchone()
            if row is None:
                raise StopIteration
                return
            else:
                yield row

@@ -732,7 +897,7 @@ class ResultProxy(object):
        try:
            return self.cursor.fetchone()
        except AttributeError:
            self._non_result()
            return self._non_result(None)

    def _fetchmany_impl(self, size=None):
        try:
@@ -741,22 +906,24 @@ class ResultProxy(object):
            else:
                return self.cursor.fetchmany(size)
        except AttributeError:
            self._non_result()
            return self._non_result([])

    def _fetchall_impl(self):
        try:
            return self.cursor.fetchall()
        except AttributeError:
            self._non_result()
            return self._non_result([])

    def _non_result(self):
    def _non_result(self, default):
        if self._metadata is None:
            raise exc.ResourceClosedError(
                "This result object does not return rows. "
                "It has been closed automatically.",
            )
        else:
        elif self.closed:
            raise exc.ResourceClosedError("This result object is closed.")
        else:
            return default

    def process_rows(self, rows):
        process_row = self._process_row
@@ -775,11 +942,25 @@ class ResultProxy(object):
                for row in rows]

    def fetchall(self):
        """Fetch all rows, just like DB-API ``cursor.fetchall()``."""
        """Fetch all rows, just like DB-API ``cursor.fetchall()``.

        After all rows have been exhausted, the underlying DBAPI
        cursor resource is released, and the object may be safely
        discarded.

        Subsequent calls to :meth:`.ResultProxy.fetchall` will return
        an empty list.  After the :meth:`.ResultProxy.close` method is
        called, the method will raise :class:`.ResourceClosedError`.

        .. versionchanged:: 1.0.0 - Added "soft close" behavior which
           allows the result to be used in an "exhausted" state prior to
           calling the :meth:`.ResultProxy.close` method.

        """

        try:
            l = self.process_rows(self._fetchall_impl())
            self.close()
            self._soft_close()
            return l
        except Exception as e:
            self.connection._handle_dbapi_exception(
@@ -790,15 +971,25 @@ class ResultProxy(object):
        """Fetch many rows, just like DB-API
        ``cursor.fetchmany(size=cursor.arraysize)``.

        If rows are present, the cursor remains open after this is called.
        Else the cursor is automatically closed and an empty list is returned.
        After all rows have been exhausted, the underlying DBAPI
        cursor resource is released, and the object may be safely
        discarded.

        Calls to :meth:`.ResultProxy.fetchmany` after all rows have been
        exhausted will return
        an empty list.  After the :meth:`.ResultProxy.close` method is
        called, the method will raise :class:`.ResourceClosedError`.

        .. versionchanged:: 1.0.0 - Added "soft close" behavior which
           allows the result to be used in an "exhausted" state prior to
           calling the :meth:`.ResultProxy.close` method.

        """

        try:
            l = self.process_rows(self._fetchmany_impl(size))
            if len(l) == 0:
                self.close()
                self._soft_close()
            return l
        except Exception as e:
            self.connection._handle_dbapi_exception(
@@ -808,8 +999,18 @@ class ResultProxy(object):
    def fetchone(self):
        """Fetch one row, just like DB-API ``cursor.fetchone()``.

        If a row is present, the cursor remains open after this is called.
        Else the cursor is automatically closed and None is returned.
        After all rows have been exhausted, the underlying DBAPI
        cursor resource is released, and the object may be safely
        discarded.

        Calls to :meth:`.ResultProxy.fetchone` after all rows have
        been exhausted will return ``None``.
        After the :meth:`.ResultProxy.close` method is
        called, the method will raise :class:`.ResourceClosedError`.

        .. versionchanged:: 1.0.0 - Added "soft close" behavior which
           allows the result to be used in an "exhausted" state prior to
           calling the :meth:`.ResultProxy.close` method.

        """
        try:

@@ -817,7 +1018,7 @@ class ResultProxy(object):
            if row is not None:
                return self.process_rows([row])[0]
            else:
                self.close()
                self._soft_close()
                return None
        except Exception as e:
            self.connection._handle_dbapi_exception(
@@ -829,9 +1030,12 @@ class ResultProxy(object):

        Returns None if no row is present.

        After calling this method, the object is fully closed,
        e.g. the :meth:`.ResultProxy.close` method will have been called.

        """
        if self._metadata is None:
            self._non_result()
            return self._non_result(None)

        try:
            row = self._fetchone_impl()

@@ -853,6 +1057,9 @@ class ResultProxy(object):

        Returns None if no row is present.

        After calling this method, the object is fully closed,
        e.g. the :meth:`.ResultProxy.close` method will have been called.

        """
        row = self.first()
        if row is not None:
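For single-value statements the fully-closing accessors are the usual route; a one-line sketch with an assumed ``conn`` and table name::

    count = conn.execute("select count(*) from some_table").scalar()
    # scalar() goes through first(); the ResultProxy is fully closed here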
@@ -873,10 +1080,27 @@ class BufferedRowResultProxy(ResultProxy):

    The pre-fetching behavior fetches only one row initially, and then
    grows its buffer size by a fixed amount with each successive need
    for additional rows up to a size of 100.
    for additional rows up to a size of 1000.

    The size argument is configurable using the ``max_row_buffer``
    execution option::

        with psycopg2_engine.connect() as conn:

            result = conn.execution_options(
                stream_results=True, max_row_buffer=50
            ).execute("select * from table")

    .. versionadded:: 1.0.6 Added the ``max_row_buffer`` option.

    .. seealso::

        :ref:`psycopg2_execution_options`
    """

    def _init_metadata(self):
        self._max_row_buffer = self.context.execution_options.get(
            'max_row_buffer', None)
        self.__buffer_rows()
        super(BufferedRowResultProxy, self)._init_metadata()
@@ -896,13 +1120,21 @@ class BufferedRowResultProxy(ResultProxy):
    }

    def __buffer_rows(self):
        if self.cursor is None:
            return
        size = getattr(self, '_bufsize', 1)
        self.__rowbuffer = collections.deque(self.cursor.fetchmany(size))
        self._bufsize = self.size_growth.get(size, size)
        if self._max_row_buffer is not None:
            self._bufsize = min(self._max_row_buffer, self._bufsize)

    def _soft_close(self, **kw):
        self.__rowbuffer.clear()
        super(BufferedRowResultProxy, self)._soft_close(**kw)

    def _fetchone_impl(self):
        if self.closed:
            return None
        if self.cursor is None:
            return self._non_result(None)
        if not self.__rowbuffer:
            self.__buffer_rows()
            if not self.__rowbuffer:
@@ -921,6 +1153,8 @@ class BufferedRowResultProxy(ResultProxy):
        return result

    def _fetchall_impl(self):
        if self.cursor is None:
            return self._non_result([])
        self.__rowbuffer.extend(self.cursor.fetchall())
        ret = self.__rowbuffer
        self.__rowbuffer = collections.deque()
@@ -943,11 +1177,15 @@ class FullyBufferedResultProxy(ResultProxy):
    def _buffer_rows(self):
        return collections.deque(self.cursor.fetchall())

    def _soft_close(self, **kw):
        self.__rowbuffer.clear()
        super(FullyBufferedResultProxy, self)._soft_close(**kw)

    def _fetchone_impl(self):
        if self.__rowbuffer:
            return self.__rowbuffer.popleft()
        else:
            return None
            return self._non_result(None)

    def _fetchmany_impl(self, size=None):
        if size is None:
@@ -961,6 +1199,8 @@ class FullyBufferedResultProxy(ResultProxy):
        return result

    def _fetchall_impl(self):
        if not self.cursor:
            return self._non_result([])
        ret = self.__rowbuffer
        self.__rowbuffer = collections.deque()
        return ret
@@ -1,5 +1,5 @@
# engine/strategies.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -48,7 +48,8 @@ class DefaultEngineStrategy(EngineStrategy):
        # create url.URL object
        u = url.make_url(name_or_url)

        dialect_cls = u.get_dialect()
        entrypoint = u._get_entrypoint()
        dialect_cls = entrypoint.get_dialect_cls(u)

        if kwargs.pop('_coerce_config', False):
            def pop_kwarg(key, default=None):
@@ -81,21 +82,19 @@ class DefaultEngineStrategy(EngineStrategy):
        # assemble connection arguments
        (cargs, cparams) = dialect.create_connect_args(u)
        cparams.update(pop_kwarg('connect_args', {}))
        cargs = list(cargs)  # allow mutability

        # look for existing pool or create
        pool = pop_kwarg('pool', None)
        if pool is None:
            def connect():
                try:
                    return dialect.connect(*cargs, **cparams)
                except dialect.dbapi.Error as e:
                    invalidated = dialect.is_disconnect(e, None, None)
                    util.raise_from_cause(
                        exc.DBAPIError.instance(
                            None, None, e, dialect.dbapi.Error,
                            connection_invalidated=invalidated
                        )
                    )
            def connect(connection_record=None):
                if dialect._has_events:
                    for fn in dialect.dispatch.do_connect:
                        connection = fn(
                            dialect, connection_record, cargs, cparams)
                        if connection is not None:
                            return connection
                return dialect.connect(*cargs, **cparams)

            creator = pop_kwarg('creator', connect)
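The rewritten ``connect()`` consults the ``do_connect`` dialect event before falling back to ``dialect.connect()``; a sketch of a listener, assuming a psycopg2 engine (``connect_timeout`` is a psycopg2 connection argument)::

    from sqlalchemy import create_engine, event

    engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test")

    @event.listens_for(engine, "do_connect")
    def receive_do_connect(dialect, conn_rec, cargs, cparams):
        cparams['connect_timeout'] = 10
        # returning None lets dialect.connect(*cargs, **cparams) proceed;
        # returning a DBAPI connection would bypass it entirely
        return None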
@@ -162,9 +161,14 @@ class DefaultEngineStrategy(EngineStrategy):
            def first_connect(dbapi_connection, connection_record):
                c = base.Connection(engine, connection=dbapi_connection,
                                    _has_events=False)
                c._execution_options = util.immutabledict()
                dialect.initialize(c)
            event.listen(pool, 'first_connect', first_connect, once=True)

        dialect_cls.engine_created(engine)
        if entrypoint is not dialect_cls:
            entrypoint.engine_created(engine)

        return engine
@@ -1,5 +1,5 @@
# engine/threadlocal.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -59,7 +59,10 @@ class TLEngine(base.Engine):
            # guards against pool-level reapers, if desired.
            # or not connection.connection.is_valid:
            connection = self._tl_connection_cls(
                self, self.pool.connect(), **kw)
                self,
                self._wrap_pool_connect(
                    self.pool.connect, connection),
                **kw)
            self._connections.conn = weakref.ref(connection)

        return connection._increment_connect()
@@ -1,5 +1,5 @@
# engine/url.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -105,11 +105,25 @@ class URL(object):
            self.database == other.database and \
            self.query == other.query

    def get_dialect(self):
        """Return the SQLAlchemy database dialect class corresponding
        to this URL's driver name.
        """
    def get_backend_name(self):
        if '+' not in self.drivername:
            return self.drivername
        else:
            return self.drivername.split('+')[0]

    def get_driver_name(self):
        if '+' not in self.drivername:
            return self.get_dialect().driver
        else:
            return self.drivername.split('+')[1]

    def _get_entrypoint(self):
        """Return the "entry point" dialect class.

        This is normally the dialect itself except in the case when the
        returned class implements the get_dialect_cls() method.

        """
        if '+' not in self.drivername:
            name = self.drivername
        else:
@@ -125,6 +139,14 @@ class URL(object):
        else:
            return cls

    def get_dialect(self):
        """Return the SQLAlchemy database dialect class corresponding
        to this URL's driver name.
        """
        entrypoint = self._get_entrypoint()
        dialect_cls = entrypoint.get_dialect_cls(self)
        return dialect_cls

    def translate_connect_args(self, names=[], **kw):
        """Translate url attributes into a dictionary of connection arguments.
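How the new accessors divide up a drivername, sketched with a placeholder URL::

    from sqlalchemy.engine import url

    u = url.make_url("postgresql+psycopg2://scott:tiger@localhost/test")

    print(u.get_backend_name())   # 'postgresql'
    print(u.get_driver_name())    # 'psycopg2'
    print(u.get_dialect())        # psycopg2 dialect class, via _get_entrypoint()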
@@ -1,5 +1,5 @@
# engine/util.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under