update sqlalchemy
parent 22cbffb8a3
commit e4bd5b5042
362 changed files with 37677 additions and 11013 deletions

dialects/__init__.py
@@ -1,12 +1,11 @@
 # dialects/__init__.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
 
 __all__ = (
-    'drizzle',
     'firebird',
     'mssql',
     'mysql',

dialects/drizzle/__init__.py
@@ -1,22 +0,0 @@
-from sqlalchemy.dialects.drizzle import base, mysqldb
-
-base.dialect = mysqldb.dialect
-
-from sqlalchemy.dialects.drizzle.base import \
-    BIGINT, BINARY, BLOB, \
-    BOOLEAN, CHAR, DATE, \
-    DATETIME, DECIMAL, DOUBLE, \
-    ENUM, FLOAT, INTEGER, \
-    NUMERIC, REAL, TEXT, \
-    TIME, TIMESTAMP, VARBINARY, \
-    VARCHAR, dialect
-
-__all__ = (
-    'BIGINT', 'BINARY', 'BLOB',
-    'BOOLEAN', 'CHAR', 'DATE',
-    'DATETIME', 'DECIMAL', 'DOUBLE',
-    'ENUM', 'FLOAT', 'INTEGER',
-    'NUMERIC', 'REAL', 'TEXT',
-    'TIME', 'TIMESTAMP', 'VARBINARY',
-    'VARCHAR', 'dialect'
-)

dialects/drizzle/base.py
@@ -1,499 +0,0 @@
-# drizzle/base.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
-# <see AUTHORS file>
-# Copyright (C) 2010-2011 Monty Taylor <mordred@inaugust.com>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-
-"""
-
-.. dialect:: drizzle
-    :name: Drizzle
-
-Drizzle is a variant of MySQL. Unlike MySQL, Drizzle's default storage engine
-is InnoDB (transactions, foreign-keys) rather than MyISAM. For more
-`Notable Differences <http://docs.drizzle.org/mysql_differences.html>`_, visit
-the `Drizzle Documentation <http://docs.drizzle.org/index.html>`_.
-
-The SQLAlchemy Drizzle dialect leans heavily on the MySQL dialect, so much of
-the :doc:`SQLAlchemy MySQL <mysql>` documentation is also relevant.
-
-
-"""
-
-from sqlalchemy import exc
-from sqlalchemy import log
-from sqlalchemy import types as sqltypes
-from sqlalchemy.engine import reflection
-from sqlalchemy.dialects.mysql import base as mysql_dialect
-from sqlalchemy.types import DATE, DATETIME, BOOLEAN, TIME, \
-    BLOB, BINARY, VARBINARY
-
-
-class _NumericType(object):
-    """Base for Drizzle numeric types."""
-
-    def __init__(self, **kw):
-        super(_NumericType, self).__init__(**kw)
-
-
-class _FloatType(_NumericType, sqltypes.Float):
-    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
-        if isinstance(self, (REAL, DOUBLE)) and \
-            (
-                (precision is None and scale is not None) or
-                (precision is not None and scale is None)
-            ):
-            raise exc.ArgumentError(
-                "You must specify both precision and scale or omit "
-                "both altogether.")
-
-        super(_FloatType, self).__init__(precision=precision,
-                                         asdecimal=asdecimal, **kw)
-        self.scale = scale
-
-
-class _StringType(mysql_dialect._StringType):
-    """Base for Drizzle string types."""
-
-    def __init__(self, collation=None, binary=False, **kw):
-        kw['national'] = False
-        super(_StringType, self).__init__(collation=collation, binary=binary,
-                                          **kw)
-
-
-class NUMERIC(_NumericType, sqltypes.NUMERIC):
-    """Drizzle NUMERIC type."""
-
-    __visit_name__ = 'NUMERIC'
-
-    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
-        """Construct a NUMERIC.
-
-        :param precision: Total digits in this number. If scale and precision
-          are both None, values are stored to limits allowed by the server.
-
-        :param scale: The number of digits after the decimal point.
-
-        """
-
-        super(NUMERIC, self).__init__(precision=precision, scale=scale,
-                                      asdecimal=asdecimal, **kw)
-
-
-class DECIMAL(_NumericType, sqltypes.DECIMAL):
-    """Drizzle DECIMAL type."""
-
-    __visit_name__ = 'DECIMAL'
-
-    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
-        """Construct a DECIMAL.
-
-        :param precision: Total digits in this number. If scale and precision
-          are both None, values are stored to limits allowed by the server.
-
-        :param scale: The number of digits after the decimal point.
-
-        """
-        super(DECIMAL, self).__init__(precision=precision, scale=scale,
-                                      asdecimal=asdecimal, **kw)
-
-
-class DOUBLE(_FloatType):
-    """Drizzle DOUBLE type."""
-
-    __visit_name__ = 'DOUBLE'
-
-    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
-        """Construct a DOUBLE.
-
-        :param precision: Total digits in this number. If scale and precision
-          are both None, values are stored to limits allowed by the server.
-
-        :param scale: The number of digits after the decimal point.
-
-        """
-
-        super(DOUBLE, self).__init__(precision=precision, scale=scale,
-                                     asdecimal=asdecimal, **kw)
-
-
-class REAL(_FloatType, sqltypes.REAL):
-    """Drizzle REAL type."""
-
-    __visit_name__ = 'REAL'
-
-    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
-        """Construct a REAL.
-
-        :param precision: Total digits in this number. If scale and precision
-          are both None, values are stored to limits allowed by the server.
-
-        :param scale: The number of digits after the decimal point.
-
-        """
-
-        super(REAL, self).__init__(precision=precision, scale=scale,
-                                   asdecimal=asdecimal, **kw)
-
-
-class FLOAT(_FloatType, sqltypes.FLOAT):
-    """Drizzle FLOAT type."""
-
-    __visit_name__ = 'FLOAT'
-
-    def __init__(self, precision=None, scale=None, asdecimal=False, **kw):
-        """Construct a FLOAT.
-
-        :param precision: Total digits in this number. If scale and precision
-          are both None, values are stored to limits allowed by the server.
-
-        :param scale: The number of digits after the decimal point.
-
-        """
-
-        super(FLOAT, self).__init__(precision=precision, scale=scale,
-                                    asdecimal=asdecimal, **kw)
-
-    def bind_processor(self, dialect):
-        return None
-
-
-class INTEGER(sqltypes.INTEGER):
-    """Drizzle INTEGER type."""
-
-    __visit_name__ = 'INTEGER'
-
-    def __init__(self, **kw):
-        """Construct an INTEGER."""
-
-        super(INTEGER, self).__init__(**kw)
-
-
-class BIGINT(sqltypes.BIGINT):
-    """Drizzle BIGINTEGER type."""
-
-    __visit_name__ = 'BIGINT'
-
-    def __init__(self, **kw):
-        """Construct a BIGINTEGER."""
-
-        super(BIGINT, self).__init__(**kw)
-
-
-class TIME(mysql_dialect.TIME):
-    """Drizzle TIME type."""
-
-
-class TIMESTAMP(sqltypes.TIMESTAMP):
-    """Drizzle TIMESTAMP type."""
-
-    __visit_name__ = 'TIMESTAMP'
-
-
-class TEXT(_StringType, sqltypes.TEXT):
-    """Drizzle TEXT type, for text up to 2^16 characters."""
-
-    __visit_name__ = 'TEXT'
-
-    def __init__(self, length=None, **kw):
-        """Construct a TEXT.
-
-        :param length: Optional, if provided the server may optimize storage
-          by substituting the smallest TEXT type sufficient to store
-          ``length`` characters.
-
-        :param collation: Optional, a column-level collation for this string
-          value. Takes precedence to 'binary' short-hand.
-
-        :param binary: Defaults to False: short-hand, pick the binary
-          collation type that matches the column's character set. Generates
-          BINARY in schema. This does not affect the type of data stored,
-          only the collation of character data.
-
-        """
-
-        super(TEXT, self).__init__(length=length, **kw)
-
-
-class VARCHAR(_StringType, sqltypes.VARCHAR):
-    """Drizzle VARCHAR type, for variable-length character data."""
-
-    __visit_name__ = 'VARCHAR'
-
-    def __init__(self, length=None, **kwargs):
-        """Construct a VARCHAR.
-
-        :param collation: Optional, a column-level collation for this string
-          value. Takes precedence to 'binary' short-hand.
-
-        :param binary: Defaults to False: short-hand, pick the binary
-          collation type that matches the column's character set. Generates
-          BINARY in schema. This does not affect the type of data stored,
-          only the collation of character data.
-
-        """
-
-        super(VARCHAR, self).__init__(length=length, **kwargs)
-
-
-class CHAR(_StringType, sqltypes.CHAR):
-    """Drizzle CHAR type, for fixed-length character data."""
-
-    __visit_name__ = 'CHAR'
-
-    def __init__(self, length=None, **kwargs):
-        """Construct a CHAR.
-
-        :param length: Maximum data length, in characters.
-
-        :param binary: Optional, use the default binary collation for the
-          national character set. This does not affect the type of data
-          stored, use a BINARY type for binary data.
-
-        :param collation: Optional, request a particular collation. Must be
-          compatible with the national character set.
-
-        """
-
-        super(CHAR, self).__init__(length=length, **kwargs)
-
-
-class ENUM(mysql_dialect.ENUM):
-    """Drizzle ENUM type."""
-
-    def __init__(self, *enums, **kw):
-        """Construct an ENUM.
-
-        Example:
-
-          Column('myenum', ENUM("foo", "bar", "baz"))
-
-        :param enums: The range of valid values for this ENUM. Values will be
-          quoted when generating the schema according to the quoting flag (see
-          below).
-
-        :param strict: Defaults to False: ensure that a given value is in this
-          ENUM's range of permissible values when inserting or updating rows.
-          Note that Drizzle will not raise a fatal error if you attempt to
-          store an out of range value- an alternate value will be stored
-          instead.
-          (See Drizzle ENUM documentation.)
-
-        :param collation: Optional, a column-level collation for this string
-          value. Takes precedence to 'binary' short-hand.
-
-        :param binary: Defaults to False: short-hand, pick the binary
-          collation type that matches the column's character set. Generates
-          BINARY in schema. This does not affect the type of data stored,
-          only the collation of character data.
-
-        :param quoting: Defaults to 'auto': automatically determine enum value
-          quoting. If all enum values are surrounded by the same quoting
-          character, then use 'quoted' mode. Otherwise, use 'unquoted' mode.
-
-          'quoted': values in enums are already quoted, they will be used
-          directly when generating the schema - this usage is deprecated.
-
-          'unquoted': values in enums are not quoted, they will be escaped and
-          surrounded by single quotes when generating the schema.
-
-          Previous versions of this type always required manually quoted
-          values to be supplied; future versions will always quote the string
-          literals for you. This is a transitional option.
-
-        """
-
-        super(ENUM, self).__init__(*enums, **kw)
-
-
-class _DrizzleBoolean(sqltypes.Boolean):
-    def get_dbapi_type(self, dbapi):
-        return dbapi.NUMERIC
-
-
-colspecs = {
-    sqltypes.Numeric: NUMERIC,
-    sqltypes.Float: FLOAT,
-    sqltypes.Time: TIME,
-    sqltypes.Enum: ENUM,
-    sqltypes.Boolean: _DrizzleBoolean,
-}
-
-
-# All the types we have in Drizzle
-ischema_names = {
-    'BIGINT': BIGINT,
-    'BINARY': BINARY,
-    'BLOB': BLOB,
-    'BOOLEAN': BOOLEAN,
-    'CHAR': CHAR,
-    'DATE': DATE,
-    'DATETIME': DATETIME,
-    'DECIMAL': DECIMAL,
-    'DOUBLE': DOUBLE,
-    'ENUM': ENUM,
-    'FLOAT': FLOAT,
-    'INT': INTEGER,
-    'INTEGER': INTEGER,
-    'NUMERIC': NUMERIC,
-    'TEXT': TEXT,
-    'TIME': TIME,
-    'TIMESTAMP': TIMESTAMP,
-    'VARBINARY': VARBINARY,
-    'VARCHAR': VARCHAR,
-}
-
-
-class DrizzleCompiler(mysql_dialect.MySQLCompiler):
-
-    def visit_typeclause(self, typeclause):
-        type_ = typeclause.type.dialect_impl(self.dialect)
-        if isinstance(type_, sqltypes.Integer):
-            return 'INTEGER'
-        else:
-            return super(DrizzleCompiler, self).visit_typeclause(typeclause)
-
-    def visit_cast(self, cast, **kwargs):
-        type_ = self.process(cast.typeclause)
-        if type_ is None:
-            return self.process(cast.clause)
-
-        return 'CAST(%s AS %s)' % (self.process(cast.clause), type_)
-
-
-class DrizzleDDLCompiler(mysql_dialect.MySQLDDLCompiler):
-    pass
-
-
-class DrizzleTypeCompiler(mysql_dialect.MySQLTypeCompiler):
-    def _extend_numeric(self, type_, spec):
-        return spec
-
-    def _extend_string(self, type_, defaults, spec):
-        """Extend a string-type declaration with standard SQL
-        COLLATE annotations and Drizzle specific extensions.
-
-        """
-
-        def attr(name):
-            return getattr(type_, name, defaults.get(name))
-
-        if attr('collation'):
-            collation = 'COLLATE %s' % type_.collation
-        elif attr('binary'):
-            collation = 'BINARY'
-        else:
-            collation = None
-
-        return ' '.join([c for c in (spec, collation)
-                         if c is not None])
-
-    def visit_NCHAR(self, type):
-        raise NotImplementedError("Drizzle does not support NCHAR")
-
-    def visit_NVARCHAR(self, type):
-        raise NotImplementedError("Drizzle does not support NVARCHAR")
-
-    def visit_FLOAT(self, type_):
-        if type_.scale is not None and type_.precision is not None:
-            return "FLOAT(%s, %s)" % (type_.precision, type_.scale)
-        else:
-            return "FLOAT"
-
-    def visit_BOOLEAN(self, type_):
-        return "BOOLEAN"
-
-    def visit_BLOB(self, type_):
-        return "BLOB"
-
-
-class DrizzleExecutionContext(mysql_dialect.MySQLExecutionContext):
-    pass
-
-
-class DrizzleIdentifierPreparer(mysql_dialect.MySQLIdentifierPreparer):
-    pass
-
-
-@log.class_logger
-class DrizzleDialect(mysql_dialect.MySQLDialect):
-    """Details of the Drizzle dialect.
-
-    Not used directly in application code.
-    """
-
-    name = 'drizzle'
-
-    _supports_cast = True
-    supports_sequences = False
-    supports_native_boolean = True
-    supports_views = False
-
-    default_paramstyle = 'format'
-    colspecs = colspecs
-
-    statement_compiler = DrizzleCompiler
-    ddl_compiler = DrizzleDDLCompiler
-    type_compiler = DrizzleTypeCompiler
-    ischema_names = ischema_names
-    preparer = DrizzleIdentifierPreparer
-
-    def on_connect(self):
-        """Force autocommit - Drizzle Bug#707842 doesn't set this properly"""
-
-        def connect(conn):
-            conn.autocommit(False)
-        return connect
-
-    @reflection.cache
-    def get_table_names(self, connection, schema=None, **kw):
-        """Return a Unicode SHOW TABLES from a given schema."""
-
-        if schema is not None:
-            current_schema = schema
-        else:
-            current_schema = self.default_schema_name
-
-        charset = 'utf8'
-        rp = connection.execute("SHOW TABLES FROM %s" %
-            self.identifier_preparer.quote_identifier(current_schema))
-        return [row[0] for row in self._compat_fetchall(rp, charset=charset)]
-
-    @reflection.cache
-    def get_view_names(self, connection, schema=None, **kw):
-        raise NotImplementedError
-
-    def _detect_casing(self, connection):
-        """Sniff out identifier case sensitivity.
-
-        Cached per-connection. This value can not change without a server
-        restart.
-        """
-
-        return 0
-
-    def _detect_collations(self, connection):
-        """Pull the active COLLATIONS list from the server.
-
-        Cached per-connection.
-        """
-
-        collations = {}
-        charset = self._connection_charset
-        rs = connection.execute(
-            'SELECT CHARACTER_SET_NAME, COLLATION_NAME FROM'
-            ' data_dictionary.COLLATIONS')
-        for row in self._compat_fetchall(rs, charset):
-            collations[row[0]] = row[1]
-        return collations
-
-    def _detect_ansiquotes(self, connection):
-        """Detect and adjust for the ANSI_QUOTES sql mode."""
-
-        self._server_ansiquotes = False
-        self._backslash_escapes = False

dialects/drizzle/mysqldb.py
@@ -1,48 +0,0 @@
-"""
-.. dialect:: drizzle+mysqldb
-    :name: MySQL-Python
-    :dbapi: mysqldb
-    :connectstring: drizzle+mysqldb://<user>:<password>@<host>[:<port>]/<dbname>
-    :url: http://sourceforge.net/projects/mysql-python
-
-
-"""
-
-from sqlalchemy.dialects.drizzle.base import (
-    DrizzleDialect,
-    DrizzleExecutionContext,
-    DrizzleCompiler,
-    DrizzleIdentifierPreparer)
-from sqlalchemy.connectors.mysqldb import (
-    MySQLDBExecutionContext,
-    MySQLDBCompiler,
-    MySQLDBIdentifierPreparer,
-    MySQLDBConnector)
-
-
-class DrizzleExecutionContext_mysqldb(MySQLDBExecutionContext,
-                                      DrizzleExecutionContext):
-    pass
-
-
-class DrizzleCompiler_mysqldb(MySQLDBCompiler, DrizzleCompiler):
-    pass
-
-
-class DrizzleIdentifierPreparer_mysqldb(MySQLDBIdentifierPreparer,
-                                        DrizzleIdentifierPreparer):
-    pass
-
-
-class DrizzleDialect_mysqldb(MySQLDBConnector, DrizzleDialect):
-    execution_ctx_cls = DrizzleExecutionContext_mysqldb
-    statement_compiler = DrizzleCompiler_mysqldb
-    preparer = DrizzleIdentifierPreparer_mysqldb
-
-    def _detect_charset(self, connection):
-        """Sniff out the character set in use for connection results."""
-
-        return 'utf8'
-
-
-dialect = DrizzleDialect_mysqldb

dialects/firebird/__init__.py
@@ -1,5 +1,5 @@
 # firebird/__init__.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under

dialects/firebird/base.py
@@ -1,5 +1,5 @@
 # firebird/base.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -78,7 +78,6 @@ from sqlalchemy.sql import expression
 from sqlalchemy.engine import base, default, reflection
 from sqlalchemy.sql import compiler
 
-
 from sqlalchemy.types import (BIGINT, BLOB, DATE, FLOAT, INTEGER, NUMERIC,
                               SMALLINT, TEXT, TIME, TIMESTAMP, Integer)
 
@@ -181,16 +180,16 @@ ischema_names = {
 # _FBDate, etc. as bind/result functionality is required)
 
 class FBTypeCompiler(compiler.GenericTypeCompiler):
-    def visit_boolean(self, type_):
-        return self.visit_SMALLINT(type_)
+    def visit_boolean(self, type_, **kw):
+        return self.visit_SMALLINT(type_, **kw)
 
-    def visit_datetime(self, type_):
-        return self.visit_TIMESTAMP(type_)
+    def visit_datetime(self, type_, **kw):
+        return self.visit_TIMESTAMP(type_, **kw)
 
-    def visit_TEXT(self, type_):
+    def visit_TEXT(self, type_, **kw):
         return "BLOB SUB_TYPE 1"
 
-    def visit_BLOB(self, type_):
+    def visit_BLOB(self, type_, **kw):
         return "BLOB SUB_TYPE 0"
 
     def _extend_string(self, type_, basic):
@@ -200,16 +199,16 @@ class FBTypeCompiler(compiler.GenericTypeCompiler):
         else:
             return '%s CHARACTER SET %s' % (basic, charset)
 
-    def visit_CHAR(self, type_):
-        basic = super(FBTypeCompiler, self).visit_CHAR(type_)
+    def visit_CHAR(self, type_, **kw):
+        basic = super(FBTypeCompiler, self).visit_CHAR(type_, **kw)
         return self._extend_string(type_, basic)
 
-    def visit_VARCHAR(self, type_):
+    def visit_VARCHAR(self, type_, **kw):
         if not type_.length:
             raise exc.CompileError(
                 "VARCHAR requires a length on dialect %s" %
                 self.dialect.name)
-        basic = super(FBTypeCompiler, self).visit_VARCHAR(type_)
+        basic = super(FBTypeCompiler, self).visit_VARCHAR(type_, **kw)
         return self._extend_string(type_, basic)
 
 
@@ -294,22 +293,22 @@ class FBCompiler(sql.compiler.SQLCompiler):
     def visit_sequence(self, seq):
         return "gen_id(%s, 1)" % self.preparer.format_sequence(seq)
 
-    def get_select_precolumns(self, select):
+    def get_select_precolumns(self, select, **kw):
         """Called when building a ``SELECT`` statement, position is just
         before column list Firebird puts the limit and offset right
         after the ``SELECT``...
         """
 
         result = ""
-        if select._limit:
-            result += "FIRST %s " % self.process(sql.literal(select._limit))
-        if select._offset:
-            result += "SKIP %s " % self.process(sql.literal(select._offset))
+        if select._limit_clause is not None:
+            result += "FIRST %s " % self.process(select._limit_clause, **kw)
+        if select._offset_clause is not None:
+            result += "SKIP %s " % self.process(select._offset_clause, **kw)
         if select._distinct:
             result += "DISTINCT "
         return result
 
-    def limit_clause(self, select):
+    def limit_clause(self, select, **kw):
         """Already taken care of in the `get_select_precolumns` method."""
 
         return ""
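
The change above moves Firebird's ``FIRST``/``SKIP`` rendering onto the new
``_limit_clause``/``_offset_clause`` accessors. For illustration, the rendering
can be exercised offline against the generic Firebird dialect; a minimal
sketch (table and column names are made up)::

    from sqlalchemy import select, table, column
    from sqlalchemy.dialects.firebird.base import FBDialect

    t = table('employees', column('id'))
    stmt = select([t.c.id]).order_by(t.c.id).limit(5).offset(10)
    # compiles roughly to:
    #   SELECT FIRST :param_1 SKIP :param_2 employees.id
    #   FROM employees ORDER BY employees.id
    print(stmt.compile(dialect=FBDialect()))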

dialects/firebird/fdb.py
@@ -1,5 +1,5 @@
 # firebird/fdb.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under

dialects/firebird/kinterbasdb.py
@@ -1,5 +1,5 @@
 # firebird/kinterbasdb.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under

dialects/mssql/__init__.py
@@ -1,5 +1,5 @@
 # mssql/__init__.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under

dialects/mssql/adodbapi.py
@@ -1,5 +1,5 @@
 # mssql/adodbapi.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under

dialects/mssql/base.py
@@ -1,5 +1,5 @@
 # mssql/base.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -166,6 +166,55 @@ how SQLAlchemy handles this:
 This
 is an auxilliary use case suitable for testing and bulk insert scenarios.
 
+.. _legacy_schema_rendering:
+
+Rendering of SQL statements that include schema qualifiers
+----------------------------------------------------------
+
+When using :class:`.Table` metadata that includes a "schema" qualifier,
+such as::
+
+    account_table = Table(
+        'account', metadata,
+        Column('id', Integer, primary_key=True),
+        Column('info', String(100)),
+        schema="customer_schema"
+    )
+
+The SQL Server dialect has a long-standing behavior that it will attempt
+to turn a schema-qualified table name into an alias, such as::
+
+    >>> eng = create_engine("mssql+pymssql://mydsn")
+    >>> print(account_table.select().compile(eng))
+    SELECT account_1.id, account_1.info
+    FROM customer_schema.account AS account_1
+
+This behavior is legacy, does not function correctly for many forms
+of SQL statements, and will be disabled by default in the 1.1 series
+of SQLAlchemy. As of 1.0.5, the above statement will produce the following
+warning::
+
+    SAWarning: legacy_schema_aliasing flag is defaulted to True;
+    some schema-qualified queries may not function correctly.
+    Consider setting this flag to False for modern SQL Server versions;
+    this flag will default to False in version 1.1
+
+This warning encourages the :class:`.Engine` to be created as follows::
+
+    >>> eng = create_engine("mssql+pymssql://mydsn", legacy_schema_aliasing=False)
+
+Where the above SELECT statement will produce::
+
+    >>> print(account_table.select().compile(eng))
+    SELECT customer_schema.account.id, customer_schema.account.info
+    FROM customer_schema.account
+
+The warning will not emit if the ``legacy_schema_aliasing`` flag is set
+to either True or False.
+
+.. versionadded:: 1.0.5 - Added the ``legacy_schema_aliasing`` flag to disable
+   the SQL Server dialect's legacy behavior with schema-qualified table
+   names. This flag will default to False in version 1.1.
+
 Collation Support
 -----------------
@@ -187,7 +236,7 @@ CREATE TABLE statement for this column will yield::
 LIMIT/OFFSET Support
 --------------------
 
-MSSQL has no support for the LIMIT or OFFSET keysowrds. LIMIT is
+MSSQL has no support for the LIMIT or OFFSET keywords. LIMIT is
 supported directly through the ``TOP`` Transact SQL keyword::
 
     select.limit
@@ -226,6 +275,53 @@ The DATE and TIME types are not available for MSSQL 2005 and
 previous - if a server version below 2008 is detected, DDL
 for these types will be issued as DATETIME.
 
+.. _mssql_large_type_deprecation:
+
+Large Text/Binary Type Deprecation
+----------------------------------
+
+Per `SQL Server 2012/2014 Documentation <http://technet.microsoft.com/en-us/library/ms187993.aspx>`_,
+the ``NTEXT``, ``TEXT`` and ``IMAGE`` datatypes are to be removed from SQL Server
+in a future release. SQLAlchemy normally relates these types to the
+:class:`.UnicodeText`, :class:`.Text` and :class:`.LargeBinary` datatypes.
+
+In order to accommodate this change, a new flag ``deprecate_large_types``
+is added to the dialect, which will be automatically set based on detection
+of the server version in use, if not otherwise set by the user. The
+behavior of this flag is as follows:
+
+* When this flag is ``True``, the :class:`.UnicodeText`, :class:`.Text` and
+  :class:`.LargeBinary` datatypes, when used to render DDL, will render the
+  types ``NVARCHAR(max)``, ``VARCHAR(max)``, and ``VARBINARY(max)``,
+  respectively. This is a new behavior as of the addition of this flag.
+
+* When this flag is ``False``, the :class:`.UnicodeText`, :class:`.Text` and
+  :class:`.LargeBinary` datatypes, when used to render DDL, will render the
+  types ``NTEXT``, ``TEXT``, and ``IMAGE``,
+  respectively. This is the long-standing behavior of these types.
+
+* The flag begins with the value ``None``, before a database connection is
+  established. If the dialect is used to render DDL without the flag being
+  set, it is interpreted the same as ``False``.
+
+* On first connection, the dialect detects if SQL Server version 2012 or greater
+  is in use; if the flag is still at ``None``, it sets it to ``True`` or
+  ``False`` based on whether 2012 or greater is detected.
+
+* The flag can be set to either ``True`` or ``False`` when the dialect
+  is created, typically via :func:`.create_engine`::
+
+      eng = create_engine("mssql+pymssql://user:pass@host/db",
+                          deprecate_large_types=True)
+
+* Complete control over whether the "old" or "new" types are rendered is
+  available in all SQLAlchemy versions by using the UPPERCASE type objects
+  instead: :class:`.NVARCHAR`, :class:`.VARCHAR`, :class:`.types.VARBINARY`,
+  :class:`.TEXT`, :class:`.mssql.NTEXT`, :class:`.mssql.IMAGE` will always remain
+  fixed and always output exactly that type.
+
+.. versionadded:: 1.0.0
+
 .. _mssql_indexes:
 
 Clustered Index Support
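
The flag's effect on DDL can be compared offline; a minimal sketch (the table
and column names are invented)::

    from sqlalchemy import Table, Column, MetaData, Text
    from sqlalchemy.schema import CreateTable
    from sqlalchemy.dialects.mssql.base import MSDialect

    t = Table('doc', MetaData(), Column('body', Text))

    for flag in (False, True):
        # roughly: "body TEXT" with the flag off, "body VARCHAR(max)" on
        dialect = MSDialect(deprecate_large_types=flag)
        print(CreateTable(t).compile(dialect=dialect))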
@@ -367,19 +463,20 @@ import operator
 import re
 
 from ... import sql, schema as sa_schema, exc, util
-from ...sql import compiler, expression, \
-    util as sql_util, cast
+from ...sql import compiler, expression, util as sql_util
 from ... import engine
 from ...engine import reflection, default
 from ... import types as sqltypes
 from ...types import INTEGER, BIGINT, SMALLINT, DECIMAL, NUMERIC, \
     FLOAT, TIMESTAMP, DATETIME, DATE, BINARY,\
-    VARBINARY, TEXT, VARCHAR, NVARCHAR, CHAR, NCHAR
+    TEXT, VARCHAR, NVARCHAR, CHAR, NCHAR
 
 
 from ...util import update_wrapper
 from . import information_schema as ischema
 
 # http://sqlserverbuilds.blogspot.com/
+MS_2012_VERSION = (11,)
 MS_2008_VERSION = (10,)
 MS_2005_VERSION = (9,)
 MS_2000_VERSION = (8,)
@@ -451,9 +548,13 @@ class _MSDate(sqltypes.Date):
             if isinstance(value, datetime.datetime):
                 return value.date()
             elif isinstance(value, util.string_types):
+                m = self._reg.match(value)
+                if not m:
+                    raise ValueError(
+                        "could not parse %r as a date value" % (value, ))
                 return datetime.date(*[
                     int(x or 0)
-                    for x in self._reg.match(value).groups()
+                    for x in m.groups()
                 ])
             else:
                 return value
@@ -485,9 +586,13 @@ class TIME(sqltypes.TIME):
             if isinstance(value, datetime.datetime):
                 return value.time()
             elif isinstance(value, util.string_types):
+                m = self._reg.match(value)
+                if not m:
+                    raise ValueError(
+                        "could not parse %r as a time value" % (value, ))
                 return datetime.time(*[
                     int(x or 0)
-                    for x in self._reg.match(value).groups()])
+                    for x in m.groups()])
             else:
                 return value
         return process
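
The new guards in both result processors raise a descriptive ``ValueError``
instead of an ``AttributeError`` when a string does not match. A standalone
sketch of the parse step, mirroring the date pattern ``_MSDate`` uses
internally::

    import datetime
    import re

    _reg = re.compile(r"(\d+)-(\d+)-(\d+)")   # date pattern, as in _MSDate

    value = "2015-06-25"
    m = _reg.match(value)
    if not m:
        raise ValueError("could not parse %r as a date value" % (value, ))
    print(datetime.date(*[int(x or 0) for x in m.groups()]))   # 2015-06-25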
@@ -545,6 +650,26 @@ class NTEXT(sqltypes.UnicodeText):
     __visit_name__ = 'NTEXT'
 
 
+class VARBINARY(sqltypes.VARBINARY, sqltypes.LargeBinary):
+    """The MSSQL VARBINARY type.
+
+    This type extends both :class:`.types.VARBINARY` and
+    :class:`.types.LargeBinary`. In "deprecate_large_types" mode,
+    the :class:`.types.LargeBinary` type will produce ``VARBINARY(max)``
+    on SQL Server.
+
+    .. versionadded:: 1.0.0
+
+    .. seealso::
+
+        :ref:`mssql_large_type_deprecation`
+
+
+
+    """
+    __visit_name__ = 'VARBINARY'
+
+
 class IMAGE(sqltypes.LargeBinary):
     __visit_name__ = 'IMAGE'
 
@@ -626,7 +751,6 @@ ischema_names = {
 
 
 class MSTypeCompiler(compiler.GenericTypeCompiler):
-
     def _extend(self, spec, type_, length=None):
         """Extend a string-type declaration with standard SQL
         COLLATE annotations.
@@ -647,103 +771,115 @@ class MSTypeCompiler(compiler.GenericTypeCompiler):
         return ' '.join([c for c in (spec, collation)
                          if c is not None])
 
-    def visit_FLOAT(self, type_):
+    def visit_FLOAT(self, type_, **kw):
         precision = getattr(type_, 'precision', None)
         if precision is None:
             return "FLOAT"
         else:
             return "FLOAT(%(precision)s)" % {'precision': precision}
 
-    def visit_TINYINT(self, type_):
+    def visit_TINYINT(self, type_, **kw):
         return "TINYINT"
 
-    def visit_DATETIMEOFFSET(self, type_):
-        if type_.precision:
+    def visit_DATETIMEOFFSET(self, type_, **kw):
+        if type_.precision is not None:
             return "DATETIMEOFFSET(%s)" % type_.precision
         else:
             return "DATETIMEOFFSET"
 
-    def visit_TIME(self, type_):
+    def visit_TIME(self, type_, **kw):
         precision = getattr(type_, 'precision', None)
-        if precision:
+        if precision is not None:
             return "TIME(%s)" % precision
         else:
             return "TIME"
 
-    def visit_DATETIME2(self, type_):
+    def visit_DATETIME2(self, type_, **kw):
         precision = getattr(type_, 'precision', None)
-        if precision:
+        if precision is not None:
             return "DATETIME2(%s)" % precision
         else:
             return "DATETIME2"
 
-    def visit_SMALLDATETIME(self, type_):
+    def visit_SMALLDATETIME(self, type_, **kw):
         return "SMALLDATETIME"
 
-    def visit_unicode(self, type_):
-        return self.visit_NVARCHAR(type_)
+    def visit_unicode(self, type_, **kw):
+        return self.visit_NVARCHAR(type_, **kw)
 
-    def visit_unicode_text(self, type_):
-        return self.visit_NTEXT(type_)
+    def visit_text(self, type_, **kw):
+        if self.dialect.deprecate_large_types:
+            return self.visit_VARCHAR(type_, **kw)
+        else:
+            return self.visit_TEXT(type_, **kw)
 
-    def visit_NTEXT(self, type_):
+    def visit_unicode_text(self, type_, **kw):
+        if self.dialect.deprecate_large_types:
+            return self.visit_NVARCHAR(type_, **kw)
+        else:
+            return self.visit_NTEXT(type_, **kw)
+
+    def visit_NTEXT(self, type_, **kw):
         return self._extend("NTEXT", type_)
 
-    def visit_TEXT(self, type_):
+    def visit_TEXT(self, type_, **kw):
         return self._extend("TEXT", type_)
 
-    def visit_VARCHAR(self, type_):
+    def visit_VARCHAR(self, type_, **kw):
         return self._extend("VARCHAR", type_, length=type_.length or 'max')
 
-    def visit_CHAR(self, type_):
+    def visit_CHAR(self, type_, **kw):
         return self._extend("CHAR", type_)
 
-    def visit_NCHAR(self, type_):
+    def visit_NCHAR(self, type_, **kw):
         return self._extend("NCHAR", type_)
 
-    def visit_NVARCHAR(self, type_):
+    def visit_NVARCHAR(self, type_, **kw):
         return self._extend("NVARCHAR", type_, length=type_.length or 'max')
 
-    def visit_date(self, type_):
+    def visit_date(self, type_, **kw):
         if self.dialect.server_version_info < MS_2008_VERSION:
-            return self.visit_DATETIME(type_)
+            return self.visit_DATETIME(type_, **kw)
         else:
-            return self.visit_DATE(type_)
+            return self.visit_DATE(type_, **kw)
 
-    def visit_time(self, type_):
+    def visit_time(self, type_, **kw):
         if self.dialect.server_version_info < MS_2008_VERSION:
-            return self.visit_DATETIME(type_)
+            return self.visit_DATETIME(type_, **kw)
         else:
-            return self.visit_TIME(type_)
+            return self.visit_TIME(type_, **kw)
 
-    def visit_large_binary(self, type_):
-        return self.visit_IMAGE(type_)
+    def visit_large_binary(self, type_, **kw):
+        if self.dialect.deprecate_large_types:
+            return self.visit_VARBINARY(type_, **kw)
+        else:
+            return self.visit_IMAGE(type_, **kw)
 
-    def visit_IMAGE(self, type_):
+    def visit_IMAGE(self, type_, **kw):
         return "IMAGE"
 
-    def visit_VARBINARY(self, type_):
+    def visit_VARBINARY(self, type_, **kw):
         return self._extend(
             "VARBINARY",
             type_,
             length=type_.length or 'max')
 
-    def visit_boolean(self, type_):
+    def visit_boolean(self, type_, **kw):
         return self.visit_BIT(type_)
 
-    def visit_BIT(self, type_):
+    def visit_BIT(self, type_, **kw):
         return "BIT"
 
-    def visit_MONEY(self, type_):
+    def visit_MONEY(self, type_, **kw):
         return "MONEY"
 
-    def visit_SMALLMONEY(self, type_):
+    def visit_SMALLMONEY(self, type_, **kw):
         return 'SMALLMONEY'
 
-    def visit_UNIQUEIDENTIFIER(self, type_):
+    def visit_UNIQUEIDENTIFIER(self, type_, **kw):
         return "UNIQUEIDENTIFIER"
 
-    def visit_SQL_VARIANT(self, type_):
+    def visit_SQL_VARIANT(self, type_, **kw):
         return 'SQL_VARIANT'
 
 
@@ -846,7 +982,7 @@ class MSExecutionContext(default.DefaultExecutionContext):
                         "SET IDENTITY_INSERT %s OFF" %
                         self.dialect.identifier_preparer. format_table(
                             self.compiled.statement.table)))
-            except:
+            except Exception:
                 pass
 
     def get_result_proxy(self):
@@ -872,6 +1008,15 @@ class MSSQLCompiler(compiler.SQLCompiler):
         self.tablealiases = {}
         super(MSSQLCompiler, self).__init__(*args, **kwargs)
 
+    def _with_legacy_schema_aliasing(fn):
+        def decorate(self, *arg, **kw):
+            if self.dialect.legacy_schema_aliasing:
+                return fn(self, *arg, **kw)
+            else:
+                super_ = getattr(super(MSSQLCompiler, self), fn.__name__)
+                return super_(*arg, **kw)
+        return decorate
+
     def visit_now_func(self, fn, **kw):
         return "CURRENT_TIMESTAMP"
 
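
The ``_with_legacy_schema_aliasing`` decorator added above dispatches between
the legacy method body and the base :class:`.SQLCompiler` implementation,
keyed on a dialect flag. A self-contained sketch of the same pattern (class
and flag names invented)::

    class Default(object):
        def visit_table(self):
            return "standard rendering"

    class Legacy(Default):
        legacy_flag = True

        def _with_flag(fn):
            # call the decorated (legacy) body only when the flag is on;
            # otherwise fall through to the base class implementation
            def decorate(self, *arg, **kw):
                if self.legacy_flag:
                    return fn(self, *arg, **kw)
                super_ = getattr(super(Legacy, self), fn.__name__)
                return super_(*arg, **kw)
            return decorate

        @_with_flag
        def visit_table(self):
            return "legacy rendering"

    print(Legacy().visit_table())     # legacy rendering
    Legacy.legacy_flag = False
    print(Legacy().visit_table())     # standard rendering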
@@ -900,19 +1045,24 @@ class MSSQLCompiler(compiler.SQLCompiler):
             self.process(binary.left, **kw),
             self.process(binary.right, **kw))
 
-    def get_select_precolumns(self, select):
-        """ MS-SQL puts TOP, its version of LIMIT, here """
-        if select._distinct or select._limit is not None:
-            s = select._distinct and "DISTINCT " or ""
+    def get_select_precolumns(self, select, **kw):
+        """ MS-SQL puts TOP, it's version of LIMIT here """
 
-            if select._limit is not None:
-                if not select._offset:
-                    s += "TOP %d " % select._limit
+        s = ""
+        if select._distinct:
+            s += "DISTINCT "
 
-            if s:
-                return s
-        return compiler.SQLCompiler.get_select_precolumns(self, select)
+        if select._simple_int_limit and not select._offset:
+            # ODBC drivers and possibly others
+            # don't support bind params in the SELECT clause on SQL Server.
+            # so have to use literal here.
+            s += "TOP %d " % select._limit
+
+        if s:
+            return s
+        else:
+            return compiler.SQLCompiler.get_select_precolumns(
+                self, select, **kw)
 
     def get_from_hint_text(self, table, text):
         return text
@@ -920,7 +1070,7 @@ class MSSQLCompiler(compiler.SQLCompiler):
     def get_crud_hint_text(self, table, text):
         return text
 
-    def limit_clause(self, select):
+    def limit_clause(self, select, **kw):
         # Limit in mssql is after the select keyword
         return ""
 
@@ -929,39 +1079,48 @@ class MSSQLCompiler(compiler.SQLCompiler):
         so tries to wrap it in a subquery with ``row_number()`` criterion.
 
         """
-        if select._offset and not getattr(select, '_mssql_visit', None):
+        if (
+            (
+                not select._simple_int_limit and
+                select._limit_clause is not None
+            ) or (
+                select._offset_clause is not None and
+                not select._simple_int_offset or select._offset
+            )
+        ) and not getattr(select, '_mssql_visit', None):
+
             # to use ROW_NUMBER(), an ORDER BY is required.
             if not select._order_by_clause.clauses:
                 raise exc.CompileError('MSSQL requires an order_by when '
-                                       'using an offset.')
-            _offset = select._offset
-            _limit = select._limit
+                                       'using an OFFSET or a non-simple '
+                                       'LIMIT clause')
+
             _order_by_clauses = select._order_by_clause.clauses
+            limit_clause = select._limit_clause
+            offset_clause = select._offset_clause
+            kwargs['select_wraps_for'] = select
             select = select._generate()
             select._mssql_visit = True
             select = select.column(
                 sql.func.ROW_NUMBER().over(order_by=_order_by_clauses)
-                .label("mssql_rn")
-            ).order_by(None).alias()
+                .label("mssql_rn")).order_by(None).alias()
 
             mssql_rn = sql.column('mssql_rn')
             limitselect = sql.select([c for c in select.c if
                                       c.key != 'mssql_rn'])
-            limitselect.append_whereclause(mssql_rn > _offset)
-            if _limit is not None:
-                limitselect.append_whereclause(mssql_rn <= (_limit + _offset))
-            return self.process(limitselect, iswrapper=True, **kwargs)
+            if offset_clause is not None:
+                limitselect.append_whereclause(mssql_rn > offset_clause)
+                if limit_clause is not None:
+                    limitselect.append_whereclause(
+                        mssql_rn <= (limit_clause + offset_clause))
+            else:
+                limitselect.append_whereclause(
+                    mssql_rn <= (limit_clause))
+            return self.process(limitselect, **kwargs)
         else:
             return compiler.SQLCompiler.visit_select(self, select, **kwargs)
 
-    def _schema_aliased_table(self, table):
-        if getattr(table, 'schema', None) is not None:
-            if table not in self.tablealiases:
-                self.tablealiases[table] = table.alias()
-            return self.tablealiases[table]
-        else:
-            return None
-
+    @_with_legacy_schema_aliasing
     def visit_table(self, table, mssql_aliased=False, iscrud=False, **kwargs):
         if mssql_aliased is table or iscrud:
             return super(MSSQLCompiler, self).visit_table(table, **kwargs)
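
For reference, the ``ROW_NUMBER()`` emulation above wraps the original
statement in a subquery and filters on the generated row number. A minimal
sketch of the compiled output (table and column names invented, SQL
abbreviated)::

    from sqlalchemy import select, table, column
    from sqlalchemy.dialects.mssql.base import MSDialect

    t = table('t', column('id'), column('name'))
    stmt = select([t.c.id, t.c.name]).order_by(t.c.id).limit(5).offset(10)
    # compiles roughly to:
    #   SELECT anon_1.id, anon_1.name
    #   FROM (SELECT t.id AS id, t.name AS name,
    #         ROW_NUMBER() OVER (ORDER BY t.id) AS mssql_rn FROM t) AS anon_1
    #   WHERE mssql_rn > :param_1 AND mssql_rn <= :param_2 + :param_1
    print(stmt.compile(dialect=MSDialect(legacy_schema_aliasing=False)))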
@@ -973,25 +1132,14 @@ class MSSQLCompiler(compiler.SQLCompiler):
         else:
             return super(MSSQLCompiler, self).visit_table(table, **kwargs)
 
-    def visit_alias(self, alias, **kwargs):
+    @_with_legacy_schema_aliasing
+    def visit_alias(self, alias, **kw):
         # translate for schema-qualified table aliases
-        kwargs['mssql_aliased'] = alias.original
-        return super(MSSQLCompiler, self).visit_alias(alias, **kwargs)
+        kw['mssql_aliased'] = alias.original
+        return super(MSSQLCompiler, self).visit_alias(alias, **kw)
 
-    def visit_extract(self, extract, **kw):
-        field = self.extract_map.get(extract.field, extract.field)
-        return 'DATEPART("%s", %s)' % \
-            (field, self.process(extract.expr, **kw))
-
-    def visit_savepoint(self, savepoint_stmt):
-        return "SAVE TRANSACTION %s" % \
-            self.preparer.format_savepoint(savepoint_stmt)
-
-    def visit_rollback_to_savepoint(self, savepoint_stmt):
-        return ("ROLLBACK TRANSACTION %s"
-                % self.preparer.format_savepoint(savepoint_stmt))
-
-    def visit_column(self, column, add_to_result_map=None, **kwargs):
+    @_with_legacy_schema_aliasing
+    def visit_column(self, column, add_to_result_map=None, **kw):
         if column.table is not None and \
                 (not self.isupdate and not self.isdelete) or \
                 self.is_subquery():
@@ -1009,10 +1157,40 @@ class MSSQLCompiler(compiler.SQLCompiler):
                 )
 
                 return super(MSSQLCompiler, self).\
-                    visit_column(converted, **kwargs)
+                    visit_column(converted, **kw)
 
         return super(MSSQLCompiler, self).visit_column(
-            column, add_to_result_map=add_to_result_map, **kwargs)
+            column, add_to_result_map=add_to_result_map, **kw)
+
+    def _schema_aliased_table(self, table):
+        if getattr(table, 'schema', None) is not None:
+            if self.dialect._warn_schema_aliasing and \
+                    table.schema.lower() != 'information_schema':
+                util.warn(
+                    "legacy_schema_aliasing flag is defaulted to True; "
+                    "some schema-qualified queries may not function "
+                    "correctly. Consider setting this flag to False for "
+                    "modern SQL Server versions; this flag will default to "
+                    "False in version 1.1")
+
+            if table not in self.tablealiases:
+                self.tablealiases[table] = table.alias()
+            return self.tablealiases[table]
+        else:
+            return None
+
+    def visit_extract(self, extract, **kw):
+        field = self.extract_map.get(extract.field, extract.field)
+        return 'DATEPART(%s, %s)' % \
+            (field, self.process(extract.expr, **kw))
+
+    def visit_savepoint(self, savepoint_stmt):
+        return "SAVE TRANSACTION %s" % \
+            self.preparer.format_savepoint(savepoint_stmt)
+
+    def visit_rollback_to_savepoint(self, savepoint_stmt):
+        return ("ROLLBACK TRANSACTION %s"
+                % self.preparer.format_savepoint(savepoint_stmt))
 
     def visit_binary(self, binary, **kwargs):
         """Move bind parameters to the right-hand side of an operator, where
@@ -1141,8 +1319,11 @@ class MSSQLStrictCompiler(MSSQLCompiler):
 class MSDDLCompiler(compiler.DDLCompiler):
 
     def get_column_specification(self, column, **kwargs):
-        colspec = (self.preparer.format_column(column) + " "
-                   + self.dialect.type_compiler.process(column.type))
+        colspec = (
+            self.preparer.format_column(column) + " "
+            + self.dialect.type_compiler.process(
+                column.type, type_expression=column)
+        )
 
         if column.nullable is not None:
             if not column.nullable or column.primary_key or \
@@ -1321,6 +1502,10 @@ class MSDialect(default.DefaultDialect):
         sqltypes.Time: TIME,
     }
 
+    engine_config_types = default.DefaultDialect.engine_config_types.union([
+        ('legacy_schema_aliasing', util.asbool),
+    ])
+
    ischema_names = ischema_names
 
     supports_native_boolean = False
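
Because ``legacy_schema_aliasing`` is registered in ``engine_config_types``
with ``util.asbool``, the flag can also arrive as a string from an ini-style
configuration file. A minimal sketch (the URL is hypothetical and assumes the
pymssql driver is installed)::

    from sqlalchemy import engine_from_config

    config = {
        'sqlalchemy.url': 'mssql+pymssql://user:pass@host/db',
        # the string 'false' is coerced to a bool via util.asbool
        'sqlalchemy.legacy_schema_aliasing': 'false',
    }
    engine = engine_from_config(config, prefix='sqlalchemy.')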
@@ -1351,13 +1536,24 @@ class MSDialect(default.DefaultDialect):
                  query_timeout=None,
                  use_scope_identity=True,
                  max_identifier_length=None,
-                 schema_name="dbo", **opts):
+                 schema_name="dbo",
+                 deprecate_large_types=None,
+                 legacy_schema_aliasing=None, **opts):
         self.query_timeout = int(query_timeout or 0)
         self.schema_name = schema_name
 
         self.use_scope_identity = use_scope_identity
         self.max_identifier_length = int(max_identifier_length or 0) or \
             self.max_identifier_length
+        self.deprecate_large_types = deprecate_large_types
+
+        if legacy_schema_aliasing is None:
+            self.legacy_schema_aliasing = True
+            self._warn_schema_aliasing = True
+        else:
+            self.legacy_schema_aliasing = legacy_schema_aliasing
+            self._warn_schema_aliasing = False
+
         super(MSDialect, self).__init__(**opts)
 
     def do_savepoint(self, connection, name):
@@ -1371,21 +1567,31 @@ class MSDialect(default.DefaultDialect):
 
     def initialize(self, connection):
         super(MSDialect, self).initialize(connection)
+        self._setup_version_attributes()
+
+    def _setup_version_attributes(self):
         if self.server_version_info[0] not in list(range(8, 17)):
             # FreeTDS with version 4.2 seems to report here
             # a number like "95.10.255". Don't know what
             # that is. So emit warning.
+            # Use TDS Version 7.0 through 7.3, per the MS information here:
+            # https://msdn.microsoft.com/en-us/library/dd339982.aspx
+            # and FreeTDS information here (7.3 highest supported version):
+            # http://www.freetds.org/userguide/choosingtdsprotocol.htm
             util.warn(
                 "Unrecognized server version info '%s'. Version specific "
                 "behaviors may not function properly. If using ODBC "
-                "with FreeTDS, ensure server version 7.0 or 8.0, not 4.2, "
-                "is configured in the FreeTDS configuration." %
+                "with FreeTDS, ensure TDS_VERSION 7.0 through 7.3, not "
+                "4.2, is configured in the FreeTDS configuration." %
                 ".".join(str(x) for x in self.server_version_info))
         if self.server_version_info >= MS_2005_VERSION and \
                 'implicit_returning' not in self.__dict__:
             self.implicit_returning = True
         if self.server_version_info >= MS_2008_VERSION:
             self.supports_multivalues_insert = True
+        if self.deprecate_large_types is None:
+            self.deprecate_large_types = \
+                self.server_version_info >= MS_2012_VERSION
 
     def _get_default_schema_name(self, connection):
         if self.server_version_info < MS_2005_VERSION:
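
For illustration, the version-based defaulting of ``deprecate_large_types``
reduces to an ordinary tuple comparison; a standalone sketch with a
hypothetical detected version::

    MS_2012_VERSION = (11,)

    server_version_info = (12, 0, 2000)   # hypothetical: SQL Server 2014
    deprecate_large_types = None          # user left the flag unset

    if deprecate_large_types is None:
        deprecate_large_types = server_version_info >= MS_2012_VERSION

    print(deprecate_large_types)          # True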
@@ -1573,12 +1779,11 @@ class MSDialect(default.DefaultDialect):
             if coltype in (MSString, MSChar, MSNVarchar, MSNChar, MSText,
                            MSNText, MSBinary, MSVarBinary,
                            sqltypes.LargeBinary):
+                if charlen == -1:
+                    charlen = 'max'
                 kwargs['length'] = charlen
                 if collation:
                     kwargs['collation'] = collation
-                if coltype == MSText or \
-                        (coltype in (MSString, MSNVarchar) and charlen == -1):
-                    kwargs.pop('length')
 
             if coltype is None:
                 util.warn(

dialects/mssql/information_schema.py
@@ -1,5 +1,5 @@
 # mssql/information_schema.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under

dialects/mssql/mxodbc.py
@@ -1,5 +1,5 @@
 # mssql/mxodbc.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under

dialects/mssql/pymssql.py
@@ -1,5 +1,5 @@
 # mssql/pymssql.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -46,11 +46,12 @@ class MSDialect_pymssql(MSDialect):
     @classmethod
     def dbapi(cls):
         module = __import__('pymssql')
-        # pymmsql doesn't have a Binary method. we use string
-        # TODO: monkeypatching here is less than ideal
-        module.Binary = lambda x: x if hasattr(x, 'decode') else str(x)
-
+        # pymmsql < 2.1.1 doesn't have a Binary method. we use string
+        client_ver = tuple(int(x) for x in module.__version__.split("."))
+        if client_ver < (2, 1, 1):
+            # TODO: monkeypatching here is less than ideal
+            module.Binary = lambda x: x if hasattr(x, 'decode') else str(x)
+
         if client_ver < (1, ):
             util.warn("The pymssql dialect expects at least "
                       "the 1.0 series of the pymssql DBAPI.")
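
The version gate above relies on comparing a tuple parsed out of
``pymssql.__version__``; a standalone sketch with a hypothetical version
string::

    version = "2.0.1"    # hypothetical pymssql version
    client_ver = tuple(int(x) for x in version.split("."))
    print(client_ver)              # (2, 0, 1)
    print(client_ver < (2, 1, 1))  # True: the Binary shim is installed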
@@ -63,7 +64,7 @@ class MSDialect_pymssql(MSDialect):
     def _get_server_version_info(self, connection):
         vers = connection.scalar("select @@version")
         m = re.match(
-            r"Microsoft SQL Server.*? - (\d+).(\d+).(\d+).(\d+)", vers)
+            r"Microsoft .*? - (\d+).(\d+).(\d+).(\d+)", vers)
         if m:
             return tuple(int(x) for x in m.group(1, 2, 3, 4))
         else:
@@ -84,7 +85,8 @@ class MSDialect_pymssql(MSDialect):
                 "message 20003",  # connection timeout
                 "Error 10054",
                 "Not connected to any MS SQL server",
-                "Connection is closed"
+                "Connection is closed",
+                "message 20006",  # Write to the server failed
         ):
             if msg in str(e):
                 return True

dialects/mssql/pyodbc.py
@@ -1,5 +1,5 @@
 # mssql/pyodbc.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@ -12,74 +12,57 @@
|
|||
:connectstring: mssql+pyodbc://<username>:<password>@<dsnname>
|
||||
:url: http://pypi.python.org/pypi/pyodbc/
|
||||
|
||||
Additional Connection Examples
|
||||
-------------------------------
|
||||
Connecting to PyODBC
|
||||
--------------------
|
||||
|
||||
Examples of pyodbc connection string URLs:
|
||||
The URL here is to be translated to PyODBC connection strings, as
|
||||
detailed in `ConnectionStrings <https://code.google.com/p/pyodbc/wiki/ConnectionStrings>`_.
|
||||
|
||||
* ``mssql+pyodbc://mydsn`` - connects using the specified DSN named ``mydsn``.
|
||||
The connection string that is created will appear like::
|
||||
DSN Connections
|
||||
^^^^^^^^^^^^^^^
|
||||
|
||||
dsn=mydsn;Trusted_Connection=Yes
|
||||
* ``mssql+pyodbc://user:pass@mydsn`` - connects using the DSN named
  ``mydsn`` passing in the ``UID`` and ``PWD`` information. The
  connection string that is created will appear like::

    dsn=mydsn;UID=user;PWD=pass

* ``mssql+pyodbc://user:pass@mydsn/?LANGUAGE=us_english`` - connects
  using the DSN named ``mydsn`` passing in the ``UID`` and ``PWD``
  information, plus the additional connection configuration option
  ``LANGUAGE``. The connection string that is created will appear
  like::

    dsn=mydsn;UID=user;PWD=pass;LANGUAGE=us_english

* ``mssql+pyodbc://user:pass@host/db`` - connects using a connection
  that would appear like::

    DRIVER={SQL Server};Server=host;Database=db;UID=user;PWD=pass

* ``mssql+pyodbc://user:pass@host:123/db`` - connects using a connection
  string which includes the port information using the comma syntax.
  This will create the following connection string::

    DRIVER={SQL Server};Server=host,123;Database=db;UID=user;PWD=pass

* ``mssql+pyodbc://user:pass@host/db?port=123`` - connects using a
  connection string that includes the port information as a separate
  ``port`` keyword. This will create the following connection string::

    DRIVER={SQL Server};Server=host;Database=db;UID=user;PWD=pass;port=123

* ``mssql+pyodbc://user:pass@host/db?driver=MyDriver`` - connects using a
  connection string that includes a custom ODBC driver name. This will
  create the following connection string::

    DRIVER={MyDriver};Server=host;Database=db;UID=user;PWD=pass

If you require a connection string that is outside the options
presented above, use the ``odbc_connect`` keyword to pass in a
urlencoded connection string. What gets passed in will be urldecoded
and passed directly.

For example::

    mssql+pyodbc:///?odbc_connect=dsn%3Dmydsn%3BDatabase%3Ddb

would create the following connection string::

    dsn=mydsn;Database=db

Encoding your connection string can be easily accomplished through
the python shell. For example::

    >>> import urllib
    >>> urllib.quote_plus('dsn=mydsn;Database=db')
    'dsn%3Dmydsn%3BDatabase%3Ddb'

A DSN-based connection is **preferred** overall when using ODBC. A
basic DSN-based connection looks like::

    engine = create_engine("mssql+pyodbc://scott:tiger@some_dsn")

This will pass the following connection string to PyODBC::

    dsn=some_dsn;UID=scott;PWD=tiger

If the username and password are omitted, the DSN form will also add
the ``Trusted_Connection=yes`` directive to the ODBC string.

Hostname Connections
^^^^^^^^^^^^^^^^^^^^

Hostname-based connections are **not preferred**, however are supported.
The ODBC driver name must be explicitly specified::

    engine = create_engine("mssql+pyodbc://scott:tiger@myhost:port/databasename?driver=SQL+Server+Native+Client+10.0")

.. versionchanged:: 1.0.0 Hostname-based PyODBC connections now require the
   SQL Server driver name specified explicitly. SQLAlchemy cannot
   choose an optimal default here as it varies based on platform
   and installed drivers.

Other keywords interpreted by the Pyodbc dialect to be passed to
``pyodbc.connect()`` in both the DSN and hostname cases include:
``odbc_autotranslate``, ``ansi``, ``unicode_results``, ``autocommit``.

Pass through exact Pyodbc string
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

A PyODBC connection string can also be sent exactly as specified in
`ConnectionStrings <https://code.google.com/p/pyodbc/wiki/ConnectionStrings>`_
into the driver using the parameter ``odbc_connect``. The delimiters must
be URL escaped, however, as illustrated below using ``urllib.quote_plus``::

    import urllib
    params = urllib.quote_plus("DRIVER={SQL Server Native Client 10.0};SERVER=dagger;DATABASE=test;UID=user;PWD=password")

    engine = create_engine("mssql+pyodbc:///?odbc_connect=%s" % params)
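As a worked illustration of the escaping requirement, here is a short sketch of
building the pass-through URL (``urllib.quote_plus`` and ``create_engine`` as
above; the server and credential values are invented placeholders)::

    import urllib
    from sqlalchemy import create_engine

    raw = "DRIVER={SQL Server};Server=myhost;Database=mydb;UID=scott;PWD=tiger"
    # quote_plus escapes the ';' and '=' delimiters so the ODBC string
    # survives URL parsing intact
    engine = create_engine(
        "mssql+pyodbc:///?odbc_connect=%s" % urllib.quote_plus(raw))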
Unicode Binds
-------------
@@ -112,7 +95,7 @@ for unix + PyODBC.

"""

from .base import MSExecutionContext, MSDialect
from .base import MSExecutionContext, MSDialect, VARBINARY
from ...connectors.pyodbc import PyODBCConnector
from ... import types as sqltypes, util
import decimal

@@ -191,6 +174,22 @@ class _MSFloat_pyodbc(_ms_numeric_pyodbc, sqltypes.Float):
    pass


class _VARBINARY_pyodbc(VARBINARY):
    def bind_processor(self, dialect):
        if dialect.dbapi is None:
            return None

        DBAPIBinary = dialect.dbapi.Binary

        def process(value):
            if value is not None:
                return DBAPIBinary(value)
            else:
                # pyodbc-specific
                return dialect.dbapi.BinaryNull
        return process


class MSExecutionContext_pyodbc(MSExecutionContext):
    _embedded_scope_identity = False
@@ -243,13 +242,13 @@ class MSDialect_pyodbc(PyODBCConnector, MSDialect):

    execution_ctx_cls = MSExecutionContext_pyodbc

    pyodbc_driver_name = 'SQL Server'

    colspecs = util.update_copy(
        MSDialect.colspecs,
        {
            sqltypes.Numeric: _MSNumeric_pyodbc,
            sqltypes.Float: _MSFloat_pyodbc
            sqltypes.Float: _MSFloat_pyodbc,
            VARBINARY: _VARBINARY_pyodbc,
            sqltypes.LargeBinary: _VARBINARY_pyodbc,
        }
    )
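As an aside, ``util.update_copy`` is simply "copy the inherited mapping, then
overlay the overrides"; a minimal plain-dict sketch of the idea::

    def update_copy(d, _new):
        # shallow-copy the parent colspecs, then apply dialect overrides
        copied = dict(d)
        copied.update(_new)
        return copied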
@@ -1,5 +1,5 @@
# mssql/zxjdbc.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -13,6 +13,8 @@
    [?key=value&key=value...]
:driverurl: http://jtds.sourceforge.net/

.. note:: Jython is not supported by current versions of SQLAlchemy. The
   zxjdbc dialect should be considered as experimental.

"""
from ...connectors.zxJDBC import ZxJDBCConnector
@@ -1,5 +1,5 @@
# mysql/__init__.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

File diff suppressed because it is too large

@@ -1,5 +1,5 @@
# mysql/cymysql.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -1,5 +1,5 @@
# mysql/gaerdbms.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -17,6 +17,13 @@ developers-guide

.. versionadded:: 0.7.8

.. deprecated:: 1.0 This dialect is **no longer necessary** for
    Google Cloud SQL; the MySQLdb dialect can be used directly.
    Cloud SQL now recommends creating connections via the
    mysql dialect using the URL format

    ``mysql+mysqldb://root@/<dbname>?unix_socket=/cloudsql/<projectid>:<instancename>``


Pooling
-------

@@ -33,6 +40,7 @@ import os
from .mysqldb import MySQLDialect_mysqldb
from ...pool import NullPool
import re
from sqlalchemy.util import warn_deprecated


def _is_dev_environment():

@@ -43,6 +51,14 @@ class MySQLDialect_gaerdbms(MySQLDialect_mysqldb):

    @classmethod
    def dbapi(cls):

        warn_deprecated(
            "Google Cloud SQL now recommends creating connections via the "
            "MySQLdb dialect directly, using the URL format "
            "mysql+mysqldb://root@/<dbname>?unix_socket=/cloudsql/"
            "<projectid>:<instancename>"
        )

        # from django:
        # http://code.google.com/p/googleappengine/source/
        # browse/trunk/python/google/storage/speckle/
@@ -1,5 +1,5 @@
# mysql/mysqlconnector.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -14,6 +14,12 @@
:url: http://dev.mysql.com/downloads/connector/python/


Unicode
-------

Please see :ref:`mysql_unicode` for current recommendations on unicode
handling.

"""

from .base import (MySQLDialect, MySQLExecutionContext,

@@ -21,6 +27,7 @@ from .base import (MySQLDialect, MySQLExecutionContext,
                   BIT)

from ... import util
import re


class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext):
@@ -31,18 +38,34 @@ class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext):

class MySQLCompiler_mysqlconnector(MySQLCompiler):
    def visit_mod_binary(self, binary, operator, **kw):
        return self.process(binary.left, **kw) + " %% " + \
            self.process(binary.right, **kw)
        if self.dialect._mysqlconnector_double_percents:
            return self.process(binary.left, **kw) + " %% " + \
                self.process(binary.right, **kw)
        else:
            return self.process(binary.left, **kw) + " % " + \
                self.process(binary.right, **kw)

    def post_process_text(self, text):
        return text.replace('%', '%%')
        if self.dialect._mysqlconnector_double_percents:
            return text.replace('%', '%%')
        else:
            return text

    def escape_literal_column(self, text):
        if self.dialect._mysqlconnector_double_percents:
            return text.replace('%', '%%')
        else:
            return text


class MySQLIdentifierPreparer_mysqlconnector(MySQLIdentifierPreparer):

    def _escape_identifier(self, value):
        value = value.replace(self.escape_quote, self.escape_to_quote)
        return value.replace("%", "%%")
        if self.dialect._mysqlconnector_double_percents:
            return value.replace("%", "%%")
        else:
            return value


class _myconnpyBIT(BIT):
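The conditional escaping above exists because, under the ``format``
paramstyle, older mysql-connector re-interpolates the statement text and a
literal ``%`` must be doubled; a rough stand-alone mirror of the rule
(illustrative only)::

    def render_mod(left, right, double_percents):
        # mirrors visit_mod_binary: escape '%' only when the driver will
        # run its own %-interpolation over the statement
        op = " %% " if double_percents else " % "
        return left + op + right

    render_mod("x", "3", True)    # 'x %% 3' -- driver reduces it to 'x % 3'
    render_mod("x", "3", False)   # 'x % 3'  -- sent through unchanged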
@@ -55,8 +78,6 @@ class _myconnpyBIT(BIT):

class MySQLDialect_mysqlconnector(MySQLDialect):
    driver = 'mysqlconnector'

    if util.py2k:
        supports_unicode_statements = False
    supports_unicode_binds = True

    supports_sane_rowcount = True

@@ -77,6 +98,10 @@ class MySQLDialect_mysqlconnector(MySQLDialect):
        }
    )

    @util.memoized_property
    def supports_unicode_statements(self):
        return util.py3k or self._mysqlconnector_version_info > (2, 0)

    @classmethod
    def dbapi(cls):
        from mysql import connector
@@ -89,8 +114,10 @@ class MySQLDialect_mysqlconnector(MySQLDialect):

        util.coerce_kw_type(opts, 'buffered', bool)
        util.coerce_kw_type(opts, 'raise_on_warnings', bool)

        # unfortunately, MySQL/connector python refuses to release a
        # cursor without reading fully, so non-buffered isn't an option
        opts.setdefault('buffered', True)
        opts.setdefault('raise_on_warnings', True)

        # FOUND_ROWS must be set in ClientFlag to enable
        # supports_sane_rowcount.

@@ -101,10 +128,25 @@ class MySQLDialect_mysqlconnector(MySQLDialect):
                'client_flags', ClientFlag.get_default())
            client_flags |= ClientFlag.FOUND_ROWS
            opts['client_flags'] = client_flags
        except:
        except Exception:
            pass
        return [[], opts]

    @util.memoized_property
    def _mysqlconnector_version_info(self):
        if self.dbapi and hasattr(self.dbapi, '__version__'):
            m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?',
                         self.dbapi.__version__)
            if m:
                return tuple(
                    int(x)
                    for x in m.group(1, 2, 3)
                    if x is not None)

    @util.memoized_property
    def _mysqlconnector_double_percents(self):
        return not util.py3k and self._mysqlconnector_version_info < (2, 0)

    def _get_server_version_info(self, connection):
        dbapi_con = connection.connection
        version = dbapi_con.get_server_version()
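The version probe tolerates two- or three-part version strings because the
third regex group is optional; a quick standard-library check of the same
pattern (values invented)::

    import re

    pat = re.compile(r'(\d+)\.(\d+)(?:\.(\d+))?')
    m = pat.match('2.0.4')
    tuple(int(x) for x in m.group(1, 2, 3) if x is not None)   # (2, 0, 4)
    m = pat.match('1.2')
    tuple(int(x) for x in m.group(1, 2, 3) if x is not None)   # (1, 2)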
@@ -1,5 +1,5 @@
# mysql/mysqldb.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -13,78 +13,101 @@
:connectstring: mysql+mysqldb://<user>:<password>@<host>[:<port>]/<dbname>
:url: http://sourceforge.net/projects/mysql-python

.. _mysqldb_unicode:

Unicode
-------

MySQLdb requires a "charset" parameter to be passed in order for it
to handle non-ASCII characters correctly. When this parameter is passed,
MySQLdb will also implicitly set the "use_unicode" flag to true, which means
that it will return Python unicode objects instead of bytestrings.
However, SQLAlchemy's decode process, when C extensions are enabled,
is orders of magnitude faster than that of MySQLdb as it does not call into
Python functions to do so. Therefore, the **recommended URL to use for
unicode** will include both charset and use_unicode=0::

    create_engine("mysql+mysqldb://user:pass@host/dbname?charset=utf8&use_unicode=0")

Please see :ref:`mysql_unicode` for current recommendations on unicode
handling.

Py3K Support
------------

As of this writing, MySQLdb only runs on Python 2. It is not known how
MySQLdb behaves on Python 3 as far as unicode decoding.
Currently, MySQLdb only runs on Python 2 and development has been stopped.
`mysqlclient`_ is a fork of MySQLdb and provides Python 3 support as well
as some bugfixes.

.. _mysqlclient: https://github.com/PyMySQL/mysqlclient-python

Known Issues
------------
Using MySQLdb with Google Cloud SQL
-----------------------------------

MySQL-python version 1.2.2 has a serious memory leak related
to unicode conversion, a feature which is disabled via ``use_unicode=0``.
It is strongly advised to use the latest version of MySQL-Python.
Google Cloud SQL now recommends use of the MySQLdb dialect. Connect
using a URL like the following::

    mysql+mysqldb://root@/<dbname>?unix_socket=/cloudsql/<projectid>:<instancename>

"""
from .base import (MySQLDialect, MySQLExecutionContext,
                   MySQLCompiler, MySQLIdentifierPreparer)
from ...connectors.mysqldb import (
    MySQLDBExecutionContext,
    MySQLDBCompiler,
    MySQLDBIdentifierPreparer,
    MySQLDBConnector
)
from .base import TEXT
from ... import sql
from ... import util
import re


class MySQLExecutionContext_mysqldb(
        MySQLDBExecutionContext,
        MySQLExecutionContext):
    pass
class MySQLExecutionContext_mysqldb(MySQLExecutionContext):

    @property
    def rowcount(self):
        if hasattr(self, '_rowcount'):
            return self._rowcount
        else:
            return self.cursor.rowcount


class MySQLCompiler_mysqldb(MySQLDBCompiler, MySQLCompiler):
    pass
class MySQLCompiler_mysqldb(MySQLCompiler):
    def visit_mod_binary(self, binary, operator, **kw):
        return self.process(binary.left, **kw) + " %% " + \
            self.process(binary.right, **kw)

    def post_process_text(self, text):
        return text.replace('%', '%%')


class MySQLIdentifierPreparer_mysqldb(
        MySQLDBIdentifierPreparer,
        MySQLIdentifierPreparer):
    pass
class MySQLIdentifierPreparer_mysqldb(MySQLIdentifierPreparer):

    def _escape_identifier(self, value):
        value = value.replace(self.escape_quote, self.escape_to_quote)
        return value.replace("%", "%%")


class MySQLDialect_mysqldb(MySQLDBConnector, MySQLDialect):
class MySQLDialect_mysqldb(MySQLDialect):
    driver = 'mysqldb'
    supports_unicode_statements = True
    supports_sane_rowcount = True
    supports_sane_multi_rowcount = True

    supports_native_decimal = True

    default_paramstyle = 'format'
    execution_ctx_cls = MySQLExecutionContext_mysqldb
    statement_compiler = MySQLCompiler_mysqldb
    preparer = MySQLIdentifierPreparer_mysqldb

    @classmethod
    def dbapi(cls):
        return __import__('MySQLdb')

    def do_executemany(self, cursor, statement, parameters, context=None):
        rowcount = cursor.executemany(statement, parameters)
        if context is not None:
            context._rowcount = rowcount

    def _check_unicode_returns(self, connection):
        # work around issue fixed in
        # https://github.com/farcepest/MySQLdb1/commit/cd44524fef63bd3fcb71947392326e9742d520e8
        # specific issue w/ the utf8_bin collation and unicode returns

        has_utf8_bin = connection.scalar(
            "show collation where %s = 'utf8' and %s = 'utf8_bin'"
            % (
                self.identifier_preparer.quote("Charset"),
                self.identifier_preparer.quote("Collation")
            ))
        has_utf8_bin = self.server_version_info > (5, ) and \
            connection.scalar(
                "show collation where %s = 'utf8' and %s = 'utf8_bin'"
                % (
                    self.identifier_preparer.quote("Charset"),
                    self.identifier_preparer.quote("Collation")
                ))
        if has_utf8_bin:
            additional_tests = [
                sql.collate(sql.cast(
@@ -94,7 +117,82 @@ class MySQLDialect_mysqldb(MySQLDBConnector, MySQLDialect):
            ]
        else:
            additional_tests = []
        return super(MySQLDBConnector, self)._check_unicode_returns(
        return super(MySQLDialect_mysqldb, self)._check_unicode_returns(
            connection, additional_tests)

    def create_connect_args(self, url):
        opts = url.translate_connect_args(database='db', username='user',
                                          password='passwd')
        opts.update(url.query)

        util.coerce_kw_type(opts, 'compress', bool)
        util.coerce_kw_type(opts, 'connect_timeout', int)
        util.coerce_kw_type(opts, 'read_timeout', int)
        util.coerce_kw_type(opts, 'client_flag', int)
        util.coerce_kw_type(opts, 'local_infile', int)
        # Note: using either of the below will cause all strings to be
        # returned as Unicode, both in raw SQL operations and with column
        # types like String and MSString.
        util.coerce_kw_type(opts, 'use_unicode', bool)
        util.coerce_kw_type(opts, 'charset', str)

        # Rich values 'cursorclass' and 'conv' are not supported via
        # query string.

        ssl = {}
        keys = ['ssl_ca', 'ssl_key', 'ssl_cert', 'ssl_capath', 'ssl_cipher']
        for key in keys:
            if key in opts:
                ssl[key[4:]] = opts[key]
                util.coerce_kw_type(ssl, key[4:], str)
                del opts[key]
        if ssl:
            opts['ssl'] = ssl

        # FOUND_ROWS must be set in CLIENT_FLAGS to enable
        # supports_sane_rowcount.
        client_flag = opts.get('client_flag', 0)
        if self.dbapi is not None:
            try:
                CLIENT_FLAGS = __import__(
                    self.dbapi.__name__ + '.constants.CLIENT'
                ).constants.CLIENT
                client_flag |= CLIENT_FLAGS.FOUND_ROWS
            except (AttributeError, ImportError):
                self.supports_sane_rowcount = False
        opts['client_flag'] = client_flag
        return [[], opts]

    def _get_server_version_info(self, connection):
        dbapi_con = connection.connection
        version = []
        r = re.compile('[.\-]')
        for n in r.split(dbapi_con.get_server_info()):
            try:
                version.append(int(n))
            except ValueError:
                version.append(n)
        return tuple(version)
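The dot-or-dash split keeps any non-numeric trailing token rather than
discarding it; the same loop run against a typical server string (the value
is invented)::

    import re

    r = re.compile(r'[.\-]')
    version = []
    for n in r.split('5.6.17-log'):
        try:
            version.append(int(n))
        except ValueError:
            version.append(n)
    tuple(version)   # (5, 6, 17, 'log')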
    def _extract_error_code(self, exception):
        return exception.args[0]

    def _detect_charset(self, connection):
        """Sniff out the character set in use for connection results."""

        try:
            # note: the SQL here would be
            # "SHOW VARIABLES LIKE 'character_set%%'"
            cset_name = connection.connection.character_set_name
        except AttributeError:
            util.warn(
                "No 'character_set_name' can be detected with "
                "this MySQL-Python version; "
                "please upgrade to a recent version of MySQL-Python. "
                "Assuming latin1.")
            return 'latin1'
        else:
            return cset_name()


dialect = MySQLDialect_mysqldb
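Given the ``ssl_*`` handling in ``create_connect_args`` above, TLS material
can be supplied directly on the URL query string; a hedged example (all paths
are invented placeholders)::

    from sqlalchemy import create_engine

    # each ssl_* query argument is folded into MySQLdb's ssl dict
    engine = create_engine(
        "mysql+mysqldb://scott:tiger@host/dbname"
        "?ssl_ca=/path/to/ca.pem"
        "&ssl_cert=/path/to/client-cert.pem"
        "&ssl_key=/path/to/client-key.pem")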
@@ -1,5 +1,5 @@
# mysql/oursql.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -16,22 +16,10 @@
Unicode
-------

oursql defaults to using ``utf8`` as the connection charset, but other
encodings may be used instead. Like the MySQL-Python driver, unicode support
can be completely disabled::

    # oursql sets the connection charset to utf8 automatically; all strings
    # come back as utf8 str
    create_engine('mysql+oursql:///mydb?use_unicode=0')

To not automatically use ``utf8`` and instead use whatever the connection
defaults to, there is a separate parameter::

    # use the default connection charset; all strings come back as unicode
    create_engine('mysql+oursql:///mydb?default_charset=1')

    # use latin1 as the connection charset; all strings come back as unicode
    create_engine('mysql+oursql:///mydb?charset=latin1')

Please see :ref:`mysql_unicode` for current recommendations on unicode
handling.

"""

import re
@@ -1,5 +1,5 @@
# mysql/pymysql.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -12,7 +12,13 @@
:dbapi: pymysql
:connectstring: mysql+pymysql://<username>:<password>@<host>/<dbname>\
    [?<options>]
:url: http://code.google.com/p/pymysql/
:url: http://www.pymysql.org/

Unicode
-------

Please see :ref:`mysql_unicode` for current recommendations on unicode
handling.

MySQL-Python Compatibility
--------------------------

@@ -31,8 +37,12 @@ class MySQLDialect_pymysql(MySQLDialect_mysqldb):
    driver = 'pymysql'

    description_encoding = None
    if py3k:
        supports_unicode_statements = True

    # generally, these two values should be both True
    # or both False. PyMySQL unicode tests pass all the way back
    # to 0.4 either way. See [ticket:3337]
    supports_unicode_statements = True
    supports_unicode_binds = True

    @classmethod
    def dbapi(cls):
@@ -1,5 +1,5 @@
# mysql/pyodbc.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -14,14 +14,11 @@
:connectstring: mysql+pyodbc://<username>:<password>@<dsnname>
:url: http://pypi.python.org/pypi/pyodbc/


Limitations
-----------

The mysql-pyodbc dialect is subject to unresolved character encoding issues
which exist within the current ODBC drivers available.
(see http://code.google.com/p/pyodbc/issues/detail?id=25). Consider usage
of OurSQL, MySQLdb, or MySQL-connector/Python.
.. note:: The PyODBC for MySQL dialect is not well supported, and
   is subject to unresolved character encoding issues
   which exist within the current ODBC drivers available.
   (see http://code.google.com/p/pyodbc/issues/detail?id=25).
   Other dialects for MySQL are recommended.

"""
@@ -1,5 +1,5 @@
# mysql/zxjdbc.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -14,6 +14,9 @@
    <database>
:driverurl: http://dev.mysql.com/downloads/connector/j/

.. note:: Jython is not supported by current versions of SQLAlchemy. The
   zxjdbc dialect should be considered as experimental.

Character Sets
--------------
@@ -1,5 +1,5 @@
# oracle/__init__.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -1,5 +1,5 @@
# oracle/base.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -213,15 +213,81 @@ is reflected and the type is reported as ``DATE``, the time-supporting
examining the type of column for use in special Python translations or
for migrating schemas to other database backends.

.. _oracle_table_options:

Oracle Table Options
--------------------

The CREATE TABLE phrase supports the following options with Oracle
in conjunction with the :class:`.Table` construct:


* ``ON COMMIT``::

    Table(
        "some_table", metadata, ...,
        prefixes=['GLOBAL TEMPORARY'], oracle_on_commit='PRESERVE ROWS')

  .. versionadded:: 1.0.0

* ``COMPRESS``::

    Table('mytable', metadata, Column('data', String(32)),
        oracle_compress=True)

    Table('mytable', metadata, Column('data', String(32)),
        oracle_compress=6)

  The ``oracle_compress`` parameter accepts either an integer compression
  level, or ``True`` to use the default compression level.

  .. versionadded:: 1.0.0

.. _oracle_index_options:

Oracle Specific Index Options
-----------------------------

Bitmap Indexes
~~~~~~~~~~~~~~

You can specify the ``oracle_bitmap`` parameter to create a bitmap index
instead of a B-tree index::

    Index('my_index', my_table.c.data, oracle_bitmap=True)

Bitmap indexes cannot be unique and cannot be compressed. SQLAlchemy will not
check for such limitations, only the database will.

.. versionadded:: 1.0.0

Index compression
~~~~~~~~~~~~~~~~~

Oracle has a more efficient storage mode for indexes containing lots of
repeated values. Use the ``oracle_compress`` parameter to turn on key
compression::

    Index('my_index', my_table.c.data, oracle_compress=True)

    Index('my_index', my_table.c.data1, my_table.c.data2, unique=True,
        oracle_compress=1)

The ``oracle_compress`` parameter accepts either an integer specifying the
number of prefix columns to compress, or ``True`` to use the default (all
columns for non-unique indexes, all but the last column for unique indexes).

.. versionadded:: 1.0.0

"""

import re

from sqlalchemy import util, sql
from sqlalchemy.engine import default, base, reflection
from sqlalchemy.engine import default, reflection
from sqlalchemy.sql import compiler, visitors, expression
from sqlalchemy.sql import (operators as sql_operators,
                            functions as sql_functions)
from sqlalchemy.sql import operators as sql_operators
from sqlalchemy.sql.elements import quoted_name
from sqlalchemy import types as sqltypes, schema as sa_schema
from sqlalchemy.types import VARCHAR, NVARCHAR, CHAR, \
    BLOB, CLOB, TIMESTAMP, FLOAT
@@ -300,7 +366,6 @@ class LONG(sqltypes.Text):


class DATE(sqltypes.DateTime):

    """Provide the oracle DATE type.

    This type has no special Python behavior, except that it subclasses

@@ -349,7 +414,6 @@ class INTERVAL(sqltypes.TypeEngine):


class ROWID(sqltypes.TypeEngine):

    """Oracle ROWID type.

    When used in a cast() or similar, generates ROWID.

@@ -359,7 +423,6 @@ class ROWID(sqltypes.TypeEngine):


class _OracleBoolean(sqltypes.Boolean):

    def get_dbapi_type(self, dbapi):
        return dbapi.NUMBER
@@ -395,19 +458,19 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler):
    # Oracle does not allow milliseconds in DATE
    # Oracle does not support TIME columns

    def visit_datetime(self, type_):
        return self.visit_DATE(type_)
    def visit_datetime(self, type_, **kw):
        return self.visit_DATE(type_, **kw)

    def visit_float(self, type_):
        return self.visit_FLOAT(type_)
    def visit_float(self, type_, **kw):
        return self.visit_FLOAT(type_, **kw)

    def visit_unicode(self, type_):
    def visit_unicode(self, type_, **kw):
        if self.dialect._supports_nchar:
            return self.visit_NVARCHAR2(type_)
            return self.visit_NVARCHAR2(type_, **kw)
        else:
            return self.visit_VARCHAR2(type_)
            return self.visit_VARCHAR2(type_, **kw)

    def visit_INTERVAL(self, type_):
    def visit_INTERVAL(self, type_, **kw):
        return "INTERVAL DAY%s TO SECOND%s" % (
            type_.day_precision is not None and
            "(%d)" % type_.day_precision or

@@ -417,22 +480,22 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler):
            "",
        )

    def visit_LONG(self, type_):
    def visit_LONG(self, type_, **kw):
        return "LONG"

    def visit_TIMESTAMP(self, type_):
    def visit_TIMESTAMP(self, type_, **kw):
        if type_.timezone:
            return "TIMESTAMP WITH TIME ZONE"
        else:
            return "TIMESTAMP"

    def visit_DOUBLE_PRECISION(self, type_):
        return self._generate_numeric(type_, "DOUBLE PRECISION")
    def visit_DOUBLE_PRECISION(self, type_, **kw):
        return self._generate_numeric(type_, "DOUBLE PRECISION", **kw)

    def visit_NUMBER(self, type_, **kw):
        return self._generate_numeric(type_, "NUMBER", **kw)

    def _generate_numeric(self, type_, name, precision=None, scale=None):
    def _generate_numeric(self, type_, name, precision=None, scale=None, **kw):
        if precision is None:
            precision = type_.precision

@@ -448,17 +511,17 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler):
            n = "%(name)s(%(precision)s, %(scale)s)"
            return n % {'name': name, 'precision': precision, 'scale': scale}

    def visit_string(self, type_):
        return self.visit_VARCHAR2(type_)
    def visit_string(self, type_, **kw):
        return self.visit_VARCHAR2(type_, **kw)

    def visit_VARCHAR2(self, type_):
    def visit_VARCHAR2(self, type_, **kw):
        return self._visit_varchar(type_, '', '2')

    def visit_NVARCHAR2(self, type_):
    def visit_NVARCHAR2(self, type_, **kw):
        return self._visit_varchar(type_, 'N', '2')
    visit_NVARCHAR = visit_NVARCHAR2

    def visit_VARCHAR(self, type_):
    def visit_VARCHAR(self, type_, **kw):
        return self._visit_varchar(type_, '', '')

    def _visit_varchar(self, type_, n, num):

@@ -471,36 +534,35 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler):
        varchar = "%(n)sVARCHAR%(two)s(%(length)s)"
        return varchar % {'length': type_.length, 'two': num, 'n': n}

    def visit_text(self, type_):
        return self.visit_CLOB(type_)
    def visit_text(self, type_, **kw):
        return self.visit_CLOB(type_, **kw)

    def visit_unicode_text(self, type_):
    def visit_unicode_text(self, type_, **kw):
        if self.dialect._supports_nchar:
            return self.visit_NCLOB(type_)
            return self.visit_NCLOB(type_, **kw)
        else:
            return self.visit_CLOB(type_)
            return self.visit_CLOB(type_, **kw)

    def visit_large_binary(self, type_):
        return self.visit_BLOB(type_)
    def visit_large_binary(self, type_, **kw):
        return self.visit_BLOB(type_, **kw)

    def visit_big_integer(self, type_):
        return self.visit_NUMBER(type_, precision=19)
    def visit_big_integer(self, type_, **kw):
        return self.visit_NUMBER(type_, precision=19, **kw)

    def visit_boolean(self, type_):
        return self.visit_SMALLINT(type_)
    def visit_boolean(self, type_, **kw):
        return self.visit_SMALLINT(type_, **kw)

    def visit_RAW(self, type_):
    def visit_RAW(self, type_, **kw):
        if type_.length:
            return "RAW(%(length)s)" % {'length': type_.length}
        else:
            return "RAW"

    def visit_ROWID(self, type_):
    def visit_ROWID(self, type_, **kw):
        return "ROWID"


class OracleCompiler(compiler.SQLCompiler):

    """Oracle compiler modifies the lexical structure of Select
    statements to work under non-ANSI configured Oracle databases, if
    the use_ansi flag is False.
@@ -538,6 +600,9 @@ class OracleCompiler(compiler.SQLCompiler):
    def visit_false(self, expr, **kw):
        return '0'

    def get_cte_preamble(self, recursive):
        return "WITH"

    def get_select_hint_text(self, byfroms):
        return " ".join(
            "/*+ %s */" % text for table, text in byfroms.items()

@@ -601,29 +666,17 @@ class OracleCompiler(compiler.SQLCompiler):
        else:
            return sql.and_(*clauses)

    def visit_outer_join_column(self, vc):
        return self.process(vc.column) + "(+)"
    def visit_outer_join_column(self, vc, **kw):
        return self.process(vc.column, **kw) + "(+)"

    def visit_sequence(self, seq):
        return (self.dialect.identifier_preparer.format_sequence(seq) +
                ".nextval")

    def visit_alias(self, alias, asfrom=False, ashint=False, **kwargs):
        """Oracle doesn't like ``FROM table AS alias``. Is the AS standard
        SQL??
        """
    def get_render_as_alias_suffix(self, alias_name_text):
        """Oracle doesn't like ``FROM table AS alias``"""

        if asfrom or ashint:
            alias_name = isinstance(alias.name, expression._truncated_label) and \
                self._truncated_identifier("alias", alias.name) or alias.name

            if ashint:
                return alias_name
            elif asfrom:
                return self.process(alias.original, asfrom=asfrom, **kwargs) + \
                    " " + self.preparer.format_alias(alias, alias_name)
        else:
            return self.process(alias.original, **kwargs)
        return " " + alias_name_text

    def returning_clause(self, stmt, returning_cols):
        columns = []

@@ -640,8 +693,9 @@ class OracleCompiler(compiler.SQLCompiler):
                    self.bindparam_string(self._truncate_bindparam(outparam)))
                columns.append(
                    self.process(col_expr, within_columns_clause=False))
                self.result_map[outparam.key] = (
                    outparam.key,

                self._add_to_result_map(
                    outparam.key, outparam.key,
                    (column, getattr(column, 'name', None),
                     getattr(column, 'key', None)),
                    column.type
@@ -669,9 +723,11 @@ class OracleCompiler(compiler.SQLCompiler):
            select = select.where(whereclause)
            select._oracle_visit = True

        if select._limit is not None or select._offset is not None:
        limit_clause = select._limit_clause
        offset_clause = select._offset_clause
        if limit_clause is not None or offset_clause is not None:
            # See http://www.oracle.com/technology/oramag/oracle/06-sep/\
            # o56asktom.html
            #
            # Generalized form of an Oracle pagination query:
            #   select ... from (

@@ -682,13 +738,15 @@ class OracleCompiler(compiler.SQLCompiler):
            # Outer select and "ROWNUM as ora_rn" can be dropped if
            # limit=0

            # TODO: use annotations instead of clone + attr set ?
            kwargs['select_wraps_for'] = select
            select = select._generate()
            select._oracle_visit = True

            # Wrap the middle select and add the hint
            limitselect = sql.select([c for c in select.c])
            if select._limit and self.dialect.optimize_limits:
            if limit_clause is not None and \
                    self.dialect.optimize_limits and \
                    select._simple_int_limit:
                limitselect = limitselect.prefix_with(
                    "/*+ FIRST_ROWS(%d) */" %
                    select._limit)

@@ -697,17 +755,24 @@ class OracleCompiler(compiler.SQLCompiler):
            limitselect._is_wrapper = True

            # If needed, add the limiting clause
            if select._limit is not None:
                max_row = select._limit
                if select._offset is not None:
                    max_row += select._offset
            if limit_clause is not None:
                if not self.dialect.use_binds_for_limits:
                    # use simple int limits, will raise an exception
                    # if the limit isn't specified this way
                    max_row = select._limit

                    if offset_clause is not None:
                        max_row += select._offset
                    max_row = sql.literal_column("%d" % max_row)
                else:
                    max_row = limit_clause
                    if offset_clause is not None:
                        max_row = max_row + offset_clause
                limitselect.append_whereclause(
                    sql.literal_column("ROWNUM") <= max_row)

            # If needed, add the ora_rn, and wrap again with offset.
            if select._offset is None:
            if offset_clause is None:
                limitselect._for_update_arg = select._for_update_arg
                select = limitselect
            else:

@@ -721,22 +786,21 @@ class OracleCompiler(compiler.SQLCompiler):
                offsetselect._oracle_visit = True
                offsetselect._is_wrapper = True

                offset_value = select._offset
                if not self.dialect.use_binds_for_limits:
                    offset_value = sql.literal_column("%d" % offset_value)
                    offset_clause = sql.literal_column(
                        "%d" % select._offset)
                offsetselect.append_whereclause(
                    sql.literal_column("ora_rn") > offset_value)
                    sql.literal_column("ora_rn") > offset_clause)

                offsetselect._for_update_arg = select._for_update_arg
                select = offsetselect

        kwargs['iswrapper'] = getattr(select, '_is_wrapper', False)
        return compiler.SQLCompiler.visit_select(self, select, **kwargs)

    def limit_clause(self, select):
    def limit_clause(self, select, **kw):
        return ""

    def for_update_clause(self, select):
    def for_update_clause(self, select, **kw):
        if self.is_subquery():
            return ""
@@ -744,7 +808,7 @@ class OracleCompiler(compiler.SQLCompiler):

        if select._for_update_arg.of:
            tmp += ' OF ' + ', '.join(
                self.process(elem) for elem in
                self.process(elem, **kw) for elem in
                select._for_update_arg.of
            )
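For reference, a hedged sketch of what this wrapping produces for a simple
statement; ``table``, ``column`` and ``select`` are the real SQL-expression
constructs, while the rendered SQL in the comments is approximate::

    from sqlalchemy.sql import table, column, select

    t = table('my_table', column('id'), column('data'))
    stmt = select([t.c.id, t.c.data]).order_by(t.c.id).limit(10).offset(10)
    # compiled by OracleCompiler this becomes, roughly:
    #   SELECT id, data FROM (
    #       SELECT anon.*, ROWNUM AS ora_rn
    #       FROM (SELECT my_table.id, my_table.data
    #             FROM my_table ORDER BY my_table.id) anon
    #       WHERE ROWNUM <= :limit + :offset)
    #   WHERE ora_rn > :offset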
@@ -773,15 +837,57 @@ class OracleDDLCompiler(compiler.DDLCompiler):

        return text

    def visit_create_index(self, create, **kw):
        return super(OracleDDLCompiler, self).\
            visit_create_index(create, include_schema=True)
    def visit_create_index(self, create):
        index = create.element
        self._verify_index_table(index)
        preparer = self.preparer
        text = "CREATE "
        if index.unique:
            text += "UNIQUE "
        if index.dialect_options['oracle']['bitmap']:
            text += "BITMAP "
        text += "INDEX %s ON %s (%s)" % (
            self._prepared_index_name(index, include_schema=True),
            preparer.format_table(index.table, use_schema=True),
            ', '.join(
                self.sql_compiler.process(
                    expr,
                    include_table=False, literal_binds=True)
                for expr in index.expressions)
        )
        if index.dialect_options['oracle']['compress'] is not False:
            if index.dialect_options['oracle']['compress'] is True:
                text += " COMPRESS"
            else:
                text += " COMPRESS %d" % (
                    index.dialect_options['oracle']['compress']
                )
        return text

    def post_create_table(self, table):
        table_opts = []
        opts = table.dialect_options['oracle']

        if opts['on_commit']:
            on_commit_options = opts['on_commit'].replace("_", " ").upper()
            table_opts.append('\n ON COMMIT %s' % on_commit_options)

        if opts['compress']:
            if opts['compress'] is True:
                table_opts.append("\n COMPRESS")
            else:
                table_opts.append("\n COMPRESS FOR %s" % (
                    opts['compress']
                ))

        return ''.join(table_opts)


class OracleIdentifierPreparer(compiler.IdentifierPreparer):

    reserved_words = set([x.lower() for x in RESERVED_WORDS])
    illegal_initial_characters = set(range(0, 10)).union(["_", "$"])
    illegal_initial_characters = set(
        (str(dig) for dig in range(0, 10))).union(["_", "$"])

    def _bindparam_requires_quotes(self, value):
        """Return True if the given identifier requires quoting."""
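The switch to ``str(dig)`` matters because this set is checked against single
characters of an identifier, and ``'1' != 1``; a quick sketch::

    illegal = set(str(dig) for dig in range(0, 10)).union(["_", "$"])
    "1abc"[0] in illegal              # True  -- leading digit forces quoting
    "abc1"[0] in illegal              # False
    "1abc"[0] in set(range(0, 10))    # False -- the old int set never matched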
@@ -798,7 +904,6 @@ class OracleIdentifierPreparer(compiler.IdentifierPreparer):


class OracleExecutionContext(default.DefaultExecutionContext):

    def fire_sequence(self, seq, type_):
        return self._execute_scalar(
            "SELECT " +

@@ -815,6 +920,8 @@ class OracleDialect(default.DefaultDialect):
    supports_sane_rowcount = True
    supports_sane_multi_rowcount = False

    supports_simple_order_by_label = False

    supports_sequences = True
    sequences_optional = False
    postfetch_lastrowid = False

@@ -836,7 +943,15 @@ class OracleDialect(default.DefaultDialect):
    reflection_options = ('oracle_resolve_synonyms', )

    construct_arguments = [
        (sa_schema.Table, {"resolve_synonyms": False})
        (sa_schema.Table, {
            "resolve_synonyms": False,
            "on_commit": None,
            "compress": False
        }),
        (sa_schema.Index, {
            "bitmap": False,
            "compress": False
        })
    ]

    def __init__(self,

@@ -866,6 +981,16 @@ class OracleDialect(default.DefaultDialect):
        return self.server_version_info and \
            self.server_version_info < (9, )

    @property
    def _supports_table_compression(self):
        return self.server_version_info and \
            self.server_version_info >= (9, 2, )

    @property
    def _supports_table_compress_for(self):
        return self.server_version_info and \
            self.server_version_info >= (11, )

    @property
    def _supports_char_length(self):
        return not self._is_oracle_8

@@ -908,6 +1033,8 @@ class OracleDialect(default.DefaultDialect):
        if name.upper() == name and not \
                self.identifier_preparer._requires_quotes(name.lower()):
            return name.lower()
        elif name.lower() == name:
            return quoted_name(name, quote=True)
        else:
            return name

@@ -1023,7 +1150,21 @@ class OracleDialect(default.DefaultDialect):
            "WHERE nvl(tablespace_name, 'no tablespace') NOT IN "
            "('SYSTEM', 'SYSAUX') "
            "AND OWNER = :owner "
            "AND IOT_NAME IS NULL")
            "AND IOT_NAME IS NULL "
            "AND DURATION IS NULL")
        cursor = connection.execute(s, owner=schema)
        return [self.normalize_name(row[0]) for row in cursor]

    @reflection.cache
    def get_temp_table_names(self, connection, **kw):
        schema = self.denormalize_name(self.default_schema_name)
        s = sql.text(
            "SELECT table_name FROM all_tables "
            "WHERE nvl(tablespace_name, 'no tablespace') NOT IN "
            "('SYSTEM', 'SYSAUX') "
            "AND OWNER = :owner "
            "AND IOT_NAME IS NULL "
            "AND DURATION IS NOT NULL")
        cursor = connection.execute(s, owner=schema)
        return [self.normalize_name(row[0]) for row in cursor]
@@ -1034,6 +1175,50 @@ class OracleDialect(default.DefaultDialect):
        cursor = connection.execute(s, owner=self.denormalize_name(schema))
        return [self.normalize_name(row[0]) for row in cursor]

    @reflection.cache
    def get_table_options(self, connection, table_name, schema=None, **kw):
        options = {}

        resolve_synonyms = kw.get('oracle_resolve_synonyms', False)
        dblink = kw.get('dblink', '')
        info_cache = kw.get('info_cache')

        (table_name, schema, dblink, synonym) = \
            self._prepare_reflection_args(connection, table_name, schema,
                                          resolve_synonyms, dblink,
                                          info_cache=info_cache)

        params = {"table_name": table_name}

        columns = ["table_name"]
        if self._supports_table_compression:
            columns.append("compression")
        if self._supports_table_compress_for:
            columns.append("compress_for")

        text = "SELECT %(columns)s "\
            "FROM ALL_TABLES%(dblink)s "\
            "WHERE table_name = :table_name"

        if schema is not None:
            params['owner'] = schema
            text += " AND owner = :owner "
        text = text % {'dblink': dblink, 'columns': ", ".join(columns)}

        result = connection.execute(sql.text(text), **params)

        enabled = dict(DISABLED=False, ENABLED=True)

        row = result.first()
        if row:
            if "compression" in row and enabled.get(row.compression, False):
                if "compress_for" in row:
                    options['oracle_compress'] = row.compress_for
                else:
                    options['oracle_compress'] = True

        return options

    @reflection.cache
    def get_columns(self, connection, table_name, schema=None, **kw):
        """
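These reflection routines are reached through the ordinary runtime inspection
API; a hedged usage sketch (the URL, table name, and the return values shown
are invented placeholders)::

    from sqlalchemy import create_engine, inspect

    engine = create_engine("oracle+cx_oracle://scott:tiger@mydsn")
    insp = inspect(engine)
    insp.get_temp_table_names()           # e.g. ['my_global_temp']
    insp.get_table_options('my_table')    # e.g. {'oracle_compress': True}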
@@ -1119,7 +1304,8 @@ class OracleDialect(default.DefaultDialect):

        params = {'table_name': table_name}
        text = \
            "SELECT a.index_name, a.column_name, b.uniqueness "\
            "SELECT a.index_name, a.column_name, "\
            "\nb.index_type, b.uniqueness, b.compression, b.prefix_length "\
            "\nFROM ALL_IND_COLUMNS%(dblink)s a, "\
            "\nALL_INDEXES%(dblink)s b "\
            "\nWHERE "\

@@ -1145,6 +1331,7 @@ class OracleDialect(default.DefaultDialect):
            dblink=dblink, info_cache=kw.get('info_cache'))
        pkeys = pk_constraint['constrained_columns']
        uniqueness = dict(NONUNIQUE=False, UNIQUE=True)
        enabled = dict(DISABLED=False, ENABLED=True)

        oracle_sys_col = re.compile(r'SYS_NC\d+\$', re.IGNORECASE)

@@ -1164,10 +1351,15 @@ class OracleDialect(default.DefaultDialect):
            if rset.index_name != last_index_name:
                remove_if_primary_key(index)
                index = dict(name=self.normalize_name(rset.index_name),
                             column_names=[])
                             column_names=[], dialect_options={})
                indexes.append(index)
            index['unique'] = uniqueness.get(rset.uniqueness, False)

            if rset.index_type in ('BITMAP', 'FUNCTION-BASED BITMAP'):
                index['dialect_options']['oracle_bitmap'] = True
            if enabled.get(rset.compression, False):
                index['dialect_options']['oracle_compress'] = rset.prefix_length

            # filter out Oracle SYS_NC names. could also do an outer join
            # to the all_tab_columns table and check for real col names there.
            if not oracle_sys_col.match(rset.column_name):
@@ -1,5 +1,5 @@
# oracle/cx_oracle.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -61,6 +61,14 @@ on the URL, or as keyword arguments to :func:`.create_engine()` are:
  Defaults to ``True``. Note that this is the opposite default of the
  cx_Oracle DBAPI itself.

* ``service_name`` - An option to use connection string (DSN) with
  ``SERVICE_NAME`` instead of ``SID``. It can't be passed when a ``database``
  part is given.
  E.g. ``oracle+cx_oracle://scott:tiger@host:1521/?service_name=hr``
  is a valid url. This value is only available as a URL query string argument.

  .. versionadded:: 1.0.0

.. _cx_oracle_unicode:

Unicode
@@ -285,6 +293,7 @@ from .base import OracleCompiler, OracleDialect, OracleExecutionContext
from . import base as oracle
from ...engine import result as _result
from sqlalchemy import types as sqltypes, util, exc, processors
from sqlalchemy import util
import random
import collections
import decimal

@@ -711,8 +720,10 @@ class OracleDialect_cx_oracle(OracleDialect):
            # this occurs in tests with mock DBAPIs
            self._cx_oracle_string_types = set()
            self._cx_oracle_with_unicode = False
        elif self.cx_oracle_ver >= (5,) and not \
                hasattr(self.dbapi, 'UNICODE'):
        elif util.py3k or (
                self.cx_oracle_ver >= (5,) and not
                hasattr(self.dbapi, 'UNICODE')
        ):
            # cx_Oracle WITH_UNICODE mode. *only* python
            # unicode objects accepted for anything
            self.supports_unicode_statements = True

@@ -862,14 +873,26 @@ class OracleDialect_cx_oracle(OracleDialect):
                util.coerce_kw_type(dialect_opts, opt, bool)
                setattr(self, opt, dialect_opts[opt])

        if url.database:
        database = url.database
        service_name = dialect_opts.get('service_name', None)
        if database or service_name:
            # if we have a database, then we have a remote host
            port = url.port
            if port:
                port = int(port)
            else:
                port = 1521
            dsn = self.dbapi.makedsn(url.host, port, url.database)

            if database and service_name:
                raise exc.InvalidRequestError(
                    '"service_name" option shouldn\'t '
                    'be used with a "database" part of the url')
            if database:
                makedsn_kwargs = {'sid': database}
            if service_name:
                makedsn_kwargs = {'service_name': service_name}

            dsn = self.dbapi.makedsn(url.host, port, **makedsn_kwargs)
        else:
            # we have a local tnsname
            dsn = url.host
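The branching above maps the two URL styles onto cx_Oracle's ``makedsn``,
using the same keyword arguments passed in this change; roughly (host, port,
and names are invented)::

    import cx_Oracle

    # SID form:           oracle+cx_oracle://scott:tiger@host:1521/orcl
    cx_Oracle.makedsn("host", 1521, sid="orcl")

    # SERVICE_NAME form:  oracle+cx_oracle://scott:tiger@host:1521/?service_name=hr
    cx_Oracle.makedsn("host", 1521, service_name="hr")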
@@ -1,5 +1,5 @@
# oracle/zxjdbc.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -10,8 +10,10 @@
:name: zxJDBC for Jython
:dbapi: zxjdbc
:connectstring: oracle+zxjdbc://user:pass@host/dbname
:driverurl: http://www.oracle.com/technology/software/tech/java/\
    sqlj_jdbc/index.html.
:driverurl: http://www.oracle.com/technetwork/database/features/jdbc/index-091264.html

.. note:: Jython is not supported by current versions of SQLAlchemy. The
   zxjdbc dialect should be considered as experimental.

"""
import decimal

@@ -68,8 +70,7 @@ class OracleCompiler_zxjdbc(OracleCompiler):
                expression._select_iterables(returning_cols))

            # within_columns_clause=False so that labels (foo AS bar) don't render
            columns = [self.process(c, within_columns_clause=False,
                                    result_map=self.result_map)
            columns = [self.process(c, within_columns_clause=False)
                       for c in self.returning_cols]

            if not hasattr(self, 'returning_parameters'):
@@ -1,5 +1,5 @@
# dialects/postgres.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -1,11 +1,11 @@
# postgresql/__init__.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from . import base, psycopg2, pg8000, pypostgresql, zxjdbc
from . import base, psycopg2, pg8000, pypostgresql, zxjdbc, psycopg2cffi

base.dialect = psycopg2.dialect

@@ -13,7 +13,7 @@ from .base import \
    INTEGER, BIGINT, SMALLINT, VARCHAR, CHAR, TEXT, NUMERIC, FLOAT, REAL, \
    INET, CIDR, UUID, BIT, MACADDR, OID, DOUBLE_PRECISION, TIMESTAMP, TIME, \
    DATE, BYTEA, BOOLEAN, INTERVAL, ARRAY, ENUM, dialect, array, Any, All, \
    TSVECTOR
    TSVECTOR, DropEnumType
from .constraints import ExcludeConstraint
from .hstore import HSTORE, hstore
from .json import JSON, JSONElement, JSONB

@@ -26,5 +26,6 @@ __all__ = (
    'DOUBLE_PRECISION', 'TIMESTAMP', 'TIME', 'DATE', 'BYTEA', 'BOOLEAN',
    'INTERVAL', 'ARRAY', 'ENUM', 'dialect', 'Any', 'All', 'array', 'HSTORE',
    'hstore', 'INT4RANGE', 'INT8RANGE', 'NUMRANGE', 'DATERANGE',
    'TSRANGE', 'TSTZRANGE', 'json', 'JSON', 'JSONB', 'JSONElement'
    'TSRANGE', 'TSTZRANGE', 'json', 'JSON', 'JSONB', 'JSONElement',
    'DropEnumType'
)

File diff suppressed because it is too large
@@ -1,10 +1,11 @@
# Copyright (C) 2013-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2013-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from sqlalchemy.schema import ColumnCollectionConstraint
from sqlalchemy.sql import expression
from ...sql.schema import ColumnCollectionConstraint
from ...sql import expression
from ... import util


class ExcludeConstraint(ColumnCollectionConstraint):

@@ -48,20 +49,42 @@ static/sql-createtable.html#SQL-CREATETABLE-EXCLUDE
          for this constraint.

        """
        columns = []
        render_exprs = []
        self.operators = {}

        expressions, operators = zip(*elements)

        for (expr, column, strname, add_element), operator in zip(
                self._extract_col_expression_collection(expressions),
                operators
        ):
            if add_element is not None:
                columns.append(add_element)

            name = column.name if column is not None else strname

            if name is not None:
                # backwards compat
                self.operators[name] = operator

            expr = expression._literal_as_text(expr)

            render_exprs.append(
                (expr, name, operator)
            )

        self._render_exprs = render_exprs
        ColumnCollectionConstraint.__init__(
            self,
            *[col for col, op in elements],
            *columns,
            name=kw.get('name'),
            deferrable=kw.get('deferrable'),
            initially=kw.get('initially')
        )
        self.operators = {}
        for col_or_string, op in elements:
            name = getattr(col_or_string, 'name', col_or_string)
            self.operators[name] = op
        self.using = kw.get('using', 'gist')
        where = kw.get('where')
        if where:
        if where is not None:
            self.where = expression._literal_as_text(where)

    def copy(self, **kw):
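For orientation, a hedged sketch of constructing the constraint this
``__init__`` now accepts, as string/operator pairs (the table and column
names are invented)::

    from sqlalchemy import Table, Column, Integer, MetaData
    from sqlalchemy.dialects.postgresql import ExcludeConstraint, TSRANGE

    metadata = MetaData()
    booking = Table(
        'room_booking', metadata,
        Column('room', Integer),
        Column('during', TSRANGE),
        # no two rows may share a room over overlapping time ranges
        ExcludeConstraint(('room', '='), ('during', '&&'), using='gist'))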
@@ -1,5 +1,5 @@
# postgresql/hstore.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -1,5 +1,5 @@
# postgresql/json.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -12,7 +12,7 @@ from .base import ischema_names
from ... import types as sqltypes
from ...sql.operators import custom_op
from ... import sql
from ...sql import elements
from ...sql import elements, default_comparator
from ... import util

__all__ = ('JSON', 'JSONElement', 'JSONB')

@@ -46,7 +46,8 @@ class JSONElement(elements.BinaryExpression):

        self._json_opstring = opstring
        operator = custom_op(opstring, precedence=5)
        right = left._check_literal(left, operator, right)
        right = default_comparator._check_literal(
            left, operator, right)
        super(JSONElement, self).__init__(
            left, right, operator, type_=result_type)
@@ -77,7 +78,7 @@ class JSONElement(elements.BinaryExpression):

    def cast(self, type_):
        """Convert this :class:`.JSONElement` to apply both the 'astext' operator
        as well as an explicit type cast when evaulated.
        as well as an explicit type cast when evaluated.

        E.g.::

@@ -164,6 +165,23 @@ class JSON(sqltypes.TypeEngine):

    __visit_name__ = 'JSON'

    def __init__(self, none_as_null=False):
        """Construct a :class:`.JSON` type.

        :param none_as_null: if True, persist the value ``None`` as a
          SQL NULL value, not the JSON encoding of ``null``. Note that
          when this flag is False, the :func:`.null` construct can still
          be used to persist a NULL value::

              from sqlalchemy import null
              conn.execute(table.insert(), data=null())

          .. versionchanged:: 0.9.8 - Added ``none_as_null``, and :func:`.null`
             is now supported in order to persist a NULL value.

        """
        self.none_as_null = none_as_null

    class comparator_factory(sqltypes.Concatenable.Comparator):
        """Define comparison operations for :class:`.JSON`."""
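To make the flag concrete, a hedged sketch of the two NULL behaviors (the
table and column names are invented)::

    from sqlalchemy import Table, Column, Integer, MetaData, null
    from sqlalchemy.dialects.postgresql import JSON

    metadata = MetaData()
    docs = Table('docs', metadata,
                 Column('id', Integer, primary_key=True),
                 Column('data', JSON(none_as_null=True)))

    # with none_as_null=True, inserting data=None stores SQL NULL;
    # under the default, None becomes the JSON text 'null' and
    # data=null() is the explicit route to SQL NULL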
@ -185,9 +203,17 @@ class JSON(sqltypes.TypeEngine):
|
|||
encoding = dialect.encoding
|
||||
|
||||
def process(value):
|
||||
if isinstance(value, elements.Null) or (
|
||||
value is None and self.none_as_null
|
||||
):
|
||||
return None
|
||||
return json_serializer(value).encode(encoding)
|
||||
else:
|
||||
def process(value):
|
||||
if isinstance(value, elements.Null) or (
|
||||
value is None and self.none_as_null
|
||||
):
|
||||
return None
|
||||
return json_serializer(value)
|
||||
return process
|
||||
|
||||
|
|
@ -197,9 +223,13 @@ class JSON(sqltypes.TypeEngine):
|
|||
encoding = dialect.encoding
|
||||
|
||||
def process(value):
|
||||
if value is None:
|
||||
return None
|
||||
return json_deserializer(value.decode(encoding))
|
||||
else:
|
||||
def process(value):
|
||||
if value is None:
|
||||
return None
|
||||
return json_deserializer(value)
|
||||
return process
|
||||
|
||||
|
|
|
|||
|
|
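A minimal usage sketch of the ``none_as_null`` flag introduced in the hunk
above (the table, credentials and URL here are placeholders, not part of the
commit)::

    from sqlalchemy import MetaData, Table, Column, Integer, create_engine, null
    from sqlalchemy.dialects.postgresql import JSON

    metadata = MetaData()
    data_table = Table('data_table', metadata,
                       Column('id', Integer, primary_key=True),
                       Column('data', JSON(none_as_null=True)))

    engine = create_engine("postgresql://scott:tiger@localhost/test")
    metadata.create_all(engine)
    with engine.connect() as conn:
        conn.execute(data_table.insert(), data=None)    # persisted as SQL NULL
        conn.execute(data_table.insert(), data=null())  # SQL NULL regardless of flag
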
@@ -1,5 +1,5 @@
 # postgresql/pg8000.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors <see AUTHORS
 # file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -13,17 +13,30 @@
     postgresql+pg8000://user:password@host:port/dbname[?key=value&key=value...]
 :url: https://pythonhosted.org/pg8000/

+.. _pg8000_unicode:
+
 Unicode
 -------

-When communicating with the server, pg8000 **always uses the server-side
-character set**.  SQLAlchemy has no ability to modify what character set
-pg8000 chooses to use, and additionally SQLAlchemy does no unicode conversion
-of any kind with the pg8000 backend.  The origin of the client encoding setting
-is ultimately the CLIENT_ENCODING setting in postgresql.conf.
+pg8000 will encode / decode string values between it and the server using the
+PostgreSQL ``client_encoding`` parameter; by default this is the value in
+the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``.
+Typically, this can be changed to ``utf-8``, as a more useful default::

-It is not necessary, though is also harmless, to pass the "encoding" parameter
-to :func:`.create_engine` when using pg8000.
+    #client_encoding = sql_ascii # actually, defaults to database
+                                 # encoding
+    client_encoding = utf8
+
+The ``client_encoding`` can be overridden for a session by executing the SQL:
+
+SET CLIENT_ENCODING TO 'utf8';
+
+SQLAlchemy will execute this SQL on all new connections based on the value
+passed to :func:`.create_engine` using the ``client_encoding`` parameter::
+
+    engine = create_engine(
+        "postgresql+pg8000://user:pass@host/dbname", client_encoding='utf8')

 .. _pg8000_isolation_level:
@@ -58,6 +71,8 @@ from ... import types as sqltypes
 from .base import (
     PGDialect, PGCompiler, PGIdentifierPreparer, PGExecutionContext,
     _DECIMAL_TYPES, _FLOAT_TYPES, _INT_TYPES)
+import re
+from sqlalchemy.dialects.postgresql.json import JSON


 class _PGNumeric(sqltypes.Numeric):
@@ -88,6 +103,15 @@ class _PGNumericNoBind(_PGNumeric):
             return None


+class _PGJSON(JSON):
+
+    def result_processor(self, dialect, coltype):
+        if dialect._dbapi_version > (1, 10, 1):
+            return None  # Has native JSON
+        else:
+            return super(_PGJSON, self).result_processor(dialect, coltype)
+
+
 class PGExecutionContext_pg8000(PGExecutionContext):
     pass

@@ -119,7 +143,7 @@ class PGDialect_pg8000(PGDialect):
     supports_unicode_binds = True

     default_paramstyle = 'format'
-    supports_sane_multi_rowcount = False
+    supports_sane_multi_rowcount = True
     execution_ctx_cls = PGExecutionContext_pg8000
     statement_compiler = PGCompiler_pg8000
     preparer = PGIdentifierPreparer_pg8000
@@ -129,10 +153,29 @@ class PGDialect_pg8000(PGDialect):
         PGDialect.colspecs,
         {
             sqltypes.Numeric: _PGNumericNoBind,
-            sqltypes.Float: _PGNumeric
+            sqltypes.Float: _PGNumeric,
+            JSON: _PGJSON,
         }
     )

+    def __init__(self, client_encoding=None, **kwargs):
+        PGDialect.__init__(self, **kwargs)
+        self.client_encoding = client_encoding
+
+    def initialize(self, connection):
+        self.supports_sane_multi_rowcount = self._dbapi_version >= (1, 9, 14)
+        super(PGDialect_pg8000, self).initialize(connection)
+
+    @util.memoized_property
+    def _dbapi_version(self):
+        if self.dbapi and hasattr(self.dbapi, '__version__'):
+            return tuple(
+                [
+                    int(x) for x in re.findall(
+                        r'(\d+)(?:[-\.]?|$)', self.dbapi.__version__)])
+        else:
+            return (99, 99, 99)
+
     @classmethod
     def dbapi(cls):
         return __import__('pg8000')
@@ -171,4 +214,51 @@ class PGDialect_pg8000(PGDialect):
             (level, self.name, ", ".join(self._isolation_lookup))
         )

+    def set_client_encoding(self, connection, client_encoding):
+        # adjust for ConnectionFairy possibly being present
+        if hasattr(connection, 'connection'):
+            connection = connection.connection
+
+        cursor = connection.cursor()
+        cursor.execute("SET CLIENT_ENCODING TO '" + client_encoding + "'")
+        cursor.execute("COMMIT")
+        cursor.close()
+
+    def do_begin_twophase(self, connection, xid):
+        connection.connection.tpc_begin((0, xid, ''))
+
+    def do_prepare_twophase(self, connection, xid):
+        connection.connection.tpc_prepare()
+
+    def do_rollback_twophase(
+            self, connection, xid, is_prepared=True, recover=False):
+        connection.connection.tpc_rollback((0, xid, ''))
+
+    def do_commit_twophase(
+            self, connection, xid, is_prepared=True, recover=False):
+        connection.connection.tpc_commit((0, xid, ''))
+
+    def do_recover_twophase(self, connection):
+        return [row[1] for row in connection.connection.tpc_recover()]
+
+    def on_connect(self):
+        fns = []
+        if self.client_encoding is not None:
+            def on_connect(conn):
+                self.set_client_encoding(conn, self.client_encoding)
+            fns.append(on_connect)
+
+        if self.isolation_level is not None:
+            def on_connect(conn):
+                self.set_isolation_level(conn, self.isolation_level)
+            fns.append(on_connect)
+
+        if len(fns) > 0:
+            def on_connect(conn):
+                for fn in fns:
+                    fn(conn)
+            return on_connect
+        else:
+            return None
+
 dialect = PGDialect_pg8000

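Tying the new pg8000 hooks together, a minimal sketch (connection details are
placeholders); ``client_encoding`` here causes the ``set_client_encoding()``
method above to run on each new connection::

    from sqlalchemy import create_engine

    engine = create_engine(
        "postgresql+pg8000://scott:tiger@localhost/test",
        client_encoding='utf8')
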
@@ -1,5 +1,5 @@
 # postgresql/psycopg2.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -32,10 +32,25 @@ psycopg2-specific keyword arguments which are accepted by
   way of enabling this mode on a per-execution basis.
 * ``use_native_unicode``: Enable the usage of Psycopg2 "native unicode" mode
   per connection.  True by default.
+
+  .. seealso::
+
+    :ref:`psycopg2_disable_native_unicode`
+
 * ``isolation_level``: This option, available for all PostgreSQL dialects,
   includes the ``AUTOCOMMIT`` isolation level when using the psycopg2
-  dialect. See :ref:`psycopg2_isolation_level`.
+  dialect.
+
+  .. seealso::
+
+    :ref:`psycopg2_isolation_level`
+
 * ``client_encoding``: sets the client encoding in a libpq-agnostic way,
   using psycopg2's ``set_client_encoding()`` method.
+
+  .. seealso::
+
+    :ref:`psycopg2_unicode`

 Unix Domain Connections
 ------------------------
@@ -51,12 +66,15 @@ in ``/tmp``, or whatever socket directory was specified when PostgreSQL
 was built.  This value can be overridden by passing a pathname to psycopg2,
 using ``host`` as an additional keyword argument::

-    create_engine("postgresql+psycopg2://user:password@/dbname?host=/var/lib/postgresql")
+    create_engine("postgresql+psycopg2://user:password@/dbname?\
+host=/var/lib/postgresql")

 See also:

-`PQconnectdbParams <http://www.postgresql.org/docs/9.1/static\
-/libpq-connect.html#LIBPQ-PQCONNECTDBPARAMS>`_
+`PQconnectdbParams <http://www.postgresql.org/docs/9.1/static/\
+libpq-connect.html#LIBPQ-PQCONNECTDBPARAMS>`_
+
+.. _psycopg2_execution_options:

 Per-Statement/Connection Execution Options
 -------------------------------------------
@@ -65,18 +83,27 @@ The following DBAPI-specific options are respected when used with
 :meth:`.Connection.execution_options`, :meth:`.Executable.execution_options`,
 :meth:`.Query.execution_options`, in addition to those not specific to DBAPIs:

-* isolation_level - Set the transaction isolation level for the lifespan of a
+* ``isolation_level`` - Set the transaction isolation level for the lifespan of a
   :class:`.Connection` (can only be set on a connection, not a statement
   or query).  See :ref:`psycopg2_isolation_level`.

-* stream_results - Enable or disable usage of psycopg2 server side cursors -
+* ``stream_results`` - Enable or disable usage of psycopg2 server side cursors -
   this feature makes use of "named" cursors in combination with special
   result handling methods so that result rows are not fully buffered.
   If ``None`` or not set, the ``server_side_cursors`` option of the
   :class:`.Engine` is used.

-Unicode
--------
+* ``max_row_buffer`` - when using ``stream_results``, an integer value that
+  specifies the maximum number of rows to buffer at a time.  This is
+  interpreted by the :class:`.BufferedRowResultProxy`, and if omitted the
+  buffer will grow to ultimately store 1000 rows at a time.
+
+  .. versionadded:: 1.0.6
+
+.. _psycopg2_unicode:
+
+Unicode with Psycopg2
+----------------------

 By default, the psycopg2 driver uses the ``psycopg2.extensions.UNICODE``
 extension, such that the DBAPI receives and returns all strings as Python
@@ -84,27 +111,51 @@ Unicode objects directly - SQLAlchemy passes these values through without
 change.  Psycopg2 here will encode/decode string values based on the
 current "client encoding" setting; by default this is the value in
 the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``.
-Typically, this can be changed to ``utf-8``, as a more useful default::
+Typically, this can be changed to ``utf8``, as a more useful default::

-    #client_encoding = sql_ascii # actually, defaults to database
+    # postgresql.conf file
+
+    # client_encoding = sql_ascii # actually, defaults to database
                                  # encoding
     client_encoding = utf8

 A second way to affect the client encoding is to set it within Psycopg2
-locally.  SQLAlchemy will call psycopg2's ``set_client_encoding()``
-method (see:
-http://initd.org/psycopg/docs/connection.html#connection.set_client_encoding)
+locally.  SQLAlchemy will call psycopg2's
+:meth:`psycopg2:connection.set_client_encoding` method
 on all new connections based on the value passed to
 :func:`.create_engine` using the ``client_encoding`` parameter::

+    # set_client_encoding() setting;
+    # works for *all* Postgresql versions
     engine = create_engine("postgresql://user:pass@host/dbname",
                            client_encoding='utf8')

 This overrides the encoding specified in the Postgresql client configuration.
+When using the parameter in this way, the psycopg2 driver emits
+``SET client_encoding TO 'utf8'`` on the connection explicitly, and works
+in all Postgresql versions.

-.. versionadded:: 0.7.3
-    The psycopg2-specific ``client_encoding`` parameter to
-    :func:`.create_engine`.
+Note that the ``client_encoding`` setting as passed to :func:`.create_engine`
+is **not the same** as the more recently added ``client_encoding`` parameter
+now supported by libpq directly.  This is enabled when ``client_encoding``
+is passed directly to ``psycopg2.connect()``, and from SQLAlchemy is passed
+using the :paramref:`.create_engine.connect_args` parameter::
+
+    # libpq direct parameter setting;
+    # only works for Postgresql **9.1 and above**
+    engine = create_engine("postgresql://user:pass@host/dbname",
+                           connect_args={'client_encoding': 'utf8'})
+
+    # using the query string is equivalent
+    engine = create_engine("postgresql://user:pass@host/dbname?client_encoding=utf8")
+
+The above parameter was only added to libpq as of version 9.1 of Postgresql,
+so using the previous method is better for cross-version support.
+
+.. _psycopg2_disable_native_unicode:

 Disabling Native Unicode
 ^^^^^^^^^^^^^^^^^^^^^^^^

 SQLAlchemy can also be instructed to skip the usage of the psycopg2
 ``UNICODE`` extension and to instead utilize its own unicode encode/decode
@@ -116,8 +167,56 @@ in and coerce from bytes on the way back,
 using the value of the :func:`.create_engine` ``encoding`` parameter, which
 defaults to ``utf-8``.
 SQLAlchemy's own unicode encode/decode functionality is steadily becoming
-obsolete as more DBAPIs support unicode fully along with the approach of
-Python 3; in modern usage psycopg2 should be relied upon to handle unicode.
+obsolete as most DBAPIs now support unicode fully.
+
+Bound Parameter Styles
+----------------------
+
+The default parameter style for the psycopg2 dialect is "pyformat", where
+SQL is rendered using ``%(paramname)s`` style.  This format has the limitation
+that it does not accommodate the unusual case of parameter names that
+actually contain percent or parenthesis symbols; as SQLAlchemy in many cases
+generates bound parameter names based on the name of a column, the presence
+of these characters in a column name can lead to problems.
+
+There are two solutions to the issue of a :class:`.schema.Column` that contains
+one of these characters in its name.  One is to specify the
+:paramref:`.schema.Column.key` for columns that have such names::
+
+    measurement = Table('measurement', metadata,
+                        Column('Size (meters)', Integer, key='size_meters')
+                        )
+
+Above, an INSERT statement such as ``measurement.insert()`` will use
+``size_meters`` as the parameter name, and a SQL expression such as
+``measurement.c.size_meters > 10`` will derive the bound parameter name
+from the ``size_meters`` key as well.
+
+.. versionchanged:: 1.0.0 - SQL expressions will use :attr:`.Column.key`
+   as the source of naming when anonymous bound parameters are created
+   in SQL expressions; previously, this behavior only applied to
+   :meth:`.Table.insert` and :meth:`.Table.update` parameter names.
+
+The other solution is to use a positional format; psycopg2 allows use of the
+"format" paramstyle, which can be passed to
+:paramref:`.create_engine.paramstyle`::
+
+    engine = create_engine(
+        'postgresql://scott:tiger@localhost:5432/test', paramstyle='format')
+
+With the above engine, instead of a statement like::
+
+    INSERT INTO measurement ("Size (meters)") VALUES (%(Size (meters))s)
+    {'Size (meters)': 1}
+
+we instead see::
+
+    INSERT INTO measurement ("Size (meters)") VALUES (%s)
+    (1, )
+
+Where above, the dictionary style is converted into a tuple with positional
+style.
+

 Transactions
 ------------
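A quick way to verify the ``Column.key`` workaround described above - a sketch
using only the default dialect, so no database is required::

    from sqlalchemy import MetaData, Table, Column, Integer

    metadata = MetaData()
    measurement = Table('measurement', metadata,
                        Column('Size (meters)', Integer, key='size_meters'))

    # renders: INSERT INTO measurement ("Size (meters)") VALUES (:size_meters)
    print(measurement.insert().compile())
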
@@ -148,7 +247,7 @@ The psycopg2 dialect supports these constants for isolation level:
 * ``AUTOCOMMIT``

 .. versionadded:: 0.8.2 support for AUTOCOMMIT isolation level when using
-   psycopg2.
+    psycopg2.

 .. seealso::

@@ -173,14 +272,17 @@ HSTORE type

 The ``psycopg2`` DBAPI includes an extension to natively handle marshalling of
 the HSTORE type.  The SQLAlchemy psycopg2 dialect will enable this extension
-by default when it is detected that the target database has the HSTORE
-type set up for use.  In other words, when the dialect makes the first
+by default when psycopg2 version 2.4 or greater is used, and
+it is detected that the target database has the HSTORE type set up for use.
+In other words, when the dialect makes the first
 connection, a sequence like the following is performed:

 1. Request the available HSTORE oids using
    ``psycopg2.extras.HstoreAdapter.get_oids()``.
    If this function returns a list of HSTORE identifiers, we then determine
    that the ``HSTORE`` extension is present.
+   This function is **skipped** if the version of psycopg2 installed is
+   less than version 2.4.

 2. If the ``use_native_hstore`` flag is at its default of ``True``, and
    we've detected that ``HSTORE`` oids are available, the
@@ -219,9 +321,14 @@ from ... import types as sqltypes
 from .base import PGDialect, PGCompiler, \
     PGIdentifierPreparer, PGExecutionContext, \
     ENUM, ARRAY, _DECIMAL_TYPES, _FLOAT_TYPES,\
-    _INT_TYPES
+    _INT_TYPES, UUID
 from .hstore import HSTORE
-from .json import JSON
+from .json import JSON, JSONB
+
+try:
+    from uuid import UUID as _python_UUID
+except ImportError:
+    _python_UUID = None


 logger = logging.getLogger('sqlalchemy.dialects.postgresql')
@@ -256,7 +363,7 @@ class _PGNumeric(sqltypes.Numeric):

 class _PGEnum(ENUM):
     def result_processor(self, dialect, coltype):
-        if util.py2k and self.convert_unicode is True:
+        if self.native_enum and util.py2k and self.convert_unicode is True:
             # we can't easily use PG's extensions here because
             # the OID is on the fly, and we need to give it a python
             # function anyway - not really worth it.
@@ -286,6 +393,35 @@ class _PGJSON(JSON):
         else:
             return super(_PGJSON, self).result_processor(dialect, coltype)


+class _PGJSONB(JSONB):
+
+    def result_processor(self, dialect, coltype):
+        if dialect._has_native_jsonb:
+            return None
+        else:
+            return super(_PGJSONB, self).result_processor(dialect, coltype)
+
+
+class _PGUUID(UUID):
+    def bind_processor(self, dialect):
+        if not self.as_uuid and dialect.use_native_uuid:
+            nonetype = type(None)
+
+            def process(value):
+                if value is not None:
+                    value = _python_UUID(value)
+                return value
+            return process
+
+    def result_processor(self, dialect, coltype):
+        if not self.as_uuid and dialect.use_native_uuid:
+            def process(value):
+                if value is not None:
+                    value = str(value)
+                return value
+            return process
+
 # When we're handed literal SQL, ensure it's a SELECT query. Since
 # 8.3, combining cursors and "FOR UPDATE" has been fine.
 SERVER_SIDE_CURSOR_RE = re.compile(
@@ -374,8 +510,21 @@ class PGDialect_psycopg2(PGDialect):
     preparer = PGIdentifierPreparer_psycopg2
     psycopg2_version = (0, 0)

+    FEATURE_VERSION_MAP = dict(
+        native_json=(2, 5),
+        native_jsonb=(2, 5, 4),
+        sane_multi_rowcount=(2, 0, 9),
+        array_oid=(2, 4, 3),
+        hstore_adapter=(2, 4)
+    )
+
     _has_native_hstore = False
     _has_native_json = False
+    _has_native_jsonb = False

     engine_config_types = PGDialect.engine_config_types.union([
         ('use_native_unicode', util.asbool),
     ])

     colspecs = util.update_copy(
         PGDialect.colspecs,
@@ -384,18 +533,21 @@ class PGDialect_psycopg2(PGDialect):
             ENUM: _PGEnum,  # needs force_unicode
             sqltypes.Enum: _PGEnum,  # needs force_unicode
             HSTORE: _PGHStore,
-            JSON: _PGJSON
+            JSON: _PGJSON,
+            JSONB: _PGJSONB,
+            UUID: _PGUUID
         }
     )

     def __init__(self, server_side_cursors=False, use_native_unicode=True,
                  client_encoding=None,
-                 use_native_hstore=True,
+                 use_native_hstore=True, use_native_uuid=True,
                  **kwargs):
         PGDialect.__init__(self, **kwargs)
         self.server_side_cursors = server_side_cursors
         self.use_native_unicode = use_native_unicode
         self.use_native_hstore = use_native_hstore
+        self.use_native_uuid = use_native_uuid
         self.supports_unicode_binds = use_native_unicode
         self.client_encoding = client_encoding
         if self.dbapi and hasattr(self.dbapi, '__version__'):
@@ -412,19 +564,34 @@ class PGDialect_psycopg2(PGDialect):
         self._has_native_hstore = self.use_native_hstore and \
             self._hstore_oids(connection.connection) \
             is not None
-        self._has_native_json = self.psycopg2_version >= (2, 5)
+        self._has_native_json = \
+            self.psycopg2_version >= self.FEATURE_VERSION_MAP['native_json']
+        self._has_native_jsonb = \
+            self.psycopg2_version >= self.FEATURE_VERSION_MAP['native_jsonb']

         # http://initd.org/psycopg/docs/news.html#what-s-new-in-psycopg-2-0-9
-        self.supports_sane_multi_rowcount = self.psycopg2_version >= (2, 0, 9)
+        self.supports_sane_multi_rowcount = \
+            self.psycopg2_version >= \
+            self.FEATURE_VERSION_MAP['sane_multi_rowcount']

     @classmethod
     def dbapi(cls):
         import psycopg2
         return psycopg2

+    @classmethod
+    def _psycopg2_extensions(cls):
+        from psycopg2 import extensions
+        return extensions
+
+    @classmethod
+    def _psycopg2_extras(cls):
+        from psycopg2 import extras
+        return extras
+
     @util.memoized_property
     def _isolation_lookup(self):
-        from psycopg2 import extensions
+        extensions = self._psycopg2_extensions()
         return {
             'AUTOCOMMIT': extensions.ISOLATION_LEVEL_AUTOCOMMIT,
             'READ COMMITTED': extensions.ISOLATION_LEVEL_READ_COMMITTED,
@@ -446,7 +613,8 @@ class PGDialect_psycopg2(PGDialect):
         connection.set_isolation_level(level)

     def on_connect(self):
-        from psycopg2 import extras, extensions
+        extras = self._psycopg2_extras()
+        extensions = self._psycopg2_extensions()

         fns = []
         if self.client_encoding is not None:
@@ -459,6 +627,11 @@ class PGDialect_psycopg2(PGDialect):
                 self.set_isolation_level(conn, self.isolation_level)
             fns.append(on_connect)

+        if self.dbapi and self.use_native_uuid:
+            def on_connect(conn):
+                extras.register_uuid(None, conn)
+            fns.append(on_connect)
+
         if self.dbapi and self.use_native_unicode:
             def on_connect(conn):
                 extensions.register_type(extensions.UNICODE, conn)
@@ -470,19 +643,23 @@ class PGDialect_psycopg2(PGDialect):
                 hstore_oids = self._hstore_oids(conn)
                 if hstore_oids is not None:
                     oid, array_oid = hstore_oids
+                    kw = {'oid': oid}
                     if util.py2k:
-                        extras.register_hstore(conn, oid=oid,
-                                               array_oid=array_oid,
-                                               unicode=True)
-                    else:
-                        extras.register_hstore(conn, oid=oid,
-                                               array_oid=array_oid)
+                        kw['unicode'] = True
+                    if self.psycopg2_version >= \
+                            self.FEATURE_VERSION_MAP['array_oid']:
+                        kw['array_oid'] = array_oid
+                    extras.register_hstore(conn, **kw)
             fns.append(on_connect)

         if self.dbapi and self._json_deserializer:
             def on_connect(conn):
-                extras.register_default_json(
-                    conn, loads=self._json_deserializer)
+                if self._has_native_json:
+                    extras.register_default_json(
+                        conn, loads=self._json_deserializer)
+                if self._has_native_jsonb:
+                    extras.register_default_jsonb(
+                        conn, loads=self._json_deserializer)
             fns.append(on_connect)

         if fns:
@@ -495,8 +672,8 @@ class PGDialect_psycopg2(PGDialect):

     @util.memoized_instancemethod
     def _hstore_oids(self, conn):
-        if self.psycopg2_version >= (2, 4):
-            from psycopg2 import extras
+        if self.psycopg2_version >= self.FEATURE_VERSION_MAP['hstore_adapter']:
+            extras = self._psycopg2_extras()
             oids = extras.HstoreAdapter.get_oids(conn)
             if oids is not None and oids[0]:
                 return oids[0:2]
@@ -512,12 +689,14 @@ class PGDialect_psycopg2(PGDialect):
     def is_disconnect(self, e, connection, cursor):
         if isinstance(e, self.dbapi.Error):
             # check the "closed" flag.  this might not be
-            # present on old psycopg2 versions
+            # present on old psycopg2 versions.  Also,
+            # this flag doesn't actually help in a lot of disconnect
+            # situations, so don't rely on it.
             if getattr(connection, 'closed', False):
                 return True

-            # legacy checks based on strings.  the "closed" check
-            # above most likely obviates the need for any of these.
+            # checks based on strings.  in the case that .closed
+            # didn't cut it, fall back onto these.
             str_e = str(e).partition("\n")[0]
             for msg in [
                 # these error messages from libpq: interfaces/libpq/fe-misc.c
@@ -534,8 +713,10 @@ class PGDialect_psycopg2(PGDialect):
                 # not sure where this path is originally from, it may
                 # be obsolete.  It really says "losed", not "closed".
                 'losed the connection unexpectedly',
-                # this can occur in newer SSL
-                'connection has been closed unexpectedly'
+                # these can occur in newer SSL
+                'connection has been closed unexpectedly',
+                'SSL SYSCALL error: Bad file descriptor',
+                'SSL SYSCALL error: EOF detected',
             ]:
                 idx = str_e.find(msg)
                 if idx >= 0 and '"' not in str_e[:idx]:

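For the ``stream_results`` / ``max_row_buffer`` options documented above, a
minimal sketch (URL and query are placeholders)::

    from sqlalchemy import create_engine, text

    engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test")
    with engine.connect() as conn:
        result = conn.execution_options(
            stream_results=True, max_row_buffer=500
        ).execute(text("SELECT generate_series(1, 1000000)"))
        for row in result:
            pass  # rows arrive via a named server-side cursor, not fully buffered
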
@@ -0,0 +1,61 @@
+# postgresql/psycopg2cffi.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+"""
+.. dialect:: postgresql+psycopg2cffi
+    :name: psycopg2cffi
+    :dbapi: psycopg2cffi
+    :connectstring: \
+postgresql+psycopg2cffi://user:password@host:port/dbname\
+[?key=value&key=value...]
+    :url: http://pypi.python.org/pypi/psycopg2cffi/
+
+``psycopg2cffi`` is an adaptation of ``psycopg2``, using CFFI for the C
+layer.  This makes it suitable for use in e.g. PyPy.  Documentation
+is as per ``psycopg2``.
+
+.. versionadded:: 1.0.0
+
+.. seealso::
+
+    :mod:`sqlalchemy.dialects.postgresql.psycopg2`
+
+"""
+from .psycopg2 import PGDialect_psycopg2
+
+
+class PGDialect_psycopg2cffi(PGDialect_psycopg2):
+    driver = 'psycopg2cffi'
+    supports_unicode_statements = True
+
+    # psycopg2cffi's first release is 2.5.0, but reports
+    # __version__ as 2.4.4.  Subsequent releases seem to have
+    # fixed this.
+
+    FEATURE_VERSION_MAP = dict(
+        native_json=(2, 4, 4),
+        native_jsonb=(2, 7, 1),
+        sane_multi_rowcount=(2, 4, 4),
+        array_oid=(2, 4, 4),
+        hstore_adapter=(2, 4, 4)
+    )
+
+    @classmethod
+    def dbapi(cls):
+        return __import__('psycopg2cffi')
+
+    @classmethod
+    def _psycopg2_extensions(cls):
+        root = __import__('psycopg2cffi', fromlist=['extensions'])
+        return root.extensions
+
+    @classmethod
+    def _psycopg2_extras(cls):
+        root = __import__('psycopg2cffi', fromlist=['extras'])
+        return root.extras
+
+
+dialect = PGDialect_psycopg2cffi

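Usage is identical to psycopg2 apart from the driver name - a sketch with
placeholder credentials::

    from sqlalchemy import create_engine

    engine = create_engine(
        "postgresql+psycopg2cffi://scott:tiger@localhost/test")
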
@@ -1,5 +1,5 @@
 # postgresql/pypostgresql.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -65,6 +65,23 @@ class PGDialect_pypostgresql(PGDialect):
         from postgresql.driver import dbapi20
         return dbapi20

+    _DBAPI_ERROR_NAMES = [
+        "Error",
+        "InterfaceError", "DatabaseError", "DataError",
+        "OperationalError", "IntegrityError", "InternalError",
+        "ProgrammingError", "NotSupportedError"
+    ]
+
+    @util.memoized_property
+    def dbapi_exception_translation_map(self):
+        if self.dbapi is None:
+            return {}
+
+        return dict(
+            (getattr(self.dbapi, name).__name__, name)
+            for name in self._DBAPI_ERROR_NAMES
+        )
+
     def create_connect_args(self, url):
         opts = url.translate_connect_args(username='user')
         if 'port' in opts:

@@ -1,4 +1,4 @@
-# Copyright (C) 2013-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2013-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under

@@ -1,5 +1,5 @@
 # postgresql/zxjdbc.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under

@@ -1,11 +1,11 @@
 # sqlite/__init__.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php

-from sqlalchemy.dialects.sqlite import base, pysqlite
+from sqlalchemy.dialects.sqlite import base, pysqlite, pysqlcipher

 # default dialect
 base.dialect = pysqlite.dialect

File diff suppressed because it is too large.

@@ -0,0 +1,116 @@
+# sqlite/pysqlcipher.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""
+.. dialect:: sqlite+pysqlcipher
+    :name: pysqlcipher
+    :dbapi: pysqlcipher
+    :connectstring: sqlite+pysqlcipher://:passphrase/file_path[?kdf_iter=<iter>]
+    :url: https://pypi.python.org/pypi/pysqlcipher
+
+``pysqlcipher`` is a fork of the standard ``pysqlite`` driver to make
+use of the `SQLCipher <https://www.zetetic.net/sqlcipher>`_ backend.
+
+.. versionadded:: 0.9.9
+
+Driver
+------
+
+The driver here is the `pysqlcipher <https://pypi.python.org/pypi/pysqlcipher>`_
+driver, which makes use of the SQLCipher engine.  This system essentially
+introduces new PRAGMA commands to SQLite which allows the setting of a
+passphrase and other encryption parameters, allowing the database
+file to be encrypted.
+
+Connect Strings
+---------------
+
+The format of the connect string is in every way the same as that
+of the :mod:`~sqlalchemy.dialects.sqlite.pysqlite` driver, except that the
+"password" field is now accepted, which should contain a passphrase::
+
+    e = create_engine('sqlite+pysqlcipher://:testing@/foo.db')
+
+For an absolute file path, two leading slashes should be used for the
+database name::
+
+    e = create_engine('sqlite+pysqlcipher://:testing@//path/to/foo.db')
+
+A selection of additional encryption-related pragmas supported by SQLCipher
+as documented at https://www.zetetic.net/sqlcipher/sqlcipher-api/ can be passed
+in the query string, and will result in that PRAGMA being called for each
+new connection.  Currently, ``cipher``, ``kdf_iter``,
+``cipher_page_size`` and ``cipher_use_hmac`` are supported::
+
+    e = create_engine('sqlite+pysqlcipher://:testing@/foo.db?cipher=aes-256-cfb&kdf_iter=64000')
+
+
+Pooling Behavior
+----------------
+
+The driver makes a change to the default pool behavior of pysqlite
+as described in :ref:`pysqlite_threading_pooling`.  The pysqlcipher driver
+has been observed to be significantly slower on connection than the
+pysqlite driver, most likely due to the encryption overhead, so the
+dialect here defaults to using the :class:`.SingletonThreadPool`
+implementation,
+instead of the :class:`.NullPool` pool used by pysqlite.  As always, the pool
+implementation is entirely configurable using the
+:paramref:`.create_engine.poolclass` parameter; the :class:`.StaticPool` may
+be more feasible for single-threaded use, or :class:`.NullPool` may be used
+to prevent unencrypted connections from being held open for long periods of
+time, at the expense of slower startup time for new connections.
+
+
+"""
+from __future__ import absolute_import
+from .pysqlite import SQLiteDialect_pysqlite
+from ...engine import url as _url
+from ... import pool
+
+
+class SQLiteDialect_pysqlcipher(SQLiteDialect_pysqlite):
+    driver = 'pysqlcipher'
+
+    pragmas = ('kdf_iter', 'cipher', 'cipher_page_size', 'cipher_use_hmac')
+
+    @classmethod
+    def dbapi(cls):
+        from pysqlcipher import dbapi2 as sqlcipher
+        return sqlcipher
+
+    @classmethod
+    def get_pool_class(cls, url):
+        return pool.SingletonThreadPool
+
+    def connect(self, *cargs, **cparams):
+        passphrase = cparams.pop('passphrase', '')
+
+        pragmas = dict(
+            (key, cparams.pop(key, None)) for key in
+            self.pragmas
+        )
+
+        conn = super(SQLiteDialect_pysqlcipher, self).\
+            connect(*cargs, **cparams)
+        conn.execute('pragma key="%s"' % passphrase)
+        for prag, value in pragmas.items():
+            if value is not None:
+                conn.execute('pragma %s=%s' % (prag, value))
+
+        return conn
+
+    def create_connect_args(self, url):
+        super_url = _url.URL(
+            url.drivername, username=url.username,
+            host=url.host, database=url.database, query=url.query)
+        c_args, opts = super(SQLiteDialect_pysqlcipher, self).\
+            create_connect_args(super_url)
+        opts['passphrase'] = url.password
+        return c_args, opts
+
+dialect = SQLiteDialect_pysqlcipher

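A sketch combining the connect-string and pooling notes above (passphrase and
file name are placeholders); ``NullPool`` here overrides the
:class:`.SingletonThreadPool` default::

    from sqlalchemy import create_engine
    from sqlalchemy.pool import NullPool

    engine = create_engine(
        'sqlite+pysqlcipher://:testing@/foo.db?cipher=aes-256-cfb&kdf_iter=64000',
        poolclass=NullPool)
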
@@ -1,5 +1,5 @@
 # sqlite/pysqlite.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -200,30 +200,68 @@ is passed containing non-ASCII characters.

 .. _pysqlite_serializable:

-Serializable Transaction Isolation
-----------------------------------
+Serializable isolation / Savepoints / Transactional DDL
+-------------------------------------------------------

-The pysqlite DBAPI driver has a long-standing bug in which transactional
-state is not begun until the first DML statement, that is INSERT, UPDATE
-or DELETE, is emitted.  A SELECT statement will not cause transactional
-state to begin.  While this mode of usage is fine for typical situations
-and has the advantage that the SQLite database file is not prematurely
-locked, it breaks serializable transaction isolation, which requires
-that the database file be locked upon any SQL being emitted.
+In the section :ref:`sqlite_concurrency`, we refer to the pysqlite
+driver's assortment of issues that prevent several features of SQLite
+from working correctly.  The pysqlite DBAPI driver has several
+long-standing bugs which impact the correctness of its transactional
+behavior.  In its default mode of operation, SQLite features such as
+SERIALIZABLE isolation, transactional DDL, and SAVEPOINT support are
+non-functional, and in order to use these features, workarounds must
+be taken.

-To work around this issue, the ``BEGIN`` keyword can be emitted
-at the start of each transaction.  The following recipe establishes
-a :meth:`.ConnectionEvents.begin` handler to achieve this::
+The issue is essentially that the driver attempts to second-guess the user's
+intent, failing to start transactions and sometimes ending them prematurely, in
+an effort to minimize the SQLite database's file locking behavior, even
+though SQLite itself uses "shared" locks for read-only activities.
+
+SQLAlchemy chooses to not alter this behavior by default, as it is the
+long-expected behavior of the pysqlite driver; if and when the pysqlite
+driver attempts to repair these issues, that will be more of a driver towards
+defaults for SQLAlchemy.
+
+The good news is that with a few events, we can implement transactional
+support fully, by disabling pysqlite's feature entirely and emitting BEGIN
+ourselves.  This is achieved using two event listeners::

     from sqlalchemy import create_engine, event

-    engine = create_engine("sqlite:///myfile.db",
-                           isolation_level='SERIALIZABLE')
+    engine = create_engine("sqlite:///myfile.db")
+
+    @event.listens_for(engine, "connect")
+    def do_connect(dbapi_connection, connection_record):
+        # disable pysqlite's emitting of the BEGIN statement entirely.
+        # also stops it from emitting COMMIT before any DDL.
+        dbapi_connection.isolation_level = None

     @event.listens_for(engine, "begin")
     def do_begin(conn):
+        # emit our own BEGIN
         conn.execute("BEGIN")

+Above, we intercept a new pysqlite connection and disable any transactional
+integration.  Then, at the point at which SQLAlchemy knows that transaction
+scope is to begin, we emit ``"BEGIN"`` ourselves.
+
+When we take control of ``"BEGIN"``, we can also control directly SQLite's
+locking modes, introduced at `BEGIN TRANSACTION <http://sqlite.org/lang_transaction.html>`_,
+by adding the desired locking mode to our ``"BEGIN"``::
+
+    @event.listens_for(engine, "begin")
+    def do_begin(conn):
+        conn.execute("BEGIN EXCLUSIVE")
+
+.. seealso::
+
+    `BEGIN TRANSACTION <http://sqlite.org/lang_transaction.html>`_ - on the SQLite site
+
+    `sqlite3 SELECT does not BEGIN a transaction <http://bugs.python.org/issue9924>`_ - on the Python bug tracker
+
+    `sqlite3 module breaks transactions and potentially corrupts data <http://bugs.python.org/issue10740>`_ - on the Python bug tracker
+

 """

 from sqlalchemy.dialects.sqlite.base import SQLiteDialect, DATETIME, DATE

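With the two listeners from the recipe above in place, SAVEPOINT now behaves;
a sketch assuming a hypothetical ``some_table``::

    from sqlalchemy import create_engine, event

    engine = create_engine("sqlite:///myfile.db")

    @event.listens_for(engine, "connect")
    def do_connect(dbapi_connection, connection_record):
        dbapi_connection.isolation_level = None

    @event.listens_for(engine, "begin")
    def do_begin(conn):
        conn.execute("BEGIN")

    with engine.begin() as conn:
        savepoint = conn.begin_nested()
        conn.execute("INSERT INTO some_table (data) VALUES ('x')")
        savepoint.rollback()  # rolls back to the SAVEPOINT only
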
@@ -1,5 +1,5 @@
 # sybase/__init__.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under

@@ -1,5 +1,5 @@
 # sybase/base.py
-# Copyright (C) 2010-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2010-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 # get_select_precolumns(), limit_clause() implementation
 # copyright (C) 2007 Fisch Asset Management
@@ -98,7 +98,6 @@ RESERVED_WORDS = set([


 class _SybaseUnitypeMixin(object):
-
     """these types appear to return a buffer object."""

     def result_processor(self, dialect, coltype):
@@ -147,41 +146,40 @@ class IMAGE(sqltypes.LargeBinary):


 class SybaseTypeCompiler(compiler.GenericTypeCompiler):
-
-    def visit_large_binary(self, type_):
+    def visit_large_binary(self, type_, **kw):
         return self.visit_IMAGE(type_)

-    def visit_boolean(self, type_):
+    def visit_boolean(self, type_, **kw):
         return self.visit_BIT(type_)

-    def visit_unicode(self, type_):
+    def visit_unicode(self, type_, **kw):
        return self.visit_NVARCHAR(type_)

-    def visit_UNICHAR(self, type_):
+    def visit_UNICHAR(self, type_, **kw):
         return "UNICHAR(%d)" % type_.length

-    def visit_UNIVARCHAR(self, type_):
+    def visit_UNIVARCHAR(self, type_, **kw):
         return "UNIVARCHAR(%d)" % type_.length

-    def visit_UNITEXT(self, type_):
+    def visit_UNITEXT(self, type_, **kw):
         return "UNITEXT"

-    def visit_TINYINT(self, type_):
+    def visit_TINYINT(self, type_, **kw):
         return "TINYINT"

-    def visit_IMAGE(self, type_):
+    def visit_IMAGE(self, type_, **kw):
         return "IMAGE"

-    def visit_BIT(self, type_):
+    def visit_BIT(self, type_, **kw):
         return "BIT"

-    def visit_MONEY(self, type_):
+    def visit_MONEY(self, type_, **kw):
         return "MONEY"

-    def visit_SMALLMONEY(self, type_):
+    def visit_SMALLMONEY(self, type_, **kw):
         return "SMALLMONEY"

-    def visit_UNIQUEIDENTIFIER(self, type_):
+    def visit_UNIQUEIDENTIFIER(self, type_, **kw):
         return "UNIQUEIDENTIFIER"

 ischema_names = {
@@ -325,28 +323,30 @@ class SybaseSQLCompiler(compiler.SQLCompiler):
         'milliseconds': 'millisecond'
     })

-    def get_select_precolumns(self, select):
+    def get_select_precolumns(self, select, **kw):
         s = select._distinct and "DISTINCT " or ""
         # TODO: don't think Sybase supports
         # bind params for FIRST / TOP
-        if select._limit:
+        limit = select._limit
+        if limit:
             # if select._limit == 1:
-            #  s += "FIRST "
+            # s += "FIRST "
             # else:
-            #    s += "TOP %s " % (select._limit,)
-            s += "TOP %s " % (select._limit,)
-        if select._offset:
-            if not select._limit:
+            # s += "TOP %s " % (select._limit,)
+            s += "TOP %s " % (limit,)
+        offset = select._offset
+        if offset:
+            if not limit:
                 # FIXME: sybase doesn't allow an offset without a limit
                 # so use a huge value for TOP here
                 s += "TOP 1000000 "
-            s += "START AT %s " % (select._offset + 1,)
+            s += "START AT %s " % (offset + 1,)
         return s

     def get_from_hint_text(self, table, text):
         return text

-    def limit_clause(self, select):
+    def limit_clause(self, select, **kw):
         # Limit in sybase is after the select keyword
         return ""

@@ -375,10 +375,10 @@ class SybaseSQLCompiler(compiler.SQLCompiler):


 class SybaseDDLCompiler(compiler.DDLCompiler):
-
     def get_column_specification(self, column, **kwargs):
         colspec = self.preparer.format_column(column) + " " + \
-            self.dialect.type_compiler.process(column.type)
+            self.dialect.type_compiler.process(
+                column.type, type_expression=column)

         if column.table is None:
             raise exc.CompileError(
@@ -608,8 +608,8 @@ class SybaseDialect(default.DefaultDialect):
           FROM sysreferences r JOIN sysobjects o on r.tableid = o.id
          WHERE r.tableid = :table_id
         """)
-        referential_constraints = connection.execute(REFCONSTRAINT_SQL,
-                                                     table_id=table_id)
+        referential_constraints = connection.execute(
+            REFCONSTRAINT_SQL, table_id=table_id).fetchall()

         REFTABLE_SQL = text("""
         SELECT o.name AS name, u.name AS 'schema'
@@ -740,10 +740,13 @@ class SybaseDialect(default.DefaultDialect):
         results.close()

         constrained_columns = []
-        for i in range(1, pks["count"] + 1):
-            constrained_columns.append(pks["pk_%i" % (i,)])
-        return {"constrained_columns": constrained_columns,
-                "name": pks["name"]}
+        if pks:
+            for i in range(1, pks["count"] + 1):
+                constrained_columns.append(pks["pk_%i" % (i,)])
+            return {"constrained_columns": constrained_columns,
+                    "name": pks["name"]}
+        else:
+            return {"constrained_columns": [], "name": None}

     @reflection.cache
     def get_schema_names(self, connection, **kw):

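The reworked ``get_select_precolumns()`` can be exercised without a Sybase
server by compiling against the base dialect - a sketch, with approximate
output::

    from sqlalchemy.sql import select, table, column
    from sqlalchemy.dialects.sybase.base import SybaseDialect

    t = table('t', column('x'))
    stmt = select([t.c.x]).limit(10).offset(20)
    # roughly: SELECT TOP 10 START AT 21 x FROM t
    print(stmt.compile(dialect=SybaseDialect()))
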
@@ -1,5 +1,5 @@
 # sybase/mxodbc.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under

@@ -1,5 +1,5 @@
 # sybase/pyodbc.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under

@@ -1,5 +1,5 @@
 # sybase/pysybase.py
-# Copyright (C) 2010-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2010-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under