update sqlalchemy

This commit is contained in: parent 7365367c61, commit 3b436646a2
362 changed files with 37720 additions and 11021 deletions
util/__init__.py

@@ -1,15 +1,15 @@
 # util/__init__.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php

 from .compat import callable, cmp, reduce, \
-    threading, py3k, py33, py2k, jython, pypy, cpython, win32, \
+    threading, py3k, py33, py36, py2k, jython, pypy, cpython, win32, \
     pickle, dottedgetter, parse_qsl, namedtuple, next, reraise, \
     raise_from_cause, text_type, safe_kwarg, string_types, int_types, \
-    binary_type, \
+    binary_type, nested, \
     quote_plus, with_metaclass, print_, itertools_filterfalse, u, ue, b,\
     unquote_plus, unquote, b64decode, b64encode, byte_buffer, itertools_filter,\
     iterbytes, StringIO, inspect_getargspec, zip_longest
@@ -19,9 +19,9 @@ from ._collections import KeyedTuple, ImmutableContainer, immutabledict, \
     OrderedSet, IdentitySet, OrderedIdentitySet, column_set, \
     column_dict, ordered_column_set, populate_column_dict, unique_list, \
     UniqueAppender, PopulateDict, EMPTY_SET, to_list, to_set, \
-    to_column_set, update_copy, flatten_iterator, \
+    to_column_set, update_copy, flatten_iterator, has_intersection, \
     LRUCache, ScopedRegistry, ThreadLocalRegistry, WeakSequence, \
-    coerce_generator_arg
+    coerce_generator_arg, lightweight_named_tuple

 from .langhelpers import iterate_attributes, class_hierarchy, \
     portable_instancemethod, unbound_method_to_callable, \
@@ -33,8 +33,10 @@ from .langhelpers import iterate_attributes, class_hierarchy, \
     duck_type_collection, assert_arg_type, symbol, dictlike_iteritems,\
     classproperty, set_creation_order, warn_exception, warn, NoneType,\
     constructor_copy, methods_equivalent, chop_traceback, asint,\
-    generic_repr, counter, PluginLoader, hybridmethod, safe_reraise,\
-    get_callable_argspec, only_once
+    generic_repr, counter, PluginLoader, hybridproperty, hybridmethod, \
+    safe_reraise,\
+    get_callable_argspec, only_once, attrsetter, ellipses_string, \
+    warn_limited, map_bits, MemoizedSlots, EnsureKWArgType

 from .deprecations import warn_deprecated, warn_pending_deprecation, \
     deprecated, pending_deprecation, inject_docstring_text
util/_collections.py

@@ -1,5 +1,5 @@
 # util/_collections.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -10,14 +10,30 @@
 from __future__ import absolute_import
 import weakref
 import operator
-from .compat import threading, itertools_filterfalse
+from .compat import threading, itertools_filterfalse, string_types
+from . import py2k
 import types
 import collections

 EMPTY_SET = frozenset()


-class KeyedTuple(tuple):
+class AbstractKeyedTuple(tuple):
+    __slots__ = ()
+
+    def keys(self):
+        """Return a list of string key names for this :class:`.KeyedTuple`.
+
+        .. seealso::
+
+            :attr:`.KeyedTuple._fields`
+
+        """
+
+        return list(self._fields)
+
+
+class KeyedTuple(AbstractKeyedTuple):
     """``tuple`` subclass that adds labeled names.

     E.g.::
@@ -56,23 +72,13 @@ class KeyedTuple(tuple):

     def __new__(cls, vals, labels=None):
         t = tuple.__new__(cls, vals)
-        t._labels = []
         if labels:
             t.__dict__.update(zip(labels, vals))
-            t._labels = labels
+        else:
+            labels = []
+        t.__dict__['_labels'] = labels
         return t

-    def keys(self):
-        """Return a list of string key names for this :class:`.KeyedTuple`.
-
-        .. seealso::
-
-            :attr:`.KeyedTuple._fields`
-
-        """
-
-        return [l for l in self._labels if l is not None]
-
     @property
     def _fields(self):
         """Return a tuple of string key names for this :class:`.KeyedTuple`.
@@ -86,7 +92,10 @@ class KeyedTuple(tuple):
             :meth:`.KeyedTuple.keys`

         """
-        return tuple(self.keys())
+        return tuple([l for l in self._labels if l is not None])
+
+    def __setattr__(self, key, value):
+        raise AttributeError("Can't set attribute: %s" % key)

     def _asdict(self):
         """Return the contents of this :class:`.KeyedTuple` as a dictionary.
@@ -100,6 +109,26 @@ class KeyedTuple(tuple):
         return dict((key, self.__dict__[key]) for key in self.keys())


+class _LW(AbstractKeyedTuple):
+    __slots__ = ()
+
+    def __new__(cls, vals):
+        return tuple.__new__(cls, vals)
+
+    def __reduce__(self):
+        # for pickling, degrade down to the regular
+        # KeyedTuple, thus avoiding anonymous class pickling
+        # difficulties
+        return KeyedTuple, (list(self), self._real_fields)
+
+    def _asdict(self):
+        """Return the contents of this :class:`.KeyedTuple` as a dictionary."""
+
+        d = dict(zip(self._real_fields, self))
+        d.pop(None, None)
+        return d
+
+
 class ImmutableContainer(object):
     def _immutable(self, *arg, **kw):
         raise TypeError("%s object is immutable" % self.__class__.__name__)
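For orientation, a minimal usage sketch of the `KeyedTuple` API touched above (illustrative only, not part of the diff; it assumes the vendored package is importable as `sqlalchemy`):

```python
from sqlalchemy.util import KeyedTuple

row = KeyedTuple([1, 2, 3], labels=["id", "name", "type"])
assert row[1] == 2                                    # still a plain tuple
assert row.name == 2                                  # labels become attributes
assert row.keys() == ["id", "name", "type"]           # via AbstractKeyedTuple.keys()
assert row._asdict() == {"id": 1, "name": 2, "type": 3}
```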
@@ -124,8 +153,13 @@ class immutabledict(ImmutableContainer, dict):
         return immutabledict, (dict(self), )

     def union(self, d):
-        if not self:
-            return immutabledict(d)
+        if not d:
+            return self
+        elif not self:
+            if isinstance(d, immutabledict):
+                return d
+            else:
+                return immutabledict(d)
         else:
             d2 = immutabledict(self)
             dict.update(d2, d)
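A small sketch of what the reworked `union()` returns (illustrative only; `immutabledict` is re-exported from `sqlalchemy.util` per the import list above):

```python
from sqlalchemy.util import immutabledict

base = immutabledict({"a": 1})
merged = base.union({"b": 2})            # builds a new immutabledict
assert dict(merged) == {"a": 1, "b": 2}
assert base.union({}) is base            # empty argument short-circuits

try:
    merged["c"] = 3                      # any mutation raises TypeError
except TypeError:
    pass
```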
@@ -138,8 +172,10 @@ class immutabledict(ImmutableContainer, dict):
 class Properties(object):
     """Provide a __getattr__/__setattr__ interface over a dict."""

+    __slots__ = '_data',
+
     def __init__(self, data):
-        self.__dict__['_data'] = data
+        object.__setattr__(self, '_data', data)

     def __len__(self):
         return len(self._data)
@@ -159,8 +195,8 @@ class Properties(object):
     def __delitem__(self, key):
         del self._data[key]

-    def __setattr__(self, key, object):
-        self._data[key] = object
+    def __setattr__(self, key, obj):
+        self._data[key] = obj

     def __getstate__(self):
         return {'_data': self.__dict__['_data']}
@@ -211,6 +247,8 @@ class OrderedProperties(Properties):
     """Provide a __getattr__/__setattr__ interface with an OrderedDict
     as backing store."""

+    __slots__ = ()
+
     def __init__(self):
         Properties.__init__(self, OrderedDict())

@@ -218,10 +256,17 @@ class OrderedProperties(Properties):
 class ImmutableProperties(ImmutableContainer, Properties):
     """Provide immutable dict/object attribute to an underlying dictionary."""

+    __slots__ = ()
+

 class OrderedDict(dict):
     """A dict that returns keys/values/items in the order they were added."""

+    __slots__ = '_list',
+
+    def __reduce__(self):
+        return OrderedDict, (self.items(),)
+
     def __init__(self, ____sequence=None, **kwargs):
         self._list = []
         if ____sequence is None:
@@ -264,15 +309,18 @@ class OrderedDict(dict):
     def __iter__(self):
         return iter(self._list)

+    def keys(self):
+        return list(self)
+
+    def values(self):
+        return [self[key] for key in self._list]
+
+    def items(self):
+        return [(key, self[key]) for key in self._list]
+
     if py2k:
-        def values(self):
-            return [self[key] for key in self._list]
-
-        def keys(self):
-            return self._list
-
         def itervalues(self):
-            return iter([self[key] for key in self._list])
+            return iter(self.values())

         def iterkeys(self):
             return iter(self)
@@ -280,41 +328,6 @@ class OrderedDict(dict):
         def iteritems(self):
             return iter(self.items())

-        def items(self):
-            return [(key, self[key]) for key in self._list]
-    else:
-        def values(self):
-            # return (self[key] for key in self)
-            return (self[key] for key in self._list)
-
-        def keys(self):
-            # return iter(self)
-            return iter(self._list)
-
-        def items(self):
-            # return ((key, self[key]) for key in self)
-            return ((key, self[key]) for key in self._list)
-
-    _debug_iter = False
-    if _debug_iter:
-        # normally disabled to reduce function call
-        # overhead
-        def __iter__(self):
-            len_ = len(self._list)
-            for item in self._list:
-                yield item
-            assert len_ == len(self._list), \
-                "Dictionary changed size during iteration"
-
-        def values(self):
-            return (self[key] for key in self)
-
-        def keys(self):
-            return iter(self)
-
-        def items(self):
-            return ((key, self[key]) for key in self)
-
     def __setitem__(self, key, object):
         if key not in self:
             try:
@@ -347,7 +360,10 @@ class OrderedSet(set):
         set.__init__(self)
-        self._list = []
         if d is not None:
-            self.update(d)
+            self._list = unique_list(d)
+            set.update(self, self._list)
+        else:
+            self._list = []

     def add(self, element):
         if element not in self:
@@ -722,16 +738,23 @@ ordered_column_set = OrderedSet
 populate_column_dict = PopulateDict


+_getters = PopulateDict(operator.itemgetter)
+
+_property_getters = PopulateDict(
+    lambda idx: property(operator.itemgetter(idx)))
+
+
 def unique_list(seq, hashfunc=None):
-    seen = {}
+    seen = set()
+    seen_add = seen.add
     if not hashfunc:
         return [x for x in seq
                 if x not in seen
-                and not seen.__setitem__(x, True)]
+                and not seen_add(x)]
     else:
         return [x for x in seq
                 if hashfunc(x) not in seen
-                and not seen.__setitem__(hashfunc(x), True)]
+                and not seen_add(hashfunc(x))]


 class UniqueAppender(object):
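The rewritten `unique_list()` still deduplicates while keeping first-seen order; a tiny sketch (illustrative only):

```python
from sqlalchemy.util import unique_list

assert unique_list([3, 1, 3, 2, 1]) == [3, 1, 2]

# with a hashfunc, duplicates are detected on the derived key
assert unique_list(["a", "A", "b"], hashfunc=str.lower) == ["a", "b"]
```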
@@ -771,10 +794,25 @@ def coerce_generator_arg(arg):
 def to_list(x, default=None):
     if x is None:
         return default
-    if not isinstance(x, (list, tuple)):
+    if not isinstance(x, collections.Iterable) or isinstance(x, string_types):
         return [x]
-    else:
+    elif isinstance(x, list):
         return x
+    else:
+        return list(x)
+
+
+def has_intersection(set_, iterable):
+    """return True if any items of set_ are present in iterable.
+
+    Goes through special effort to ensure __hash__ is not called
+    on items in iterable that don't support it.
+
+    """
+    # TODO: optimize, write in C, etc.
+    return bool(
+        set_.intersection([i for i in iterable if i.__hash__])
+    )


 def to_set(x):
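`to_list()` now accepts arbitrary iterables while still treating strings as scalars; sketch (illustrative only):

```python
from sqlalchemy.util import to_list

assert to_list(None, default=[]) == []
assert to_list("abc") == ["abc"]          # strings stay whole
assert to_list({"x"}) == ["x"]            # other iterables are converted to lists
assert to_list([1, 2]) == [1, 2]          # lists pass through unchanged
```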
@@ -822,17 +860,30 @@ class LRUCache(dict):
     """Dictionary with 'squishy' removal of least
     recently used items.

+    Note that either get() or [] should be used here, but
+    generally its not safe to do an "in" check first as the dictionary
+    can change subsequent to that call.
+
     """

     def __init__(self, capacity=100, threshold=.5):
         self.capacity = capacity
         self.threshold = threshold
         self._counter = 0
+        self._mutex = threading.Lock()

     def _inc_counter(self):
         self._counter += 1
         return self._counter

+    def get(self, key, default=None):
+        item = dict.get(self, key, default)
+        if item is not default:
+            item[2] = self._inc_counter()
+            return item[1]
+        else:
+            return default
+
     def __getitem__(self, key):
         item = dict.__getitem__(self, key)
         item[2] = self._inc_counter()
@@ -858,18 +909,45 @@ class LRUCache(dict):
         self._manage_size()

     def _manage_size(self):
-        while len(self) > self.capacity + self.capacity * self.threshold:
-            by_counter = sorted(dict.values(self),
-                                key=operator.itemgetter(2),
-                                reverse=True)
-            for item in by_counter[self.capacity:]:
-                try:
-                    del self[item[0]]
-                except KeyError:
-                    # if we couldn't find a key, most
-                    # likely some other thread broke in
-                    # on us. loop around and try again
-                    break
+        if not self._mutex.acquire(False):
+            return
+        try:
+            while len(self) > self.capacity + self.capacity * self.threshold:
+                by_counter = sorted(dict.values(self),
+                                    key=operator.itemgetter(2),
+                                    reverse=True)
+                for item in by_counter[self.capacity:]:
+                    try:
+                        del self[item[0]]
+                    except KeyError:
+                        # deleted elsewhere; skip
+                        continue
+        finally:
+            self._mutex.release()


+_lw_tuples = LRUCache(100)
+
+
+def lightweight_named_tuple(name, fields):
+    hash_ = (name, ) + tuple(fields)
+    tp_cls = _lw_tuples.get(hash_)
+    if tp_cls:
+        return tp_cls
+
+    tp_cls = type(
+        name, (_LW,),
+        dict([
+            (field, _property_getters[idx])
+            for idx, field in enumerate(fields) if field is not None
+        ] + [('__slots__', ())])
+    )
+
+    tp_cls._real_fields = fields
+    tp_cls._fields = tuple([f for f in fields if f is not None])
+
+    _lw_tuples[hash_] = tp_cls
+    return tp_cls
+
+
 class ScopedRegistry(object):
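`lightweight_named_tuple()` is the new cached row-tuple factory added above; a rough usage sketch (illustrative only, not part of the diff):

```python
from sqlalchemy.util import lightweight_named_tuple

# the generated class is cached in an LRUCache keyed on (name, fields);
# None entries mean "positional only, no attribute"
Row = lightweight_named_tuple("Row", ["id", None, "name"])
r = Row((1, "raw", "widget"))

assert (r.id, r.name) == (1, "widget")
assert r._fields == ("id", "name")
assert r._asdict() == {"id": 1, "name": "widget"}
```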
util/compat.py

@@ -1,5 +1,5 @@
 # util/compat.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -14,6 +14,7 @@ try:
 except ImportError:
     import dummy_threading as threading

+py36 = sys.version_info >= (3, 6)
 py33 = sys.version_info >= (3, 3)
 py32 = sys.version_info >= (3, 2)
 py3k = sys.version_info >= (3, 0)
@@ -176,27 +177,27 @@ from operator import attrgetter as dottedgetter
 if py3k:
     def reraise(tp, value, tb=None, cause=None):
         if cause is not None:
+            assert cause is not value, "Same cause emitted"
             value.__cause__ = cause
         if value.__traceback__ is not tb:
             raise value.with_traceback(tb)
         raise value

-    def raise_from_cause(exception, exc_info=None):
-        if exc_info is None:
-            exc_info = sys.exc_info()
-        exc_type, exc_value, exc_tb = exc_info
-        reraise(type(exception), exception, tb=exc_tb, cause=exc_value)
 else:
+    # not as nice as that of Py3K, but at least preserves
+    # the code line where the issue occurred
     exec("def reraise(tp, value, tb=None, cause=None):\n"
+         "    if cause is not None:\n"
+         "        assert cause is not value, 'Same cause emitted'\n"
          "    raise tp, value, tb\n")

-    def raise_from_cause(exception, exc_info=None):
-        # not as nice as that of Py3K, but at least preserves
-        # the code line where the issue occurred
-        if exc_info is None:
-            exc_info = sys.exc_info()
-        exc_type, exc_value, exc_tb = exc_info
-        reraise(type(exception), exception, tb=exc_tb)
+
+def raise_from_cause(exception, exc_info=None):
+    if exc_info is None:
+        exc_info = sys.exc_info()
+    exc_type, exc_value, exc_tb = exc_info
+    cause = exc_value if exc_value is not exception else None
+    reraise(type(exception), exception, tb=exc_tb, cause=cause)

 if py3k:
     exec_ = getattr(builtins, 'exec')
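A sketch of the now module-level `raise_from_cause()` in use (illustrative only; the `__cause__` chaining applies on Python 3):

```python
import sys
from sqlalchemy.util import raise_from_cause

def lookup(key):
    try:
        return {}[key]
    except KeyError:
        # re-raise as a different exception, keeping the original
        # KeyError attached as __cause__ (Python 3) along with its traceback
        raise_from_cause(LookupError("no such key: %r" % key))

try:
    lookup("missing")
except LookupError as err:
    if sys.version_info >= (3, 0):
        assert isinstance(err.__cause__, KeyError)
```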
@@ -226,3 +227,37 @@ def with_metaclass(meta, *bases):
             return type.__new__(cls, name, (), d)
         return meta(name, bases, d)
     return metaclass('temporary_class', None, {})
+
+
+from contextlib import contextmanager
+
+try:
+    from contextlib import nested
+except ImportError:
+    # removed in py3k, credit to mitsuhiko for
+    # workaround
+
+    @contextmanager
+    def nested(*managers):
+        exits = []
+        vars = []
+        exc = (None, None, None)
+        try:
+            for mgr in managers:
+                exit = mgr.__exit__
+                enter = mgr.__enter__
+                vars.append(enter())
+                exits.append(exit)
+            yield vars
+        except:
+            exc = sys.exc_info()
+        finally:
+            while exits:
+                exit = exits.pop()
+                try:
+                    if exit(*exc):
+                        exc = (None, None, None)
+                except:
+                    exc = sys.exc_info()
+            if exc != (None, None, None):
+                reraise(exc[0], exc[1], exc[2])
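The `nested()` shim added here (and re-exported through `util`) restores the pre-Python-3 `contextlib.nested` behaviour; a small sketch (illustrative only):

```python
from sqlalchemy.util import nested

class Resource(object):
    def __init__(self, name):
        self.name = name
    def __enter__(self):
        return self.name
    def __exit__(self, *exc):
        return False          # do not swallow exceptions

# enters the managers left to right, exits in reverse order
with nested(Resource("a"), Resource("b")) as (a, b):
    assert (a, b) == ("a", "b")
```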
util/deprecations.py

@@ -1,5 +1,5 @@
 # util/deprecations.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -102,7 +102,7 @@ def _decorate_with_warning(func, wtype, message, docstring_header=None):

     @decorator
     def warned(fn, *args, **kwargs):
-        warnings.warn(wtype(message), stacklevel=3)
+        warnings.warn(message, wtype, stacklevel=3)
         return fn(*args, **kwargs)

     doc = func.__doc__ is not None and func.__doc__ or ''
util/langhelpers.py

@@ -1,5 +1,5 @@
 # util/langhelpers.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -59,6 +59,13 @@ class safe_reraise(object):
             self._exc_info = None  # remove potential circular references
             compat.reraise(exc_type, exc_value, exc_tb)
         else:
+            if not compat.py3k and self._exc_info and self._exc_info[1]:
+                # emulate Py3K's behavior of telling us when an exception
+                # occurs in an exception handler.
+                warn(
+                    "An exception has occurred during handling of a "
+                    "previous exception. The previous exception "
+                    "is:\n %s %s\n" % (self._exc_info[0], self._exc_info[1]))
             self._exc_info = None  # remove potential circular references
             compat.reraise(type_, value, traceback)

@@ -92,6 +99,15 @@ def _unique_symbols(used, *bases):
     raise NameError("exhausted namespace for symbol base %s" % base)


+def map_bits(fn, n):
+    """Call the given function given each nonzero bit from n."""
+
+    while n:
+        b = n & (~n + 1)
+        yield fn(b)
+        n ^= b
+
+
 def decorator(target):
     """A signature-matching decorator factory."""
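`map_bits()` walks the set bits of an integer from lowest to highest; for example (illustrative only):

```python
from sqlalchemy.util import map_bits

# 0b10110 has bits 2, 4 and 16 set; fn is called once per set bit
assert list(map_bits(lambda bit: bit, 0b10110)) == [2, 4, 16]
```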
@@ -134,7 +150,8 @@ def public_factory(target, location):
         fn = target.__init__
         callable_ = target
         doc = "Construct a new :class:`.%s` object. \n\n"\
-            "This constructor is mirrored as a public API function; see :func:`~%s` "\
+            "This constructor is mirrored as a public API function; "\
+            "see :func:`~%s` "\
             "for a full usage and argument description." % (
                 target.__name__, location, )
     else:
@@ -155,6 +172,7 @@ def %(name)s(%(args)s):
     exec(code, env)
     decorated = env[location_name]
     decorated.__doc__ = fn.__doc__
+    decorated.__module__ = "sqlalchemy" + location.rsplit(".", 1)[0]
     if compat.py2k or hasattr(fn, '__func__'):
         fn.__func__.__doc__ = doc
     else:
@@ -415,7 +433,7 @@ def getargspec_init(method):

     """
     try:
-        return inspect.getargspec(method)
+        return compat.inspect_getargspec(method)
     except TypeError:
         if method is object.__init__:
             return (['self'], None, None, None)
@@ -435,7 +453,7 @@ def unbound_method_to_callable(func_or_cls):
     return func_or_cls


-def generic_repr(obj, additional_kw=(), to_inspect=None):
+def generic_repr(obj, additional_kw=(), to_inspect=None, omit_kwarg=()):
     """Produce a __repr__() based on direct association of the __init__()
     specification vs. same-named attributes present.

@@ -453,7 +471,7 @@ def generic_repr(obj, additional_kw=(), to_inspect=None):
     for i, insp in enumerate(to_inspect):
         try:
             (_args, _vargs, vkw, defaults) = \
-                inspect.getargspec(insp.__init__)
+                compat.inspect_getargspec(insp.__init__)
         except TypeError:
             continue
         else:
@@ -484,11 +502,13 @@ def generic_repr(obj, additional_kw=(), to_inspect=None):
         output.extend([repr(val) for val in getattr(obj, vargs)])

     for arg, defval in kw_args.items():
+        if arg in omit_kwarg:
+            continue
         try:
             val = getattr(obj, arg, missing)
             if val is not missing and val != defval:
                 output.append('%s=%r' % (arg, val))
-        except:
+        except Exception:
             pass

     if additional_kw:
@@ -497,7 +517,7 @@ def generic_repr(obj, additional_kw=(), to_inspect=None):
                 val = getattr(obj, arg, missing)
                 if val is not missing and val != defval:
                     output.append('%s=%r' % (arg, val))
-            except:
+            except Exception:
                 pass

     return "%s(%s)" % (obj.__class__.__name__, ", ".join(output))
@@ -509,6 +529,15 @@ class portable_instancemethod(object):

     """

+    __slots__ = 'target', 'name', '__weakref__'
+
+    def __getstate__(self):
+        return {'target': self.target, 'name': self.name}
+
+    def __setstate__(self, state):
+        self.target = state['target']
+        self.name = state['name']
+
     def __init__(self, meth):
         self.target = meth.__self__
         self.name = meth.__name__
@@ -603,7 +632,7 @@ def monkeypatch_proxied_specials(into_cls, from_cls, skip=None, only=None,
         except AttributeError:
             continue
         try:
-            spec = inspect.getargspec(fn)
+            spec = compat.inspect_getargspec(fn)
             fn_args = inspect.formatargspec(spec[0])
             d_args = inspect.formatargspec(spec[0][1:])
         except TypeError:
@@ -733,7 +762,7 @@ class memoized_property(object):
         obj.__dict__.pop(name, None)


-class memoized_instancemethod(object):
+def memoized_instancemethod(fn):
     """Decorate a method memoize its return value.

     Best applied to no-arg methods: memoization is not sensitive to
@@ -742,26 +771,14 @@ class memoized_instancemethod(object):

     """

-    def __init__(self, fget, doc=None):
-        self.fget = fget
-        self.__doc__ = doc or fget.__doc__
-        self.__name__ = fget.__name__
-
-    def __get__(self, obj, cls):
-        if obj is None:
-            return self
-
-        def oneshot(*args, **kw):
-            result = self.fget(obj, *args, **kw)
-            memo = lambda *a, **kw: result
-            memo.__name__ = self.__name__
-            memo.__doc__ = self.__doc__
-            obj.__dict__[self.__name__] = memo
-            return result
-
-        oneshot.__name__ = self.__name__
-        oneshot.__doc__ = self.__doc__
-        return oneshot
+    def oneshot(self, *args, **kw):
+        result = fn(self, *args, **kw)
+        memo = lambda *a, **kw: result
+        memo.__name__ = fn.__name__
+        memo.__doc__ = fn.__doc__
+        self.__dict__[fn.__name__] = memo
+        return result
+    return update_wrapper(oneshot, fn)


 class group_expirable_memoized_property(object):
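The descriptor class became a plain decorator; behaviour is roughly unchanged, as in this sketch (illustrative only, assuming `memoized_instancemethod` is re-exported from `sqlalchemy.util` as in upstream SQLAlchemy):

```python
from sqlalchemy.util import memoized_instancemethod

class Loader(object):
    calls = 0

    @memoized_instancemethod
    def load(self):
        Loader.calls += 1
        return "payload"

ldr = Loader()
assert ldr.load() == "payload"
assert ldr.load() == "payload"   # second call hits the per-instance memo
assert Loader.calls == 1
```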
@@ -787,6 +804,42 @@ class group_expirable_memoized_property(object):
         return memoized_instancemethod(fn)


+class MemoizedSlots(object):
+    """Apply memoized items to an object using a __getattr__ scheme.
+
+    This allows the functionality of memoized_property and
+    memoized_instancemethod to be available to a class using __slots__.
+
+    """
+
+    __slots__ = ()
+
+    def _fallback_getattr(self, key):
+        raise AttributeError(key)
+
+    def __getattr__(self, key):
+        if key.startswith('_memoized'):
+            raise AttributeError(key)
+        elif hasattr(self, '_memoized_attr_%s' % key):
+            value = getattr(self, '_memoized_attr_%s' % key)()
+            setattr(self, key, value)
+            return value
+        elif hasattr(self, '_memoized_method_%s' % key):
+            fn = getattr(self, '_memoized_method_%s' % key)
+
+            def oneshot(*args, **kw):
+                result = fn(*args, **kw)
+                memo = lambda *a, **kw: result
+                memo.__name__ = fn.__name__
+                memo.__doc__ = fn.__doc__
+                setattr(self, key, memo)
+                return result
+            oneshot.__doc__ = fn.__doc__
+            return oneshot
+        else:
+            return self._fallback_getattr(key)
+
+
 def dependency_for(modulename):
     def decorate(obj):
         # TODO: would be nice to improve on this import silliness,
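A sketch of how a `__slots__` class would opt in to the new `MemoizedSlots` helper (illustrative only; the slot has to exist so the computed value can be stored):

```python
from sqlalchemy.util import MemoizedSlots

class Point(MemoizedSlots):
    __slots__ = 'x', 'y', 'norm_squared'

    def __init__(self, x, y):
        self.x = x
        self.y = y

    def _memoized_attr_norm_squared(self):
        # computed on first access, then written into the slot
        return self.x * self.x + self.y * self.y

p = Point(3, 4)
assert p.norm_squared == 25      # first access goes through __getattr__
assert p.norm_squared == 25      # now read straight from the slot
```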
@@ -932,7 +985,7 @@ def asbool(obj):


 def bool_or_str(*text):
-    """Return a callable that will evaulate a string as
+    """Return a callable that will evaluate a string as
     boolean, or one of a set of "alternate" string values.

     """
@@ -965,7 +1018,7 @@ def coerce_kw_type(kw, key, type_, flexi_bool=True):
         kw[key] = type_(kw[key])


-def constructor_copy(obj, cls, **kw):
+def constructor_copy(obj, cls, *args, **kw):
     """Instantiate cls using the __dict__ of obj as constructor arguments.

     Uses inspect to match the named arguments of ``cls``.
@@ -974,7 +1027,7 @@ def constructor_copy(obj, cls, **kw):

     names = get_cls_kwargs(cls)
     kw.update((k, obj.__dict__[k]) for k in names if k in obj.__dict__)
-    return cls(**kw)
+    return cls(*args, **kw)


 def counter():
@@ -1088,10 +1141,23 @@ class classproperty(property):
         return desc.fget(cls)


+class hybridproperty(object):
+    def __init__(self, func):
+        self.func = func
+
+    def __get__(self, instance, owner):
+        if instance is None:
+            clsval = self.func(owner)
+            clsval.__doc__ = self.func.__doc__
+            return clsval
+        else:
+            return self.func(instance)
+
+
 class hybridmethod(object):
     """Decorate a function as cls- or instance- level."""

-    def __init__(self, func, expr=None):
+    def __init__(self, func):
         self.func = func

     def __get__(self, instance, owner):
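A rough sketch of the cls-or-instance dispatch these descriptors provide (illustrative only; it assumes `hybridmethod.__get__` binds to the class when accessed on the class, as in upstream SQLAlchemy):

```python
from sqlalchemy.util import hybridmethod

class Registry(object):
    _class_items = ["a", "b"]

    def __init__(self):
        self._items = []

    @hybridmethod
    def items(self_or_cls):
        # receives the class when called on the class,
        # the instance when called on an instance
        if isinstance(self_or_cls, type):
            return self_or_cls._class_items
        return self_or_cls._items

assert Registry.items() == ["a", "b"]
assert Registry().items() == []
```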
@@ -1183,28 +1249,62 @@ def warn_exception(func, *args, **kwargs):
     """
     try:
         return func(*args, **kwargs)
-    except:
+    except Exception:
         warn("%s('%s') ignored" % sys.exc_info()[0:2])


-def warn(msg, stacklevel=3):
+def ellipses_string(value, len_=25):
+    try:
+        if len(value) > len_:
+            return "%s..." % value[0:len_]
+        else:
+            return value
+    except TypeError:
+        return value
+
+
+class _hash_limit_string(compat.text_type):
+    """A string subclass that can only be hashed on a maximum amount
+    of unique values.
+
+    This is used for warnings so that we can send out parameterized warnings
+    without the __warningregistry__ of the module, or the non-overridable
+    "once" registry within warnings.py, overloading memory,
+
+
+    """
+    def __new__(cls, value, num, args):
+        interpolated = (value % args) + \
+            (" (this warning may be suppressed after %d occurrences)" % num)
+        self = super(_hash_limit_string, cls).__new__(cls, interpolated)
+        self._hash = hash("%s_%d" % (value, hash(interpolated) % num))
+        return self
+
+    def __hash__(self):
+        return self._hash
+
+    def __eq__(self, other):
+        return hash(self) == hash(other)
+
+
+def warn(msg):
     """Issue a warning.

     If msg is a string, :class:`.exc.SAWarning` is used as
     the category.

-    .. note::
+    """
+    warnings.warn(msg, exc.SAWarning, stacklevel=2)

-       This function is swapped out when the test suite
-       runs, with a compatible version that uses
-       warnings.warn_explicit, so that the warnings registry can
-       be controlled.

+def warn_limited(msg, args):
+    """Issue a warning with a paramterized string, limiting the number
+    of registrations.
+
     """
-    if isinstance(msg, compat.string_types):
-        warnings.warn(msg, exc.SAWarning, stacklevel=stacklevel)
-    else:
-        warnings.warn(msg, stacklevel=stacklevel)
+    if args:
+        msg = _hash_limit_string(msg, 10, args)
+    warnings.warn(msg, exc.SAWarning, stacklevel=2)


 def only_once(fn):
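`warn_limited()` routes parameterized messages through `_hash_limit_string`, so Python's once-per-message warning registry sees at most a fixed number of distinct keys; sketch (illustrative only):

```python
from sqlalchemy import util

# each interpolated message hashes into one of 10 buckets, so an endlessly
# varying parameter cannot grow the warnings registry without bound
for name in ("a", "b", "c"):
    util.warn_limited("Unknown column %r encountered", (name,))
```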
@@ -1247,3 +1347,38 @@ def chop_traceback(tb, exclude_prefix=_UNITTEST_RE, exclude_suffix=_SQLA_RE):
     return tb[start:end + 1]

 NoneType = type(None)
+
+
+def attrsetter(attrname):
+    code = \
+        "def set(obj, value):"\
+        "    obj.%s = value" % attrname
+    env = locals().copy()
+    exec(code, env)
+    return env['set']
+
+
+class EnsureKWArgType(type):
+    """Apply translation of functions to accept **kw arguments if they
+    don't already.
+
+    """
+    def __init__(cls, clsname, bases, clsdict):
+        fn_reg = cls.ensure_kwarg
+        if fn_reg:
+            for key in clsdict:
+                m = re.match(fn_reg, key)
+                if m:
+                    fn = clsdict[key]
+                    spec = compat.inspect_getargspec(fn)
+                    if not spec.keywords:
+                        clsdict[key] = wrapped = cls._wrap_w_kw(fn)
+                        setattr(cls, key, wrapped)
+        super(EnsureKWArgType, cls).__init__(clsname, bases, clsdict)
+
+    def _wrap_w_kw(self, fn):
+
+        def wrap(*arg, **kw):
+            return fn(*arg)
+        return update_wrapper(wrap, fn)
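A sketch of the `EnsureKWArgType` metaclass at work, using the module's own `with_metaclass()` shim (illustrative only; `ensure_kwarg` is the regex of method names to wrap):

```python
from sqlalchemy.util import EnsureKWArgType, with_metaclass

class Visitor(with_metaclass(EnsureKWArgType, object)):
    ensure_kwarg = 'visit_.*'

    def visit_table(self, table):          # declared without **kw
        return table

# the metaclass wrapped visit_table, so stray keyword arguments are accepted
v = Visitor()
assert v.visit_table("some_table", extra="ignored") == "some_table"
```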
util/queue.py

@@ -1,5 +1,5 @@
 # util/queue.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under

util/topological.py

@@ -1,5 +1,5 @@
 # util/topological.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -13,18 +13,20 @@ from .. import util
 __all__ = ['sort', 'sort_as_subsets', 'find_cycles']


-def sort_as_subsets(tuples, allitems):
+def sort_as_subsets(tuples, allitems, deterministic_order=False):

     edges = util.defaultdict(set)
     for parent, child in tuples:
         edges[child].add(parent)

-    todo = set(allitems)
+    Set = util.OrderedSet if deterministic_order else set
+
+    todo = Set(allitems)

     while todo:
-        output = set()
-        for node in list(todo):
-            if not todo.intersection(edges[node]):
+        output = Set()
+        for node in todo:
+            if todo.isdisjoint(edges[node]):
                 output.add(node)

         if not output:
@@ -38,13 +40,14 @@ def sort_as_subsets(tuples, allitems):
         yield output


-def sort(tuples, allitems):
+def sort(tuples, allitems, deterministic_order=False):
     """sort the given list of items by dependency.

     'tuples' is a list of tuples representing a partial ordering.
+    'deterministic_order' keeps items within a dependency tier in list order.
     """

-    for set_ in sort_as_subsets(tuples, allitems):
+    for set_ in sort_as_subsets(tuples, allitems, deterministic_order):
         for s in set_:
             yield s
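A short sketch of the dependency sort with the new `deterministic_order` flag (illustrative only; each tuple reads "left must precede right"):

```python
from sqlalchemy.util import topological

dependencies = [("base", "child"), ("base", "other"), ("child", "grandchild")]
nodes = ["grandchild", "other", "child", "base"]

# with deterministic_order=True each tier is an OrderedSet, so repeated runs
# yield the same sequence for the same input ordering
ordered = list(topological.sort(dependencies, nodes, deterministic_order=True))
assert ordered.index("base") < ordered.index("child") < ordered.index("grandchild")
```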