split platform

commit a2291f58b8
278 changed files with 114251 additions and 0 deletions
lib/python3.4/site-packages/sqlalchemy/util/__init__.py (new file, 47 additions)
@@ -0,0 +1,47 @@
# util/__init__.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from .compat import callable, cmp, reduce, \
    threading, py3k, py33, py2k, jython, pypy, cpython, win32, \
    pickle, dottedgetter, parse_qsl, namedtuple, next, reraise, \
    raise_from_cause, text_type, safe_kwarg, string_types, int_types, \
    binary_type, \
    quote_plus, with_metaclass, print_, itertools_filterfalse, u, ue, b, \
    unquote_plus, unquote, b64decode, b64encode, byte_buffer, itertools_filter, \
    iterbytes, StringIO, inspect_getargspec, zip_longest

from ._collections import KeyedTuple, ImmutableContainer, immutabledict, \
    Properties, OrderedProperties, ImmutableProperties, OrderedDict, \
    OrderedSet, IdentitySet, OrderedIdentitySet, column_set, \
    column_dict, ordered_column_set, populate_column_dict, unique_list, \
    UniqueAppender, PopulateDict, EMPTY_SET, to_list, to_set, \
    to_column_set, update_copy, flatten_iterator, \
    LRUCache, ScopedRegistry, ThreadLocalRegistry, WeakSequence, \
    coerce_generator_arg

from .langhelpers import iterate_attributes, class_hierarchy, \
    portable_instancemethod, unbound_method_to_callable, \
    getargspec_init, format_argspec_init, format_argspec_plus, \
    get_func_kwargs, get_cls_kwargs, decorator, as_interface, \
    memoized_property, memoized_instancemethod, md5_hex, \
    group_expirable_memoized_property, dependencies, decode_slice, \
    monkeypatch_proxied_specials, asbool, bool_or_str, coerce_kw_type, \
    duck_type_collection, assert_arg_type, symbol, dictlike_iteritems, \
    classproperty, set_creation_order, warn_exception, warn, NoneType, \
    constructor_copy, methods_equivalent, chop_traceback, asint, \
    generic_repr, counter, PluginLoader, hybridmethod, safe_reraise, \
    get_callable_argspec, only_once

from .deprecations import warn_deprecated, warn_pending_deprecation, \
    deprecated, pending_deprecation, inject_docstring_text

# things that used to be not always available,
# but are now as of current support Python versions
from collections import defaultdict
from functools import partial
from functools import update_wrapper
from contextlib import contextmanager
lib/python3.4/site-packages/sqlalchemy/util/_collections.py (new file, 965 additions)
@@ -0,0 +1,965 @@
# util/_collections.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Collection classes and helpers."""

from __future__ import absolute_import
import weakref
import operator
from .compat import threading, itertools_filterfalse
from . import py2k
import types

EMPTY_SET = frozenset()


class KeyedTuple(tuple):
    """``tuple`` subclass that adds labeled names.

    E.g.::

        >>> k = KeyedTuple([1, 2, 3], labels=["one", "two", "three"])
        >>> k.one
        1
        >>> k.two
        2

    Result rows returned by :class:`.Query` that contain multiple
    ORM entities and/or column expressions make use of this
    class to return rows.

    The :class:`.KeyedTuple` exhibits similar behavior to the
    ``collections.namedtuple()`` construct provided in the Python
    standard library, however is architected very differently.
    Unlike ``collections.namedtuple()``, :class:`.KeyedTuple`
    does not rely on creation of custom subtypes in order to represent
    a new series of keys, instead each :class:`.KeyedTuple` instance
    receives its list of keys in place.  The subtype approach
    of ``collections.namedtuple()`` introduces significant complexity
    and performance overhead, which is not necessary for the
    :class:`.Query` object's use case.

    .. versionchanged:: 0.8
        Compatibility methods with ``collections.namedtuple()`` have been
        added including :attr:`.KeyedTuple._fields` and
        :meth:`.KeyedTuple._asdict`.

    .. seealso::

        :ref:`ormtutorial_querying`

    """

    def __new__(cls, vals, labels=None):
        t = tuple.__new__(cls, vals)
        t._labels = []
        if labels:
            t.__dict__.update(zip(labels, vals))
            t._labels = labels
        return t

    def keys(self):
        """Return a list of string key names for this :class:`.KeyedTuple`.

        .. seealso::

            :attr:`.KeyedTuple._fields`

        """

        return [l for l in self._labels if l is not None]

    @property
    def _fields(self):
        """Return a tuple of string key names for this :class:`.KeyedTuple`.

        This method provides compatibility with ``collections.namedtuple()``.

        .. versionadded:: 0.8

        .. seealso::

            :meth:`.KeyedTuple.keys`

        """
        return tuple(self.keys())

    def _asdict(self):
        """Return the contents of this :class:`.KeyedTuple` as a dictionary.

        This method provides compatibility with ``collections.namedtuple()``,
        with the exception that the dictionary returned is **not** ordered.

        .. versionadded:: 0.8

        """
        return dict((key, self.__dict__[key]) for key in self.keys())


class ImmutableContainer(object):
    def _immutable(self, *arg, **kw):
        raise TypeError("%s object is immutable" % self.__class__.__name__)

    __delitem__ = __setitem__ = __setattr__ = _immutable


class immutabledict(ImmutableContainer, dict):

    clear = pop = popitem = setdefault = \
        update = ImmutableContainer._immutable

    def __new__(cls, *args):
        new = dict.__new__(cls)
        dict.__init__(new, *args)
        return new

    def __init__(self, *args):
        pass

    def __reduce__(self):
        return immutabledict, (dict(self), )

    def union(self, d):
        if not self:
            return immutabledict(d)
        else:
            d2 = immutabledict(self)
            dict.update(d2, d)
            return d2

    def __repr__(self):
        return "immutabledict(%s)" % dict.__repr__(self)


class Properties(object):
    """Provide a __getattr__/__setattr__ interface over a dict."""

    def __init__(self, data):
        self.__dict__['_data'] = data

    def __len__(self):
        return len(self._data)

    def __iter__(self):
        return iter(list(self._data.values()))

    def __add__(self, other):
        return list(self) + list(other)

    def __setitem__(self, key, object):
        self._data[key] = object

    def __getitem__(self, key):
        return self._data[key]

    def __delitem__(self, key):
        del self._data[key]

    def __setattr__(self, key, object):
        self._data[key] = object

    def __getstate__(self):
        return {'_data': self.__dict__['_data']}

    def __setstate__(self, state):
        self.__dict__['_data'] = state['_data']

    def __getattr__(self, key):
        try:
            return self._data[key]
        except KeyError:
            raise AttributeError(key)

    def __contains__(self, key):
        return key in self._data

    def as_immutable(self):
        """Return an immutable proxy for this :class:`.Properties`."""

        return ImmutableProperties(self._data)

    def update(self, value):
        self._data.update(value)

    def get(self, key, default=None):
        if key in self:
            return self[key]
        else:
            return default

    def keys(self):
        return list(self._data)

    def values(self):
        return list(self._data.values())

    def items(self):
        return list(self._data.items())

    def has_key(self, key):
        return key in self._data

    def clear(self):
        self._data.clear()


class OrderedProperties(Properties):
    """Provide a __getattr__/__setattr__ interface with an OrderedDict
    as backing store."""

    def __init__(self):
        Properties.__init__(self, OrderedDict())


class ImmutableProperties(ImmutableContainer, Properties):
    """Provide immutable dict/object attribute to an underlying dictionary."""


class OrderedDict(dict):
    """A dict that returns keys/values/items in the order they were added."""

    def __init__(self, ____sequence=None, **kwargs):
        self._list = []
        if ____sequence is None:
            if kwargs:
                self.update(**kwargs)
        else:
            self.update(____sequence, **kwargs)

    def clear(self):
        self._list = []
        dict.clear(self)

    def copy(self):
        return self.__copy__()

    def __copy__(self):
        return OrderedDict(self)

    def sort(self, *arg, **kw):
        self._list.sort(*arg, **kw)

    def update(self, ____sequence=None, **kwargs):
        if ____sequence is not None:
            if hasattr(____sequence, 'keys'):
                for key in ____sequence.keys():
                    self.__setitem__(key, ____sequence[key])
            else:
                for key, value in ____sequence:
                    self[key] = value
        if kwargs:
            self.update(kwargs)

    def setdefault(self, key, value):
        if key not in self:
            self.__setitem__(key, value)
            return value
        else:
            return self.__getitem__(key)

    def __iter__(self):
        return iter(self._list)

    if py2k:
        def values(self):
            return [self[key] for key in self._list]

        def keys(self):
            return self._list

        def itervalues(self):
            return iter([self[key] for key in self._list])

        def iterkeys(self):
            return iter(self)

        def iteritems(self):
            return iter(self.items())

        def items(self):
            return [(key, self[key]) for key in self._list]
    else:
        def values(self):
            # return (self[key] for key in self)
            return (self[key] for key in self._list)

        def keys(self):
            # return iter(self)
            return iter(self._list)

        def items(self):
            # return ((key, self[key]) for key in self)
            return ((key, self[key]) for key in self._list)

    _debug_iter = False
    if _debug_iter:
        # normally disabled to reduce function call
        # overhead
        def __iter__(self):
            len_ = len(self._list)
            for item in self._list:
                yield item
                assert len_ == len(self._list), \
                    "Dictionary changed size during iteration"

        def values(self):
            return (self[key] for key in self)

        def keys(self):
            return iter(self)

        def items(self):
            return ((key, self[key]) for key in self)

    def __setitem__(self, key, object):
        if key not in self:
            try:
                self._list.append(key)
            except AttributeError:
                # work around Python pickle loads() with
                # dict subclass (seems to ignore __setstate__?)
                self._list = [key]
        dict.__setitem__(self, key, object)

    def __delitem__(self, key):
        dict.__delitem__(self, key)
        self._list.remove(key)

    def pop(self, key, *default):
        present = key in self
        value = dict.pop(self, key, *default)
        if present:
            self._list.remove(key)
        return value

    def popitem(self):
        item = dict.popitem(self)
        self._list.remove(item[0])
        return item


class OrderedSet(set):
    def __init__(self, d=None):
        set.__init__(self)
        self._list = []
        if d is not None:
            self.update(d)

    def add(self, element):
        if element not in self:
            self._list.append(element)
        set.add(self, element)

    def remove(self, element):
        set.remove(self, element)
        self._list.remove(element)

    def insert(self, pos, element):
        if element not in self:
            self._list.insert(pos, element)
        set.add(self, element)

    def discard(self, element):
        if element in self:
            self._list.remove(element)
            set.remove(self, element)

    def clear(self):
        set.clear(self)
        self._list = []

    def __getitem__(self, key):
        return self._list[key]

    def __iter__(self):
        return iter(self._list)

    def __add__(self, other):
        return self.union(other)

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, self._list)

    __str__ = __repr__

    def update(self, iterable):
        for e in iterable:
            if e not in self:
                self._list.append(e)
                set.add(self, e)
        return self

    __ior__ = update

    def union(self, other):
        result = self.__class__(self)
        result.update(other)
        return result

    __or__ = union

    def intersection(self, other):
        other = set(other)
        return self.__class__(a for a in self if a in other)

    __and__ = intersection

    def symmetric_difference(self, other):
        other = set(other)
        result = self.__class__(a for a in self if a not in other)
        result.update(a for a in other if a not in self)
        return result

    __xor__ = symmetric_difference

    def difference(self, other):
        other = set(other)
        return self.__class__(a for a in self if a not in other)

    __sub__ = difference

    def intersection_update(self, other):
        other = set(other)
        set.intersection_update(self, other)
        self._list = [a for a in self._list if a in other]
        return self

    __iand__ = intersection_update

    def symmetric_difference_update(self, other):
        set.symmetric_difference_update(self, other)
        self._list = [a for a in self._list if a in self]
        self._list += [a for a in other._list if a in self]
        return self

    __ixor__ = symmetric_difference_update

    def difference_update(self, other):
        set.difference_update(self, other)
        self._list = [a for a in self._list if a in self]
        return self

    __isub__ = difference_update


class IdentitySet(object):
    """A set that considers only object id() for uniqueness.

    This strategy has edge cases for builtin types- it's possible to have
    two 'foo' strings in one of these sets, for example.  Use sparingly.

    """

    _working_set = set

    def __init__(self, iterable=None):
        self._members = dict()
        if iterable:
            for o in iterable:
                self.add(o)

    def add(self, value):
        self._members[id(value)] = value

    def __contains__(self, value):
        return id(value) in self._members

    def remove(self, value):
        del self._members[id(value)]

    def discard(self, value):
        try:
            self.remove(value)
        except KeyError:
            pass

    def pop(self):
        try:
            pair = self._members.popitem()
            return pair[1]
        except KeyError:
            raise KeyError('pop from an empty set')

    def clear(self):
        self._members.clear()

    def __cmp__(self, other):
        raise TypeError('cannot compare sets using cmp()')

    def __eq__(self, other):
        if isinstance(other, IdentitySet):
            return self._members == other._members
        else:
            return False

    def __ne__(self, other):
        if isinstance(other, IdentitySet):
            return self._members != other._members
        else:
            return True

    def issubset(self, iterable):
        other = type(self)(iterable)

        if len(self) > len(other):
            return False
        for m in itertools_filterfalse(other._members.__contains__,
                                       iter(self._members.keys())):
            return False
        return True

    def __le__(self, other):
        if not isinstance(other, IdentitySet):
            return NotImplemented
        return self.issubset(other)

    def __lt__(self, other):
        if not isinstance(other, IdentitySet):
            return NotImplemented
        return len(self) < len(other) and self.issubset(other)

    def issuperset(self, iterable):
        other = type(self)(iterable)

        if len(self) < len(other):
            return False

        for m in itertools_filterfalse(self._members.__contains__,
                                       iter(other._members.keys())):
            return False
        return True

    def __ge__(self, other):
        if not isinstance(other, IdentitySet):
            return NotImplemented
        return self.issuperset(other)

    def __gt__(self, other):
        if not isinstance(other, IdentitySet):
            return NotImplemented
        return len(self) > len(other) and self.issuperset(other)

    def union(self, iterable):
        result = type(self)()
        # testlib.pragma exempt:__hash__
        members = self._member_id_tuples()
        other = _iter_id(iterable)
        result._members.update(self._working_set(members).union(other))
        return result

    def __or__(self, other):
        if not isinstance(other, IdentitySet):
            return NotImplemented
        return self.union(other)

    def update(self, iterable):
        self._members = self.union(iterable)._members

    def __ior__(self, other):
        if not isinstance(other, IdentitySet):
            return NotImplemented
        self.update(other)
        return self

    def difference(self, iterable):
        result = type(self)()
        # testlib.pragma exempt:__hash__
        members = self._member_id_tuples()
        other = _iter_id(iterable)
        result._members.update(self._working_set(members).difference(other))
        return result

    def __sub__(self, other):
        if not isinstance(other, IdentitySet):
            return NotImplemented
        return self.difference(other)

    def difference_update(self, iterable):
        self._members = self.difference(iterable)._members

    def __isub__(self, other):
        if not isinstance(other, IdentitySet):
            return NotImplemented
        self.difference_update(other)
        return self

    def intersection(self, iterable):
        result = type(self)()
        # testlib.pragma exempt:__hash__
        members = self._member_id_tuples()
        other = _iter_id(iterable)
        result._members.update(self._working_set(members).intersection(other))
        return result

    def __and__(self, other):
        if not isinstance(other, IdentitySet):
            return NotImplemented
        return self.intersection(other)

    def intersection_update(self, iterable):
        self._members = self.intersection(iterable)._members

    def __iand__(self, other):
        if not isinstance(other, IdentitySet):
            return NotImplemented
        self.intersection_update(other)
        return self

    def symmetric_difference(self, iterable):
        result = type(self)()
        # testlib.pragma exempt:__hash__
        members = self._member_id_tuples()
        other = _iter_id(iterable)
        result._members.update(
            self._working_set(members).symmetric_difference(other))
        return result

    def _member_id_tuples(self):
        return ((id(v), v) for v in self._members.values())

    def __xor__(self, other):
        if not isinstance(other, IdentitySet):
            return NotImplemented
        return self.symmetric_difference(other)

    def symmetric_difference_update(self, iterable):
        self._members = self.symmetric_difference(iterable)._members

    def __ixor__(self, other):
        if not isinstance(other, IdentitySet):
            return NotImplemented
        self.symmetric_difference(other)
        return self

    def copy(self):
        return type(self)(iter(self._members.values()))

    __copy__ = copy

    def __len__(self):
        return len(self._members)

    def __iter__(self):
        return iter(self._members.values())

    def __hash__(self):
        raise TypeError('set objects are unhashable')

    def __repr__(self):
        return '%s(%r)' % (type(self).__name__, list(self._members.values()))


class WeakSequence(object):
    def __init__(self, __elements=()):
        self._storage = [
            weakref.ref(element, self._remove) for element in __elements
        ]

    def append(self, item):
        self._storage.append(weakref.ref(item, self._remove))

    def _remove(self, ref):
        self._storage.remove(ref)

    def __len__(self):
        return len(self._storage)

    def __iter__(self):
        return (obj for obj in
                (ref() for ref in self._storage) if obj is not None)

    def __getitem__(self, index):
        try:
            obj = self._storage[index]
        except KeyError:
            raise IndexError("Index %s out of range" % index)
        else:
            return obj()


class OrderedIdentitySet(IdentitySet):
    class _working_set(OrderedSet):
        # a testing pragma: exempt the OIDS working set from the test suite's
        # "never call the user's __hash__" assertions.  this is a big hammer,
        # but it's safe here: IDS operates on (id, instance) tuples in the
        # working set.
        __sa_hash_exempt__ = True

    def __init__(self, iterable=None):
        IdentitySet.__init__(self)
        self._members = OrderedDict()
        if iterable:
            for o in iterable:
                self.add(o)


class PopulateDict(dict):
    """A dict which populates missing values via a creation function.

    Note the creation function takes a key, unlike
    collections.defaultdict.

    """

    def __init__(self, creator):
        self.creator = creator

    def __missing__(self, key):
        self[key] = val = self.creator(key)
        return val

# Define collections that are capable of storing
# ColumnElement objects as hashable keys/elements.
# At this point, these are mostly historical, things
# used to be more complicated.
column_set = set
column_dict = dict
ordered_column_set = OrderedSet
populate_column_dict = PopulateDict


def unique_list(seq, hashfunc=None):
    seen = {}
    if not hashfunc:
        return [x for x in seq
                if x not in seen
                and not seen.__setitem__(x, True)]
    else:
        return [x for x in seq
                if hashfunc(x) not in seen
                and not seen.__setitem__(hashfunc(x), True)]


class UniqueAppender(object):
    """Appends items to a collection ensuring uniqueness.

    Additional appends() of the same object are ignored.  Membership is
    determined by identity (``is a``) not equality (``==``).
    """

    def __init__(self, data, via=None):
        self.data = data
        self._unique = {}
        if via:
            self._data_appender = getattr(data, via)
        elif hasattr(data, 'append'):
            self._data_appender = data.append
        elif hasattr(data, 'add'):
            self._data_appender = data.add

    def append(self, item):
        id_ = id(item)
        if id_ not in self._unique:
            self._data_appender(item)
            self._unique[id_] = True

    def __iter__(self):
        return iter(self.data)


def coerce_generator_arg(arg):
    if len(arg) == 1 and isinstance(arg[0], types.GeneratorType):
        return list(arg[0])
    else:
        return arg


def to_list(x, default=None):
    if x is None:
        return default
    if not isinstance(x, (list, tuple)):
        return [x]
    else:
        return x


def to_set(x):
    if x is None:
        return set()
    if not isinstance(x, set):
        return set(to_list(x))
    else:
        return x


def to_column_set(x):
    if x is None:
        return column_set()
    if not isinstance(x, column_set):
        return column_set(to_list(x))
    else:
        return x


def update_copy(d, _new=None, **kw):
    """Copy the given dict and update with the given values."""

    d = d.copy()
    if _new:
        d.update(_new)
    d.update(**kw)
    return d


def flatten_iterator(x):
    """Given an iterator of which further sub-elements may also be
    iterators, flatten the sub-elements into a single iterator.

    """
    for elem in x:
        if not isinstance(elem, str) and hasattr(elem, '__iter__'):
            for y in flatten_iterator(elem):
                yield y
        else:
            yield elem


class LRUCache(dict):
    """Dictionary with 'squishy' removal of least
    recently used items.

    """

    def __init__(self, capacity=100, threshold=.5):
        self.capacity = capacity
        self.threshold = threshold
        self._counter = 0

    def _inc_counter(self):
        self._counter += 1
        return self._counter

    def __getitem__(self, key):
        item = dict.__getitem__(self, key)
        item[2] = self._inc_counter()
        return item[1]

    def values(self):
        return [i[1] for i in dict.values(self)]

    def setdefault(self, key, value):
        if key in self:
            return self[key]
        else:
            self[key] = value
            return value

    def __setitem__(self, key, value):
        item = dict.get(self, key)
        if item is None:
            item = [key, value, self._inc_counter()]
            dict.__setitem__(self, key, item)
        else:
            item[1] = value
        self._manage_size()

    def _manage_size(self):
        while len(self) > self.capacity + self.capacity * self.threshold:
            by_counter = sorted(dict.values(self),
                                key=operator.itemgetter(2),
                                reverse=True)
            for item in by_counter[self.capacity:]:
                try:
                    del self[item[0]]
                except KeyError:
                    # if we couldn't find a key, most
                    # likely some other thread broke in
                    # on us. loop around and try again
                    break


class ScopedRegistry(object):
    """A Registry that can store one or multiple instances of a single
    class on the basis of a "scope" function.

    The object implements ``__call__`` as the "getter", so by
    calling ``myregistry()`` the contained object is returned
    for the current scope.

    :param createfunc:
      a callable that returns a new object to be placed in the registry

    :param scopefunc:
      a callable that will return a key to store/retrieve an object.
    """

    def __init__(self, createfunc, scopefunc):
        """Construct a new :class:`.ScopedRegistry`.

        :param createfunc:  A creation function that will generate
          a new value for the current scope, if none is present.

        :param scopefunc:  A function that returns a hashable
          token representing the current scope (such as, current
          thread identifier).

        """
        self.createfunc = createfunc
        self.scopefunc = scopefunc
        self.registry = {}

    def __call__(self):
        key = self.scopefunc()
        try:
            return self.registry[key]
        except KeyError:
            return self.registry.setdefault(key, self.createfunc())

    def has(self):
        """Return True if an object is present in the current scope."""

        return self.scopefunc() in self.registry

    def set(self, obj):
        """Set the value for the current scope."""

        self.registry[self.scopefunc()] = obj

    def clear(self):
        """Clear the current scope, if any."""

        try:
            del self.registry[self.scopefunc()]
        except KeyError:
            pass


class ThreadLocalRegistry(ScopedRegistry):
    """A :class:`.ScopedRegistry` that uses a ``threading.local()``
    variable for storage.

    """

    def __init__(self, createfunc):
        self.createfunc = createfunc
        self.registry = threading.local()

    def __call__(self):
        try:
            return self.registry.value
        except AttributeError:
            val = self.registry.value = self.createfunc()
            return val

    def has(self):
        return hasattr(self.registry, "value")

    def set(self, obj):
        self.registry.value = obj

    def clear(self):
        try:
            del self.registry.value
        except AttributeError:
            pass


def _iter_id(iterable):
    """Generator: ((id(o), o) for o in iterable)."""

    for item in iterable:
        yield id(item), item
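For orientation, here is a minimal usage sketch of a few of the collection types added above. It is not part of the committed file; it assumes this vendored 0.9-era `sqlalchemy.util` package is importable, and the capacity/threshold numbers are illustrative only.

# Illustrative sketch, not part of the committed diff.
# Assumes the vendored sqlalchemy.util package above is on sys.path.
from sqlalchemy.util import IdentitySet, LRUCache, OrderedSet

# OrderedSet keeps insertion order while still behaving as a set.
s = OrderedSet([3, 1, 2, 1])
assert list(s) == [3, 1, 2]

# IdentitySet deduplicates by id(), not by equality.
a, b = [1, 2], [1, 2]            # equal but distinct objects
ids = IdentitySet([a, b, a])
assert len(ids) == 2

# LRUCache lets itself grow to capacity * (1 + threshold) before
# trimming back down to the most recently used `capacity` entries.
cache = LRUCache(capacity=5, threshold=0.5)
for i in range(8):               # 8 > 5 * 1.5, so a trim occurs
    cache[i] = str(i)
assert len(cache) <= 5 + 5 * 0.5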
lib/python3.4/site-packages/sqlalchemy/util/compat.py (new file, 228 additions)
@@ -0,0 +1,228 @@
# util/compat.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Handle Python version/platform incompatibilities."""

import sys

try:
    import threading
except ImportError:
    import dummy_threading as threading

py33 = sys.version_info >= (3, 3)
py32 = sys.version_info >= (3, 2)
py3k = sys.version_info >= (3, 0)
py2k = sys.version_info < (3, 0)
py265 = sys.version_info >= (2, 6, 5)
jython = sys.platform.startswith('java')
pypy = hasattr(sys, 'pypy_version_info')
win32 = sys.platform.startswith('win')
cpython = not pypy and not jython  # TODO: something better for this ?

import collections
next = next

if py3k:
    import pickle
else:
    try:
        import cPickle as pickle
    except ImportError:
        import pickle

# work around http://bugs.python.org/issue2646
if py265:
    safe_kwarg = lambda arg: arg
else:
    safe_kwarg = str

ArgSpec = collections.namedtuple("ArgSpec",
                                 ["args", "varargs", "keywords", "defaults"])

if py3k:
    import builtins

    from inspect import getfullargspec as inspect_getfullargspec
    from urllib.parse import (quote_plus, unquote_plus,
                              parse_qsl, quote, unquote)
    import configparser
    from io import StringIO

    from io import BytesIO as byte_buffer

    def inspect_getargspec(func):
        return ArgSpec(
            *inspect_getfullargspec(func)[0:4]
        )

    string_types = str,
    binary_type = bytes
    text_type = str
    int_types = int,
    iterbytes = iter

    def u(s):
        return s

    def ue(s):
        return s

    def b(s):
        return s.encode("latin-1")

    if py32:
        callable = callable
    else:
        def callable(fn):
            return hasattr(fn, '__call__')

    def cmp(a, b):
        return (a > b) - (a < b)

    from functools import reduce

    print_ = getattr(builtins, "print")

    import_ = getattr(builtins, '__import__')

    import itertools
    itertools_filterfalse = itertools.filterfalse
    itertools_filter = filter
    itertools_imap = map
    from itertools import zip_longest

    import base64

    def b64encode(x):
        return base64.b64encode(x).decode('ascii')

    def b64decode(x):
        return base64.b64decode(x.encode('ascii'))

else:
    from inspect import getargspec as inspect_getfullargspec
    inspect_getargspec = inspect_getfullargspec
    from urllib import quote_plus, unquote_plus, quote, unquote
    from urlparse import parse_qsl
    import ConfigParser as configparser
    from StringIO import StringIO
    from cStringIO import StringIO as byte_buffer

    string_types = basestring,
    binary_type = str
    text_type = unicode
    int_types = int, long

    def iterbytes(buf):
        return (ord(byte) for byte in buf)

    def u(s):
        # this differs from what six does, which doesn't support non-ASCII
        # strings - we only use u() with
        # literal source strings, and all our source files with non-ascii
        # in them (all are tests) are utf-8 encoded.
        return unicode(s, "utf-8")

    def ue(s):
        return unicode(s, "unicode_escape")

    def b(s):
        return s

    def import_(*args):
        if len(args) == 4:
            args = args[0:3] + ([str(arg) for arg in args[3]],)
        return __import__(*args)

    callable = callable
    cmp = cmp
    reduce = reduce

    import base64
    b64encode = base64.b64encode
    b64decode = base64.b64decode

    def print_(*args, **kwargs):
        fp = kwargs.pop("file", sys.stdout)
        if fp is None:
            return
        for arg in enumerate(args):
            if not isinstance(arg, basestring):
                arg = str(arg)
            fp.write(arg)

    import itertools
    itertools_filterfalse = itertools.ifilterfalse
    itertools_filter = itertools.ifilter
    itertools_imap = itertools.imap
    from itertools import izip_longest as zip_longest


import time
if win32 or jython:
    time_func = time.clock
else:
    time_func = time.time

from collections import namedtuple
from operator import attrgetter as dottedgetter


if py3k:
    def reraise(tp, value, tb=None, cause=None):
        if cause is not None:
            value.__cause__ = cause
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value

    def raise_from_cause(exception, exc_info=None):
        if exc_info is None:
            exc_info = sys.exc_info()
        exc_type, exc_value, exc_tb = exc_info
        reraise(type(exception), exception, tb=exc_tb, cause=exc_value)
else:
    exec("def reraise(tp, value, tb=None, cause=None):\n"
         "    raise tp, value, tb\n")

    def raise_from_cause(exception, exc_info=None):
        # not as nice as that of Py3K, but at least preserves
        # the code line where the issue occurred
        if exc_info is None:
            exc_info = sys.exc_info()
        exc_type, exc_value, exc_tb = exc_info
        reraise(type(exception), exception, tb=exc_tb)

if py3k:
    exec_ = getattr(builtins, 'exec')
else:
    def exec_(func_text, globals_, lcl=None):
        if lcl is None:
            exec('exec func_text in globals_')
        else:
            exec('exec func_text in globals_, lcl')


def with_metaclass(meta, *bases):
    """Create a base class with a metaclass.

    Drops the middle class upon creation.

    Source: http://lucumr.pocoo.org/2013/5/21/porting-to-python-3-redux/

    """

    class metaclass(meta):
        __call__ = type.__call__
        __init__ = type.__init__

        def __new__(cls, name, this_bases, d):
            if this_bases is None:
                return type.__new__(cls, name, (), d)
            return meta(name, bases, d)
    return metaclass('temporary_class', None, {})
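The `with_metaclass()` helper at the end of `compat.py` builds a throwaway intermediate class so the same class statement applies a metaclass on both Python 2 and 3. A small sketch of how it is typically used follows; it is not part of the committed file, and the `VerboseMeta`/`Base` names are invented for illustration.

# Illustrative sketch, not part of the committed diff.
from sqlalchemy.util.compat import with_metaclass


class VerboseMeta(type):
    # hypothetical metaclass that records every class it creates
    created = []

    def __init__(cls, name, bases, d):
        VerboseMeta.created.append(name)
        type.__init__(cls, name, bases, d)


# Works unchanged on py2k and py3k; the temporary class produced by
# with_metaclass() is dropped as soon as Base itself is created.
class Base(with_metaclass(VerboseMeta, object)):
    pass


assert type(Base) is VerboseMeta
assert "Base" in VerboseMeta.created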
lib/python3.4/site-packages/sqlalchemy/util/deprecations.py (new file, 146 additions)
@@ -0,0 +1,146 @@
# util/deprecations.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Helpers related to deprecation of functions, methods, classes, other
functionality."""

from .. import exc
import warnings
import re
from .langhelpers import decorator


def warn_deprecated(msg, stacklevel=3):
    warnings.warn(msg, exc.SADeprecationWarning, stacklevel=stacklevel)


def warn_pending_deprecation(msg, stacklevel=3):
    warnings.warn(msg, exc.SAPendingDeprecationWarning, stacklevel=stacklevel)


def deprecated(version, message=None, add_deprecation_to_docstring=True):
    """Decorates a function and issues a deprecation warning on use.

    :param message:
      If provided, issue message in the warning.  A sensible default
      is used if not provided.

    :param add_deprecation_to_docstring:
      Default True.  If False, the wrapped function's __doc__ is left
      as-is.  If True, the 'message' is prepended to the docs if
      provided, or sensible default if message is omitted.

    """

    if add_deprecation_to_docstring:
        header = ".. deprecated:: %s %s" % \
            (version, (message or ''))
    else:
        header = None

    if message is None:
        message = "Call to deprecated function %(func)s"

    def decorate(fn):
        return _decorate_with_warning(
            fn, exc.SADeprecationWarning,
            message % dict(func=fn.__name__), header)
    return decorate


def pending_deprecation(version, message=None,
                        add_deprecation_to_docstring=True):
    """Decorates a function and issues a pending deprecation warning on use.

    :param version:
      An approximate future version at which point the pending deprecation
      will become deprecated.  Not used in messaging.

    :param message:
      If provided, issue message in the warning.  A sensible default
      is used if not provided.

    :param add_deprecation_to_docstring:
      Default True.  If False, the wrapped function's __doc__ is left
      as-is.  If True, the 'message' is prepended to the docs if
      provided, or sensible default if message is omitted.
    """

    if add_deprecation_to_docstring:
        header = ".. deprecated:: %s (pending) %s" % \
            (version, (message or ''))
    else:
        header = None

    if message is None:
        message = "Call to deprecated function %(func)s"

    def decorate(fn):
        return _decorate_with_warning(
            fn, exc.SAPendingDeprecationWarning,
            message % dict(func=fn.__name__), header)
    return decorate


def _sanitize_restructured_text(text):
    def repl(m):
        type_, name = m.group(1, 2)
        if type_ in ("func", "meth"):
            name += "()"
        return name
    return re.sub(r'\:(\w+)\:`~?\.?(.+?)`', repl, text)


def _decorate_with_warning(func, wtype, message, docstring_header=None):
    """Wrap a function with a warnings.warn and augmented docstring."""

    message = _sanitize_restructured_text(message)

    @decorator
    def warned(fn, *args, **kwargs):
        warnings.warn(wtype(message), stacklevel=3)
        return fn(*args, **kwargs)

    doc = func.__doc__ is not None and func.__doc__ or ''
    if docstring_header is not None:
        docstring_header %= dict(func=func.__name__)

        doc = inject_docstring_text(doc, docstring_header, 1)

    decorated = warned(func)
    decorated.__doc__ = doc
    return decorated

import textwrap


def _dedent_docstring(text):
    split_text = text.split("\n", 1)
    if len(split_text) == 1:
        return text
    else:
        firstline, remaining = split_text
        if not firstline.startswith(" "):
            return firstline + "\n" + textwrap.dedent(remaining)
        else:
            return textwrap.dedent(text)


def inject_docstring_text(doctext, injecttext, pos):
    doctext = _dedent_docstring(doctext or "")
    lines = doctext.split('\n')
    injectlines = textwrap.dedent(injecttext).split("\n")
    if injectlines[0]:
        injectlines.insert(0, "")

    blanks = [num for num, line in enumerate(lines) if not line.strip()]
    blanks.insert(0, 0)

    inject_pos = blanks[min(pos, len(blanks) - 1)]

    lines = lines[0:inject_pos] + injectlines + lines[inject_pos:]
    return "\n".join(lines)
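`deprecated()` wraps a function so that calling it emits `SADeprecationWarning` and, by default, injects a `.. deprecated::` note into its docstring. A hedged sketch of that behavior, assuming the vendored package is importable; the `old_api` function is invented for illustration.

# Illustrative sketch, not part of the committed diff.
import warnings

from sqlalchemy.util.deprecations import deprecated


@deprecated("0.9", "Use :func:`.new_api` instead.")
def old_api():
    """Do the thing the old way."""
    return 42


with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    assert old_api() == 42           # still works, but warns
# the :func:`...` role has been rewritten to plain "new_api()" text
assert caught and "new_api" in str(caught[0].message)

# The docstring now carries the version note injected by the decorator.
assert ".. deprecated:: 0.9" in old_api.__doc__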
lib/python3.4/site-packages/sqlalchemy/util/langhelpers.py (new file, 1249 additions)
File diff suppressed because it is too large
lib/python3.4/site-packages/sqlalchemy/util/queue.py (new file, 199 additions)
@@ -0,0 +1,199 @@
# util/queue.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""An adaptation of Py2.3/2.4's Queue module which supports reentrant
behavior, using RLock instead of Lock for its mutex object.  The
Queue object is used exclusively by the sqlalchemy.pool.QueuePool
class.

This is to support the connection pool's usage of weakref callbacks to return
connections to the underlying Queue, which can in extremely
rare cases be invoked within the ``get()`` method of the Queue itself,
producing a ``put()`` inside the ``get()`` and therefore a reentrant
condition.

"""

from collections import deque
from time import time as _time
from .compat import threading


__all__ = ['Empty', 'Full', 'Queue']


class Empty(Exception):
    "Exception raised by Queue.get(block=0)/get_nowait()."

    pass


class Full(Exception):
    "Exception raised by Queue.put(block=0)/put_nowait()."

    pass


class Queue:
    def __init__(self, maxsize=0):
        """Initialize a queue object with a given maximum size.

        If `maxsize` is <= 0, the queue size is infinite.
        """

        self._init(maxsize)
        # mutex must be held whenever the queue is mutating.  All methods
        # that acquire mutex must release it before returning.  mutex
        # is shared between the two conditions, so acquiring and
        # releasing the conditions also acquires and releases mutex.
        self.mutex = threading.RLock()
        # Notify not_empty whenever an item is added to the queue; a
        # thread waiting to get is notified then.
        self.not_empty = threading.Condition(self.mutex)
        # Notify not_full whenever an item is removed from the queue;
        # a thread waiting to put is notified then.
        self.not_full = threading.Condition(self.mutex)

    def qsize(self):
        """Return the approximate size of the queue (not reliable!)."""

        self.mutex.acquire()
        n = self._qsize()
        self.mutex.release()
        return n

    def empty(self):
        """Return True if the queue is empty, False otherwise (not
        reliable!)."""

        self.mutex.acquire()
        n = self._empty()
        self.mutex.release()
        return n

    def full(self):
        """Return True if the queue is full, False otherwise (not
        reliable!)."""

        self.mutex.acquire()
        n = self._full()
        self.mutex.release()
        return n

    def put(self, item, block=True, timeout=None):
        """Put an item into the queue.

        If optional args `block` is True and `timeout` is None (the
        default), block if necessary until a free slot is
        available. If `timeout` is a positive number, it blocks at
        most `timeout` seconds and raises the ``Full`` exception if no
        free slot was available within that time.  Otherwise (`block`
        is false), put an item on the queue if a free slot is
        immediately available, else raise the ``Full`` exception
        (`timeout` is ignored in that case).
        """

        self.not_full.acquire()
        try:
            if not block:
                if self._full():
                    raise Full
            elif timeout is None:
                while self._full():
                    self.not_full.wait()
            else:
                if timeout < 0:
                    raise ValueError("'timeout' must be a positive number")
                endtime = _time() + timeout
                while self._full():
                    remaining = endtime - _time()
                    if remaining <= 0.0:
                        raise Full
                    self.not_full.wait(remaining)
            self._put(item)
            self.not_empty.notify()
        finally:
            self.not_full.release()

    def put_nowait(self, item):
        """Put an item into the queue without blocking.

        Only enqueue the item if a free slot is immediately available.
        Otherwise raise the ``Full`` exception.
        """
        return self.put(item, False)

    def get(self, block=True, timeout=None):
        """Remove and return an item from the queue.

        If optional args `block` is True and `timeout` is None (the
        default), block if necessary until an item is available. If
        `timeout` is a positive number, it blocks at most `timeout`
        seconds and raises the ``Empty`` exception if no item was
        available within that time.  Otherwise (`block` is false),
        return an item if one is immediately available, else raise the
        ``Empty`` exception (`timeout` is ignored in that case).
        """
        self.not_empty.acquire()
        try:
            if not block:
                if self._empty():
                    raise Empty
            elif timeout is None:
                while self._empty():
                    self.not_empty.wait()
            else:
                if timeout < 0:
                    raise ValueError("'timeout' must be a positive number")
                endtime = _time() + timeout
                while self._empty():
                    remaining = endtime - _time()
                    if remaining <= 0.0:
                        raise Empty
                    self.not_empty.wait(remaining)
            item = self._get()
            self.not_full.notify()
            return item
        finally:
            self.not_empty.release()

    def get_nowait(self):
        """Remove and return an item from the queue without blocking.

        Only get an item if one is immediately available.  Otherwise
        raise the ``Empty`` exception.
        """

        return self.get(False)

    # Override these methods to implement other queue organizations
    # (e.g. stack or priority queue).
    # These will only be called with appropriate locks held

    # Initialize the queue representation
    def _init(self, maxsize):
        self.maxsize = maxsize
        self.queue = deque()

    def _qsize(self):
        return len(self.queue)

    # Check whether the queue is empty
    def _empty(self):
        return not self.queue

    # Check whether the queue is full
    def _full(self):
        return self.maxsize > 0 and len(self.queue) == self.maxsize

    # Put a new item in the queue
    def _put(self, item):
        self.queue.append(item)

    # Get an item from the queue
    def _get(self):
        return self.queue.popleft()
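This Queue mirrors the standard-library API but takes an RLock for its mutex, so a `put()` fired from a weakref callback during `get()` cannot deadlock the pool. Basic non-blocking behavior, sketched under the assumption that the vendored module is importable:

# Illustrative sketch, not part of the committed diff.
from sqlalchemy.util.queue import Empty, Full, Queue

q = Queue(maxsize=2)
q.put("a")
q.put("b")

try:
    q.put("c", block=False)      # queue is full and we refuse to wait
except Full:
    pass

assert q.get() == "a"            # FIFO order via deque.popleft()
assert q.get() == "b"

try:
    q.get(block=False)           # nothing left
except Empty:
    pass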
lib/python3.4/site-packages/sqlalchemy/util/topological.py (new file, 97 additions)
@@ -0,0 +1,97 @@
# util/topological.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Topological sorting algorithms."""

from ..exc import CircularDependencyError
from .. import util

__all__ = ['sort', 'sort_as_subsets', 'find_cycles']


def sort_as_subsets(tuples, allitems):

    edges = util.defaultdict(set)
    for parent, child in tuples:
        edges[child].add(parent)

    todo = set(allitems)

    while todo:
        output = set()
        for node in list(todo):
            if not todo.intersection(edges[node]):
                output.add(node)

        if not output:
            raise CircularDependencyError(
                "Circular dependency detected.",
                find_cycles(tuples, allitems),
                _gen_edges(edges)
            )

        todo.difference_update(output)
        yield output


def sort(tuples, allitems):
    """sort the given list of items by dependency.

    'tuples' is a list of tuples representing a partial ordering.
    """

    for set_ in sort_as_subsets(tuples, allitems):
        for s in set_:
            yield s


def find_cycles(tuples, allitems):
    # adapted from:
    # http://neopythonic.blogspot.com/2009/01/detecting-cycles-in-directed-graph.html

    edges = util.defaultdict(set)
    for parent, child in tuples:
        edges[parent].add(child)
    nodes_to_test = set(edges)

    output = set()

    # we'd like to find all nodes that are
    # involved in cycles, so we do the full
    # pass through the whole thing for each
    # node in the original list.

    # we can go just through parent edge nodes.
    # if a node is only a child and never a parent,
    # by definition it can't be part of a cycle.  same
    # if it's not in the edges at all.
    for node in nodes_to_test:
        stack = [node]
        todo = nodes_to_test.difference(stack)
        while stack:
            top = stack[-1]
            for node in edges[top]:
                if node in stack:
                    cyc = stack[stack.index(node):]
                    todo.difference_update(cyc)
                    output.update(cyc)

                if node in todo:
                    stack.append(node)
                    todo.remove(node)
                    break
            else:
                node = stack.pop()
    return output


def _gen_edges(edges):
    return set([
        (right, left)
        for left in edges
        for right in edges[left]
    ])
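`sort_as_subsets()` repeatedly emits the set of nodes whose parents have already been emitted, so `sort()` yields a valid dependency order and `find_cycles()` reports the offending nodes when no progress can be made. A small sketch with made-up node names, assuming the vendored package is importable:

# Illustrative sketch, not part of the committed diff.
from sqlalchemy.util.topological import find_cycles, sort

# (parent, child) tuples: each parent must come before its child.
tuples = [("users", "addresses"), ("users", "orders"), ("orders", "items")]
allitems = ["users", "addresses", "orders", "items"]

order = list(sort(tuples, allitems))
# every parent appears before each of its children
for parent, child in tuples:
    assert order.index(parent) < order.index(child)

# a cycle is reported rather than silently dropped
cyclic = tuples + [("items", "users")]
assert find_cycles(cyclic, allitems) == {"users", "orders", "items"}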