run update

commit c6f20c5f92  (parent e85b9478a7)
515 changed files with 22459 additions and 12734 deletions
lib/python3.7/site-packages/sqlalchemy/util/__init__.py  (new file, 49 lines)
@@ -0,0 +1,49 @@
# util/__init__.py
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from .compat import callable, cmp, reduce, \
    threading, py3k, py33, py36, py2k, jython, pypy, cpython, win32, \
    pickle, dottedgetter, parse_qsl, namedtuple, next, reraise, \
    raise_from_cause, text_type, safe_kwarg, string_types, int_types, \
    binary_type, nested, \
    quote_plus, with_metaclass, print_, itertools_filterfalse, u, ue, b,\
    unquote_plus, unquote, b64decode, b64encode, byte_buffer, itertools_filter,\
    iterbytes, StringIO, inspect_getargspec, zip_longest

from ._collections import KeyedTuple, ImmutableContainer, immutabledict, \
    Properties, OrderedProperties, ImmutableProperties, OrderedDict, \
    OrderedSet, IdentitySet, OrderedIdentitySet, column_set, \
    column_dict, ordered_column_set, populate_column_dict, unique_list, \
    UniqueAppender, PopulateDict, EMPTY_SET, to_list, to_set, \
    to_column_set, update_copy, flatten_iterator, has_intersection, \
    LRUCache, ScopedRegistry, ThreadLocalRegistry, WeakSequence, \
    coerce_generator_arg, lightweight_named_tuple

from .langhelpers import iterate_attributes, class_hierarchy, \
    portable_instancemethod, unbound_method_to_callable, \
    getargspec_init, format_argspec_init, format_argspec_plus, \
    get_func_kwargs, get_cls_kwargs, decorator, as_interface, \
    memoized_property, memoized_instancemethod, md5_hex, \
    group_expirable_memoized_property, dependencies, decode_slice, \
    monkeypatch_proxied_specials, asbool, bool_or_str, coerce_kw_type,\
    duck_type_collection, assert_arg_type, symbol, dictlike_iteritems,\
    classproperty, set_creation_order, warn_exception, warn, NoneType,\
    constructor_copy, methods_equivalent, chop_traceback, asint,\
    generic_repr, counter, PluginLoader, hybridproperty, hybridmethod, \
    safe_reraise,\
    get_callable_argspec, only_once, attrsetter, ellipses_string, \
    warn_limited, map_bits, MemoizedSlots, EnsureKWArgType

from .deprecations import warn_deprecated, warn_pending_deprecation, \
    deprecated, pending_deprecation, inject_docstring_text

# things that used to be not always available,
# but are now as of current support Python versions
from collections import defaultdict
from functools import partial
from functools import update_wrapper
from contextlib import contextmanager
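The re-exports above flatten compat, _collections, langhelpers, and deprecations into the single sqlalchemy.util namespace. A minimal usage sketch (illustrative only, not part of the commit; it assumes the package added above is importable):

# --- illustrative example, not part of the commit ---
from sqlalchemy import util

s = util.OrderedSet([3, 1, 2])      # re-exported from _collections
print(list(s))                      # [3, 1, 2] - insertion order preserved
util.warn("routed through util")    # re-exported from langhelpers, emits SAWarning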
lib/python3.7/site-packages/sqlalchemy/util/_collections.py  (new file, 1043 lines)
File diff suppressed because it is too large.

lib/python3.7/site-packages/sqlalchemy/util/compat.py  (new file, 263 lines)
@@ -0,0 +1,263 @@
# util/compat.py
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Handle Python version/platform incompatibilities."""

import sys

try:
    import threading
except ImportError:
    import dummy_threading as threading

py36 = sys.version_info >= (3, 6)
py33 = sys.version_info >= (3, 3)
py32 = sys.version_info >= (3, 2)
py3k = sys.version_info >= (3, 0)
py2k = sys.version_info < (3, 0)
py265 = sys.version_info >= (2, 6, 5)
jython = sys.platform.startswith('java')
pypy = hasattr(sys, 'pypy_version_info')
win32 = sys.platform.startswith('win')
cpython = not pypy and not jython  # TODO: something better for this ?

import collections
next = next

if py3k:
    import pickle
else:
    try:
        import cPickle as pickle
    except ImportError:
        import pickle

# work around http://bugs.python.org/issue2646
if py265:
    safe_kwarg = lambda arg: arg
else:
    safe_kwarg = str

ArgSpec = collections.namedtuple("ArgSpec",
                                 ["args", "varargs", "keywords", "defaults"])

if py3k:
    import builtins

    from inspect import getfullargspec as inspect_getfullargspec
    from urllib.parse import (quote_plus, unquote_plus,
                              parse_qsl, quote, unquote)
    import configparser
    from io import StringIO

    from io import BytesIO as byte_buffer

    def inspect_getargspec(func):
        return ArgSpec(
            *inspect_getfullargspec(func)[0:4]
        )

    string_types = str,
    binary_type = bytes
    text_type = str
    int_types = int,
    iterbytes = iter

    def u(s):
        return s

    def ue(s):
        return s

    def b(s):
        return s.encode("latin-1")

    if py32:
        callable = callable
    else:
        def callable(fn):
            return hasattr(fn, '__call__')

    def cmp(a, b):
        return (a > b) - (a < b)

    from functools import reduce

    print_ = getattr(builtins, "print")

    import_ = getattr(builtins, '__import__')

    import itertools
    itertools_filterfalse = itertools.filterfalse
    itertools_filter = filter
    itertools_imap = map
    from itertools import zip_longest

    import base64

    def b64encode(x):
        return base64.b64encode(x).decode('ascii')

    def b64decode(x):
        return base64.b64decode(x.encode('ascii'))

else:
    from inspect import getargspec as inspect_getfullargspec
    inspect_getargspec = inspect_getfullargspec
    from urllib import quote_plus, unquote_plus, quote, unquote
    from urlparse import parse_qsl
    import ConfigParser as configparser
    from StringIO import StringIO
    from cStringIO import StringIO as byte_buffer

    string_types = basestring,
    binary_type = str
    text_type = unicode
    int_types = int, long

    def iterbytes(buf):
        return (ord(byte) for byte in buf)

    def u(s):
        # this differs from what six does, which doesn't support non-ASCII
        # strings - we only use u() with
        # literal source strings, and all our source files with non-ascii
        # in them (all are tests) are utf-8 encoded.
        return unicode(s, "utf-8")

    def ue(s):
        return unicode(s, "unicode_escape")

    def b(s):
        return s

    def import_(*args):
        if len(args) == 4:
            args = args[0:3] + ([str(arg) for arg in args[3]],)
        return __import__(*args)

    callable = callable
    cmp = cmp
    reduce = reduce

    import base64
    b64encode = base64.b64encode
    b64decode = base64.b64decode

    def print_(*args, **kwargs):
        fp = kwargs.pop("file", sys.stdout)
        if fp is None:
            return
        for arg in enumerate(args):
            if not isinstance(arg, basestring):
                arg = str(arg)
            fp.write(arg)

    import itertools
    itertools_filterfalse = itertools.ifilterfalse
    itertools_filter = itertools.ifilter
    itertools_imap = itertools.imap
    from itertools import izip_longest as zip_longest


import time
if win32 or jython:
    time_func = time.clock
else:
    time_func = time.time

from collections import namedtuple
from operator import attrgetter as dottedgetter


if py3k:
    def reraise(tp, value, tb=None, cause=None):
        if cause is not None:
            assert cause is not value, "Same cause emitted"
            value.__cause__ = cause
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value

else:
    # not as nice as that of Py3K, but at least preserves
    # the code line where the issue occurred
    exec("def reraise(tp, value, tb=None, cause=None):\n"
         "    if cause is not None:\n"
         "        assert cause is not value, 'Same cause emitted'\n"
         "    raise tp, value, tb\n")


def raise_from_cause(exception, exc_info=None):
    if exc_info is None:
        exc_info = sys.exc_info()
    exc_type, exc_value, exc_tb = exc_info
    cause = exc_value if exc_value is not exception else None
    reraise(type(exception), exception, tb=exc_tb, cause=cause)

if py3k:
    exec_ = getattr(builtins, 'exec')
else:
    def exec_(func_text, globals_, lcl=None):
        if lcl is None:
            exec('exec func_text in globals_')
        else:
            exec('exec func_text in globals_, lcl')


def with_metaclass(meta, *bases):
    """Create a base class with a metaclass.

    Drops the middle class upon creation.

    Source: http://lucumr.pocoo.org/2013/5/21/porting-to-python-3-redux/

    """

    class metaclass(meta):
        __call__ = type.__call__
        __init__ = type.__init__

        def __new__(cls, name, this_bases, d):
            if this_bases is None:
                return type.__new__(cls, name, (), d)
            return meta(name, bases, d)
    return metaclass('temporary_class', None, {})


from contextlib import contextmanager

try:
    from contextlib import nested
except ImportError:
    # removed in py3k, credit to mitsuhiko for
    # workaround

    @contextmanager
    def nested(*managers):
        exits = []
        vars = []
        exc = (None, None, None)
        try:
            for mgr in managers:
                exit = mgr.__exit__
                enter = mgr.__enter__
                vars.append(enter())
                exits.append(exit)
            yield vars
        except:
            exc = sys.exc_info()
        finally:
            while exits:
                exit = exits.pop()
                try:
                    if exit(*exc):
                        exc = (None, None, None)
                except:
                    exc = sys.exc_info()
            if exc != (None, None, None):
                reraise(exc[0], exc[1], exc[2])
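with_metaclass() above is the usual Python 2/3-compatible way to attach a metaclass without writing version-specific class statements. A small sketch of how it is used (illustrative only, not part of the commit; the Meta/Tagged names are made up):

# --- illustrative example, not part of the commit ---
from sqlalchemy.util.compat import with_metaclass


class Meta(type):
    def __new__(cls, name, bases, d):
        d = dict(d, tagged=True)        # the metaclass injects an attribute
        return type.__new__(cls, name, bases, d)


class Tagged(with_metaclass(Meta, object)):
    pass


assert Tagged.tagged                    # Meta ran, on Python 2 and Python 3 alike

The temporary class returned by with_metaclass() is dropped at class-creation time, so Tagged ends up with (object,) as its bases and Meta as its metaclass.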
lib/python3.7/site-packages/sqlalchemy/util/deprecations.py  (new file, 146 lines)
@@ -0,0 +1,146 @@
# util/deprecations.py
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Helpers related to deprecation of functions, methods, classes, other
functionality."""

from .. import exc
import warnings
import re
from .langhelpers import decorator


def warn_deprecated(msg, stacklevel=3):
    warnings.warn(msg, exc.SADeprecationWarning, stacklevel=stacklevel)


def warn_pending_deprecation(msg, stacklevel=3):
    warnings.warn(msg, exc.SAPendingDeprecationWarning, stacklevel=stacklevel)


def deprecated(version, message=None, add_deprecation_to_docstring=True):
    """Decorates a function and issues a deprecation warning on use.

    :param message:
      If provided, issue message in the warning.  A sensible default
      is used if not provided.

    :param add_deprecation_to_docstring:
      Default True.  If False, the wrapped function's __doc__ is left
      as-is.  If True, the 'message' is prepended to the docs if
      provided, or sensible default if message is omitted.

    """

    if add_deprecation_to_docstring:
        header = ".. deprecated:: %s %s" % \
            (version, (message or ''))
    else:
        header = None

    if message is None:
        message = "Call to deprecated function %(func)s"

    def decorate(fn):
        return _decorate_with_warning(
            fn, exc.SADeprecationWarning,
            message % dict(func=fn.__name__), header)
    return decorate


def pending_deprecation(version, message=None,
                        add_deprecation_to_docstring=True):
    """Decorates a function and issues a pending deprecation warning on use.

    :param version:
      An approximate future version at which point the pending deprecation
      will become deprecated.  Not used in messaging.

    :param message:
      If provided, issue message in the warning.  A sensible default
      is used if not provided.

    :param add_deprecation_to_docstring:
      Default True.  If False, the wrapped function's __doc__ is left
      as-is.  If True, the 'message' is prepended to the docs if
      provided, or sensible default if message is omitted.
    """

    if add_deprecation_to_docstring:
        header = ".. deprecated:: %s (pending) %s" % \
            (version, (message or ''))
    else:
        header = None

    if message is None:
        message = "Call to deprecated function %(func)s"

    def decorate(fn):
        return _decorate_with_warning(
            fn, exc.SAPendingDeprecationWarning,
            message % dict(func=fn.__name__), header)
    return decorate


def _sanitize_restructured_text(text):
    def repl(m):
        type_, name = m.group(1, 2)
        if type_ in ("func", "meth"):
            name += "()"
        return name
    return re.sub(r'\:(\w+)\:`~?\.?(.+?)`', repl, text)


def _decorate_with_warning(func, wtype, message, docstring_header=None):
    """Wrap a function with a warnings.warn and augmented docstring."""

    message = _sanitize_restructured_text(message)

    @decorator
    def warned(fn, *args, **kwargs):
        warnings.warn(message, wtype, stacklevel=3)
        return fn(*args, **kwargs)

    doc = func.__doc__ is not None and func.__doc__ or ''
    if docstring_header is not None:
        docstring_header %= dict(func=func.__name__)

        doc = inject_docstring_text(doc, docstring_header, 1)

    decorated = warned(func)
    decorated.__doc__ = doc
    return decorated

import textwrap


def _dedent_docstring(text):
    split_text = text.split("\n", 1)
    if len(split_text) == 1:
        return text
    else:
        firstline, remaining = split_text
        if not firstline.startswith(" "):
            return firstline + "\n" + textwrap.dedent(remaining)
        else:
            return textwrap.dedent(text)


def inject_docstring_text(doctext, injecttext, pos):
    doctext = _dedent_docstring(doctext or "")
    lines = doctext.split('\n')
    injectlines = textwrap.dedent(injecttext).split("\n")
    if injectlines[0]:
        injectlines.insert(0, "")

    blanks = [num for num, line in enumerate(lines) if not line.strip()]
    blanks.insert(0, 0)

    inject_pos = blanks[min(pos, len(blanks) - 1)]

    lines = lines[0:inject_pos] + injectlines + lines[inject_pos:]
    return "\n".join(lines)
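A sketch of how the deprecated() decorator above is applied (illustrative only, not part of the commit; the function name, version string, and message are made up):

# --- illustrative example, not part of the commit ---
from sqlalchemy.util.deprecations import deprecated


@deprecated("1.1", message="Use :func:`.new_helper` instead")
def old_helper():
    """Original docstring."""
    return 42


old_helper()   # emits SADeprecationWarning: "Use new_helper() instead"
# _sanitize_restructured_text() stripped the :func: role for the warning text,
# and old_helper.__doc__ now carries a ".. deprecated:: 1.1 ..." note.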
lib/python3.7/site-packages/sqlalchemy/util/langhelpers.py  (new file, 1384 lines)
File diff suppressed because it is too large.

lib/python3.7/site-packages/sqlalchemy/util/queue.py  (new file, 199 lines)
@@ -0,0 +1,199 @@
# util/queue.py
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""An adaptation of Py2.3/2.4's Queue module which supports reentrant
behavior, using RLock instead of Lock for its mutex object.  The
Queue object is used exclusively by the sqlalchemy.pool.QueuePool
class.

This is to support the connection pool's usage of weakref callbacks to return
connections to the underlying Queue, which can in extremely
rare cases be invoked within the ``get()`` method of the Queue itself,
producing a ``put()`` inside the ``get()`` and therefore a reentrant
condition.

"""

from collections import deque
from time import time as _time
from .compat import threading


__all__ = ['Empty', 'Full', 'Queue']


class Empty(Exception):
    "Exception raised by Queue.get(block=0)/get_nowait()."

    pass


class Full(Exception):
    "Exception raised by Queue.put(block=0)/put_nowait()."

    pass


class Queue:
    def __init__(self, maxsize=0):
        """Initialize a queue object with a given maximum size.

        If `maxsize` is <= 0, the queue size is infinite.
        """

        self._init(maxsize)
        # mutex must be held whenever the queue is mutating.  All methods
        # that acquire mutex must release it before returning.  mutex
        # is shared between the two conditions, so acquiring and
        # releasing the conditions also acquires and releases mutex.
        self.mutex = threading.RLock()
        # Notify not_empty whenever an item is added to the queue; a
        # thread waiting to get is notified then.
        self.not_empty = threading.Condition(self.mutex)
        # Notify not_full whenever an item is removed from the queue;
        # a thread waiting to put is notified then.
        self.not_full = threading.Condition(self.mutex)

    def qsize(self):
        """Return the approximate size of the queue (not reliable!)."""

        self.mutex.acquire()
        n = self._qsize()
        self.mutex.release()
        return n

    def empty(self):
        """Return True if the queue is empty, False otherwise (not
        reliable!)."""

        self.mutex.acquire()
        n = self._empty()
        self.mutex.release()
        return n

    def full(self):
        """Return True if the queue is full, False otherwise (not
        reliable!)."""

        self.mutex.acquire()
        n = self._full()
        self.mutex.release()
        return n

    def put(self, item, block=True, timeout=None):
        """Put an item into the queue.

        If optional args `block` is True and `timeout` is None (the
        default), block if necessary until a free slot is
        available. If `timeout` is a positive number, it blocks at
        most `timeout` seconds and raises the ``Full`` exception if no
        free slot was available within that time.  Otherwise (`block`
        is false), put an item on the queue if a free slot is
        immediately available, else raise the ``Full`` exception
        (`timeout` is ignored in that case).
        """

        self.not_full.acquire()
        try:
            if not block:
                if self._full():
                    raise Full
            elif timeout is None:
                while self._full():
                    self.not_full.wait()
            else:
                if timeout < 0:
                    raise ValueError("'timeout' must be a positive number")
                endtime = _time() + timeout
                while self._full():
                    remaining = endtime - _time()
                    if remaining <= 0.0:
                        raise Full
                    self.not_full.wait(remaining)
            self._put(item)
            self.not_empty.notify()
        finally:
            self.not_full.release()

    def put_nowait(self, item):
        """Put an item into the queue without blocking.

        Only enqueue the item if a free slot is immediately available.
        Otherwise raise the ``Full`` exception.
        """
        return self.put(item, False)

    def get(self, block=True, timeout=None):
        """Remove and return an item from the queue.

        If optional args `block` is True and `timeout` is None (the
        default), block if necessary until an item is available. If
        `timeout` is a positive number, it blocks at most `timeout`
        seconds and raises the ``Empty`` exception if no item was
        available within that time.  Otherwise (`block` is false),
        return an item if one is immediately available, else raise the
        ``Empty`` exception (`timeout` is ignored in that case).
        """
        self.not_empty.acquire()
        try:
            if not block:
                if self._empty():
                    raise Empty
            elif timeout is None:
                while self._empty():
                    self.not_empty.wait()
            else:
                if timeout < 0:
                    raise ValueError("'timeout' must be a positive number")
                endtime = _time() + timeout
                while self._empty():
                    remaining = endtime - _time()
                    if remaining <= 0.0:
                        raise Empty
                    self.not_empty.wait(remaining)
            item = self._get()
            self.not_full.notify()
            return item
        finally:
            self.not_empty.release()

    def get_nowait(self):
        """Remove and return an item from the queue without blocking.

        Only get an item if one is immediately available. Otherwise
        raise the ``Empty`` exception.
        """

        return self.get(False)

    # Override these methods to implement other queue organizations
    # (e.g. stack or priority queue).
    # These will only be called with appropriate locks held

    # Initialize the queue representation
    def _init(self, maxsize):
        self.maxsize = maxsize
        self.queue = deque()

    def _qsize(self):
        return len(self.queue)

    # Check whether the queue is empty
    def _empty(self):
        return not self.queue

    # Check whether the queue is full
    def _full(self):
        return self.maxsize > 0 and len(self.queue) == self.maxsize

    # Put a new item in the queue
    def _put(self, item):
        self.queue.append(item)

    # Get an item from the queue
    def _get(self):
        return self.queue.popleft()
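A small usage sketch for the Queue above (illustrative only, not part of the commit; it mirrors the era's stdlib Queue interface, which this class reimplements with an RLock):

# --- illustrative example, not part of the commit ---
from sqlalchemy.util.queue import Queue, Empty, Full

q = Queue(maxsize=2)
q.put("conn-1")
q.put("conn-2")
assert q.full()

try:
    q.put("conn-3", block=False)       # no free slot, non-blocking put
except Full:
    pass

assert q.get_nowait() == "conn-1"      # FIFO order via deque.popleft()

try:
    Queue().get(timeout=0.01)          # nothing queued: timed get() raises
except Empty:
    pass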
lib/python3.7/site-packages/sqlalchemy/util/topological.py  (new file, 100 lines)
@@ -0,0 +1,100 @@
# util/topological.py
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Topological sorting algorithms."""

from ..exc import CircularDependencyError
from .. import util

__all__ = ['sort', 'sort_as_subsets', 'find_cycles']


def sort_as_subsets(tuples, allitems, deterministic_order=False):

    edges = util.defaultdict(set)
    for parent, child in tuples:
        edges[child].add(parent)

    Set = util.OrderedSet if deterministic_order else set

    todo = Set(allitems)

    while todo:
        output = Set()
        for node in todo:
            if todo.isdisjoint(edges[node]):
                output.add(node)

        if not output:
            raise CircularDependencyError(
                "Circular dependency detected.",
                find_cycles(tuples, allitems),
                _gen_edges(edges)
            )

        todo.difference_update(output)
        yield output


def sort(tuples, allitems, deterministic_order=False):
    """sort the given list of items by dependency.

    'tuples' is a list of tuples representing a partial ordering.
    'deterministic_order' keeps items within a dependency tier in list order.
    """

    for set_ in sort_as_subsets(tuples, allitems, deterministic_order):
        for s in set_:
            yield s


def find_cycles(tuples, allitems):
    # adapted from:
    # http://neopythonic.blogspot.com/2009/01/detecting-cycles-in-directed-graph.html

    edges = util.defaultdict(set)
    for parent, child in tuples:
        edges[parent].add(child)
    nodes_to_test = set(edges)

    output = set()

    # we'd like to find all nodes that are
    # involved in cycles, so we do the full
    # pass through the whole thing for each
    # node in the original list.

    # we can go just through parent edge nodes.
    # if a node is only a child and never a parent,
    # by definition it can't be part of a cycle.  same
    # if it's not in the edges at all.
    for node in nodes_to_test:
        stack = [node]
        todo = nodes_to_test.difference(stack)
        while stack:
            top = stack[-1]
            for node in edges[top]:
                if node in stack:
                    cyc = stack[stack.index(node):]
                    todo.difference_update(cyc)
                    output.update(cyc)

                if node in todo:
                    stack.append(node)
                    todo.remove(node)
                    break
            else:
                node = stack.pop()
    return output


def _gen_edges(edges):
    return set([
        (right, left)
        for left in edges
        for right in edges[left]
    ])
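A sketch of driving the topological sort above (illustrative only, not part of the commit; the table names are made up):

# --- illustrative example, not part of the commit ---
from sqlalchemy.util import topological

# (parent, child) tuples: each parent must sort before its child
tuples = [("users", "addresses"), ("users", "orders"), ("orders", "items")]
allitems = ["users", "addresses", "orders", "items"]

print(list(topological.sort(tuples, allitems, deterministic_order=True)))
# ['users', 'addresses', 'orders', 'items']

# A cycle such as [("a", "b"), ("b", "a")] makes sort_as_subsets() raise
# CircularDependencyError; find_cycles() reports the nodes involved.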